From 74bdccba8beae606fe2e7d77a50a12003259f00b Mon Sep 17 00:00:00 2001
From: SDKAuto
Date: Mon, 21 Jun 2021 01:52:42 +0000
Subject: [PATCH] CodeGen from PR 14770 in Azure/azure-rest-api-specs [DataFactory] Update avroCompressionCodec and orcCompressionCodec from string to object (#14770)

* [DataFactory] Update avroCompressionCodec and orcCompressionCodec from string to object

* update json
---
src/datafactory/HISTORY.rst | 18 -
.../generated/_client_factory.py | 12 +
.../azext_datafactory/generated/_help.py | 106 +-
.../azext_datafactory/generated/_params.py | 83 +-
.../azext_datafactory/generated/action.py | 82 +-
.../azext_datafactory/generated/commands.py | 30 +
.../azext_datafactory/generated/custom.py | 96 +-
.../tests/latest/example_steps.py | 62 +-
.../tests/latest/test_datafactory_scenario.py | 1 +
.../test_datafactory_scenario_coverage.md | 48 -
.../_data_factory_management_client.py | 1 -
.../datafactory/aio/_configuration_async.py | 67 -
.../aio/_data_factory_management_client.py | 1 -
.../_data_factory_management_client_async.py | 143 --
.../aio/operations_async/__init__.py | 45 -
.../_activity_run_operations_async.py | 127 --
...ata_flow_debug_session_operations_async.py | 551 -------
.../_data_flow_operations_async.py | 309 ----
.../_dataset_operations_async.py | 311 ----
.../_exposure_control_operations_async.py | 241 ---
.../_factory_operations_async.py | 658 --------
...tegration_runtime_node_operations_async.py | 301 ----
...untime_object_metadata_operations_async.py | 230 ---
.../_integration_runtime_operations_async.py | 1176 -------------
.../_linked_service_operations_async.py | 312 ----
...naged_private_endpoint_operations_async.py | 336 ----
...anaged_virtual_network_operations_async.py | 255 ---
.../_operation_operations_async.py | 101 --
.../_pipeline_operations_async.py | 405 -----
.../_pipeline_run_operations_async.py | 243 ---
.../_trigger_operations_async.py | 877 ----------
.../_trigger_run_operations_async.py | 241 ---
.../datafactory/models/__init__.py | 13 +-
.../_data_factory_management_client_enums.py | 36 +-
.../datafactory/models/_models.py | 930 ++++++++++-
.../datafactory/models/_models_py3.py | 1448 ++++++++++++++---
.../operations/_activity_run_operations.py | 132 --
.../operations/_data_flow_operations.py | 317 ----
.../operations/_dataset_operations.py | 319 ----
.../operations/_factory_operations.py | 671 --------
.../_integration_runtime_node_operations.py | 309 ----
.../_integration_runtime_operations.py | 1198 --------------
.../operations/_linked_service_operations.py | 320 ----
.../_managed_private_endpoint_operations.py | 344 ----
.../_managed_virtual_network_operations.py | 262 ---
.../operations/_operation_operations.py | 106 --
.../operations/_pipeline_operations.py | 414 -----
.../operations/_pipeline_run_operations.py | 250 ---
.../operations/_trigger_operations.py | 895 ----------
.../operations/_trigger_run_operations.py | 248 ---
src/datafactory/gen.zip | Bin 39102 -> 0 bytes
src/datafactory/linter_exclusions.yml | 5 -
src/datafactory/report.md | 120 +-
53 files changed, 2642 insertions(+), 13164 deletions(-)
delete mode 100644 src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario_coverage.md
delete mode 100644 src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_configuration_async.py
delete mode 100644 src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client_async.py
delete mode 100644 
src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/__init__.py delete mode 100644 src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_activity_run_operations_async.py delete mode 100644 src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_debug_session_operations_async.py delete mode 100644 src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_operations_async.py delete mode 100644 src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_dataset_operations_async.py delete mode 100644 src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_exposure_control_operations_async.py delete mode 100644 src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_factory_operations_async.py delete mode 100644 src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_node_operations_async.py delete mode 100644 src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_object_metadata_operations_async.py delete mode 100644 src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_operations_async.py delete mode 100644 src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_linked_service_operations_async.py delete mode 100644 src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_private_endpoint_operations_async.py delete mode 100644 src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_virtual_network_operations_async.py delete mode 100644 src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_operation_operations_async.py delete mode 100644 src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_operations_async.py delete mode 100644 src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_run_operations_async.py delete mode 100644 src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_operations_async.py delete mode 100644 src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_run_operations_async.py delete mode 100644 src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_run_operations.py delete mode 100644 src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_operations.py delete mode 100644 src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_dataset_operations.py delete mode 100644 src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factory_operations.py delete mode 100644 src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_node_operations.py delete mode 100644 src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_operations.py delete mode 100644 src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_service_operations.py delete mode 100644 src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoint_operations.py delete mode 100644 src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_network_operations.py delete mode 100644 
src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operation_operations.py delete mode 100644 src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_operations.py delete mode 100644 src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_run_operations.py delete mode 100644 src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_operations.py delete mode 100644 src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_run_operations.py delete mode 100644 src/datafactory/gen.zip delete mode 100644 src/datafactory/linter_exclusions.yml diff --git a/src/datafactory/HISTORY.rst b/src/datafactory/HISTORY.rst index f03bb41574e..1c139576ba0 100644 --- a/src/datafactory/HISTORY.rst +++ b/src/datafactory/HISTORY.rst @@ -3,24 +3,6 @@ Release History =============== -0.3.0 -+++++ -* [BREAKING CHANGE] Renamed command subgroup `az datafactory factory` to `az datafactory`. -* [BREAKING CHANGE] `az datafactory integration-runtime managed create`: `--type-properties-compute-properties` renamed to `--compute-properties`, - `--type-properties-ssis-properties` renamed to `--ssis-properties`. -* [BREAKING CHANGE] `az datafactory integration-runtime self-hosted create`: `--type-properties-linked-info` renamed to `--linked-info`. -* [BREAKING CHANGE] `az datafactory integration-runtime update`: `--properties` renamed to `--linked-service`. -* [BREAKING CHANGE] `az datafactory linked-service delete`: `--properties` renamed to `--dataset`. -* [BREAKING CHANGE] `az datafactory trigger list`: `--properties` renamed to `--trigger`. - -0.2.1 -+++++ -* az datafactory factory create: Enable managed identity by default - -0.2.0 -++++++ -* add update command for linked services and triggers and datasets - 0.1.0 ++++++ * Initial release. diff --git a/src/datafactory/azext_datafactory/generated/_client_factory.py b/src/datafactory/azext_datafactory/generated/_client_factory.py index 7db87b484da..837a01be1e4 100644 --- a/src/datafactory/azext_datafactory/generated/_client_factory.py +++ b/src/datafactory/azext_datafactory/generated/_client_factory.py @@ -54,3 +54,15 @@ def cf_trigger(cli_ctx, *_): def cf_trigger_run(cli_ctx, *_): return cf_datafactory_cl(cli_ctx).trigger_runs + + +def cf_private_end_point_connection(cli_ctx, *_): + return cf_datafactory_cl(cli_ctx).private_end_point_connections + + +def cf_private_endpoint_connection(cli_ctx, *_): + return cf_datafactory_cl(cli_ctx).private_endpoint_connection + + +def cf_private_link_resource(cli_ctx, *_): + return cf_datafactory_cl(cli_ctx).private_link_resources diff --git a/src/datafactory/azext_datafactory/generated/_help.py b/src/datafactory/azext_datafactory/generated/_help.py index fd2ab1dcd0e..98e3ee0a982 100644 --- a/src/datafactory/azext_datafactory/generated/_help.py +++ b/src/datafactory/azext_datafactory/generated/_help.py @@ -69,6 +69,14 @@ collaboration-branch: Required. Collaboration branch. root-folder: Required. Root folder. last-commit-id: Last commit id. + - name: --identity + short-summary: "User assigned identity to use to authenticate to customer's key vault. If not provided Managed \ +Service Identity will be used." + long-summary: | + Usage: --identity user-assigned-identity=XX + + user-assigned-identity: The resource id of the user assigned identity to authenticate to customer's key \ +vault. 
examples: - name: Factories_CreateOrUpdate text: |- @@ -206,6 +214,13 @@ helps['datafactory integration-runtime managed create'] = """ type: command short-summary: "Create an integration runtime." + parameters: + - name: --managed-virtual-network + short-summary: "Managed Virtual Network reference." + long-summary: | + Usage: --managed-virtual-network reference-name=XX + + reference-name: Required. Reference ManagedVirtualNetwork name. """ helps['datafactory integration-runtime self-hosted'] = """ @@ -591,7 +606,7 @@ taset\\"}],\\"typeProperties\\":{\\"dataIntegrationUnits\\":32,\\"sink\\":{\\"type\\":\\"BlobSink\\"},\\"source\\":{\\"\ type\\":\\"BlobSource\\"}}}],\\"isSequential\\":true,\\"items\\":{\\"type\\":\\"Expression\\",\\"value\\":\\"@pipeline(\ ).parameters.OutputBlobNameList\\"}}}]" --parameters "{\\"OutputBlobNameList\\":{\\"type\\":\\"Array\\"}}" --duration \ -"0.00:10:00" --name "examplePipeline" --resource-group "exampleResourceGroup" +"0.00:10:00" --pipeline-name "examplePipeline" --resource-group "exampleResourceGroup" """ helps['datafactory pipeline delete'] = """ @@ -902,3 +917,92 @@ az datafactory trigger-run rerun --factory-name "exampleFactoryName" --resource-group \ "exampleResourceGroup" --run-id "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" --trigger-name "exampleTrigger" """ + +helps['datafactory private-end-point-connection'] = """ + type: group + short-summary: Manage private end point connection with datafactory +""" + +helps['datafactory private-end-point-connection list'] = """ + type: command + short-summary: "Lists Private endpoint connections." + examples: + - name: privateEndPointConnections_ListByFactory + text: |- + az datafactory private-end-point-connection list --factory-name "exampleFactoryName" --resource-group \ +"exampleResourceGroup" +""" + +helps['datafactory private-endpoint-connection'] = """ + type: group + short-summary: Manage private endpoint connection with datafactory +""" + +helps['datafactory private-endpoint-connection show'] = """ + type: command + short-summary: "Gets a private endpoint connection." + examples: + - name: Get a private endpoint connection for a datafactory. + text: |- + az datafactory private-endpoint-connection show --factory-name "exampleFactoryName" --name "connection" \ +--resource-group "exampleResourceGroup" +""" + +helps['datafactory private-endpoint-connection create'] = """ + type: command + short-summary: "Approves or rejects a private endpoint connection." + parameters: + - name: --private-link-service-connection-state + short-summary: "The state of a private link connection" + long-summary: | + Usage: --private-link-service-connection-state status=XX description=XX actions-required=XX + + status: Status of a private link connection + description: Description of a private link connection + actions-required: ActionsRequired for a private link connection + examples: + - name: Approves or rejects a private endpoint connection for a factory. + text: |- + az datafactory private-endpoint-connection create --factory-name "exampleFactoryName" --name \ +"connection" --private-link-service-connection-state description="Approved by admin." actions-required="" \ +status="Approved" --resource-group "exampleResourceGroup" +""" + +helps['datafactory private-endpoint-connection update'] = """ + type: command + short-summary: "Approves or rejects a private endpoint connection." 
+ parameters: + - name: --private-link-service-connection-state + short-summary: "The state of a private link connection" + long-summary: | + Usage: --private-link-service-connection-state status=XX description=XX actions-required=XX + + status: Status of a private link connection + description: Description of a private link connection + actions-required: ActionsRequired for a private link connection +""" + +helps['datafactory private-endpoint-connection delete'] = """ + type: command + short-summary: "Deletes a private endpoint connection." + examples: + - name: Delete a private endpoint connection for a datafactory. + text: |- + az datafactory private-endpoint-connection delete --factory-name "exampleFactoryName" --name \ +"connection" --resource-group "exampleResourceGroup" +""" + +helps['datafactory private-link-resource'] = """ + type: group + short-summary: Manage private link resource with datafactory +""" + +helps['datafactory private-link-resource show'] = """ + type: command + short-summary: "Gets the private link resources." + examples: + - name: Get private link resources of a site + text: |- + az datafactory private-link-resource show --factory-name "exampleFactoryName" --resource-group \ +"exampleResourceGroup" +""" diff --git a/src/datafactory/azext_datafactory/generated/_params.py b/src/datafactory/azext_datafactory/generated/_params.py index 2162b81c231..7792d0e770d 100644 --- a/src/datafactory/azext_datafactory/generated/_params.py +++ b/src/datafactory/azext_datafactory/generated/_params.py @@ -24,9 +24,12 @@ from azext_datafactory.action import ( AddFactoryVstsConfiguration, AddFactoryGitHubConfiguration, + AddIdentity, + AddManagedVirtualNetwork, AddFolder, AddFilters, - AddOrderBy + AddOrderBy, + AddPrivateLinkServiceConnectionState ) @@ -57,12 +60,33 @@ def load_arguments(self, _): 'GitHub repo information.', arg_group='RepoConfiguration') c.argument('global_parameters', type=validate_file_or_dict, help='List of parameters for factory. Expected ' 'value: json-string/@json-file.') + c.argument('public_network_access', arg_type=get_enum_type(['Enabled', 'Disabled']), help='Whether or not ' + 'public network access is allowed for the data factory.') + c.argument('key_name', type=str, help='The name of the key in Azure Key Vault to use as Customer Managed Key.', + arg_group='Encryption') + c.argument('vault_base_url', type=str, help='The url of the Azure Key Vault used for CMK.', + arg_group='Encryption') + c.argument('key_version', type=str, help='The version of the key used for CMK. If not provided, latest version ' + 'will be used.', arg_group='Encryption') + c.argument('identity', action=AddIdentity, nargs='+', help='User assigned identity to use to authenticate to ' + 'customer\'s key vault. If not provided Managed Service Identity will be used.', + arg_group='Encryption') + c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned', 'UserAssigned', + 'SystemAssigned,UserAssigned']), + help='The identity type.', arg_group='Identity') + c.argument('user_assigned_identities', type=validate_file_or_dict, help='List of user assigned identities for ' + 'the factory. 
Expected value: json-string/@json-file.', arg_group='Identity') with self.argument_context('datafactory update') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('factory_name', options_list=['--name', '-n', '--factory-name'], type=str, help='The factory name.', id_part='name') c.argument('tags', tags_type) + c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned', 'UserAssigned', + 'SystemAssigned,UserAssigned']), + help='The identity type.', arg_group='Identity') + c.argument('user_assigned_identities', type=validate_file_or_dict, help='List of user assigned identities for ' + 'the factory. Expected value: json-string/@json-file.', arg_group='Identity') with self.argument_context('datafactory delete') as c: c.argument('resource_group_name', resource_group_name_type) @@ -133,6 +157,8 @@ def load_arguments(self, _): c.argument('if_match', type=str, help='ETag of the integration runtime entity. Should only be specified for ' 'update, for which it should match existing entity or can be * for unconditional update.') c.argument('description', type=str, help='Integration runtime description.') + c.argument('managed_virtual_network', action=AddManagedVirtualNetwork, nargs='+', help='Managed Virtual ' + 'Network reference.') c.argument('compute_properties', type=validate_file_or_dict, help='The compute resource for managed ' 'integration runtime. Expected value: json-string/@json-file.', arg_group='Type Properties') c.argument('ssis_properties', type=validate_file_or_dict, help='SSIS properties for managed integration ' @@ -386,8 +412,7 @@ def load_arguments(self, _): with self.argument_context('datafactory pipeline update') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('pipeline_name', options_list=['--name', '-n', '--pipeline-name'], type=str, help='The pipeline ' - 'name.', id_part='child_name_1') + c.argument('pipeline_name', type=str, help='The pipeline name.', id_part='child_name_1') c.argument('if_match', type=str, help='ETag of the pipeline entity. Should only be specified for update, for ' 'which it should match existing entity or can be * for unconditional update.') c.argument('description', type=str, help='The description of the pipeline.') @@ -404,8 +429,7 @@ def load_arguments(self, _): 'json-string/@json-file.') c.argument('duration', type=validate_file_or_dict, help='TimeSpan value, after which an Azure Monitoring ' 'Metric is fired. 
Expected value: json-string/@json-file.', arg_group='Policy Elapsed Time Metric') - c.argument('folder_name', type=str, help='The name of the folder that this Pipeline is in.', - arg_group='Folder') + c.argument('name', type=str, help='The name of the folder that this Pipeline is in.', arg_group='Folder') c.ignore('pipeline') with self.argument_context('datafactory pipeline delete') as c: @@ -578,3 +602,52 @@ def load_arguments(self, _): c.argument('factory_name', type=str, help='The factory name.', id_part='name') c.argument('trigger_name', type=str, help='The trigger name.', id_part='child_name_1') c.argument('run_id', type=str, help='The pipeline run identifier.', id_part='child_name_2') + + with self.argument_context('datafactory private-end-point-connection list') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', type=str, help='The factory name.') + + with self.argument_context('datafactory private-endpoint-connection show') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('private_endpoint_connection_name', options_list=['--name', '-n', '--private-endpoint-connection-nam' + 'e'], type=str, help='The private endpoint ' + 'connection name.', id_part='child_name_1') + c.argument('if_none_match', type=str, help='ETag of the private endpoint connection entity. Should only be ' + 'specified for get. If the ETag matches the existing entity tag, or if * was provided, then no ' + 'content will be returned.') + + with self.argument_context('datafactory private-endpoint-connection create') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', type=str, help='The factory name.') + c.argument('private_endpoint_connection_name', options_list=['--name', '-n', '--private-endpoint-connection-nam' + 'e'], type=str, help='The private endpoint ' + 'connection name.') + c.argument('if_match', type=str, help='ETag of the private endpoint connection entity. Should only be ' + 'specified for update, for which it should match existing entity or can be * for unconditional ' + 'update.') + c.argument('private_link_service_connection_state', action=AddPrivateLinkServiceConnectionState, nargs='+', + help='The state of a private link connection') + + with self.argument_context('datafactory private-endpoint-connection update') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('private_endpoint_connection_name', options_list=['--name', '-n', '--private-endpoint-connection-nam' + 'e'], type=str, help='The private endpoint ' + 'connection name.', id_part='child_name_1') + c.argument('if_match', type=str, help='ETag of the private endpoint connection entity. 
Should only be ' + 'specified for update, for which it should match existing entity or can be * for unconditional ' + 'update.') + c.argument('private_link_service_connection_state', action=AddPrivateLinkServiceConnectionState, nargs='+', + help='The state of a private link connection') + + with self.argument_context('datafactory private-endpoint-connection delete') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', type=str, help='The factory name.', id_part='name') + c.argument('private_endpoint_connection_name', options_list=['--name', '-n', '--private-endpoint-connection-nam' + 'e'], type=str, help='The private endpoint ' + 'connection name.', id_part='child_name_1') + + with self.argument_context('datafactory private-link-resource show') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', type=str, help='The factory name.', id_part='name') diff --git a/src/datafactory/azext_datafactory/generated/action.py b/src/datafactory/azext_datafactory/generated/action.py index f645d72981a..9cac9990d5e 100644 --- a/src/datafactory/azext_datafactory/generated/action.py +++ b/src/datafactory/azext_datafactory/generated/action.py @@ -90,6 +90,57 @@ def get_action(self, values, option_string): # pylint: disable=no-self-use return d +class AddIdentity(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + action = self.get_action(values, option_string) + namespace.identity = action + + def get_action(self, values, option_string): # pylint: disable=no-self-use + try: + properties = defaultdict(list) + for (k, v) in (x.split('=', 1) for x in values): + properties[k].append(v) + properties = dict(properties) + except ValueError: + raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + d = {} + for k in properties: + kl = k.lower() + v = properties[k] + if kl == 'user-assigned-identity': + d['user_assigned_identity'] = v[0] + else: + raise CLIError('Unsupported Key {} is provided for parameter identity. All possible keys are: ' + 'user-assigned-identity'.format(k)) + return d + + +class AddManagedVirtualNetwork(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + action = self.get_action(values, option_string) + namespace.managed_virtual_network = action + + def get_action(self, values, option_string): # pylint: disable=no-self-use + try: + properties = defaultdict(list) + for (k, v) in (x.split('=', 1) for x in values): + properties[k].append(v) + properties = dict(properties) + except ValueError: + raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + d = {} + d['type'] = "ManagedVirtualNetworkReference" + for k in properties: + kl = k.lower() + v = properties[k] + if kl == 'reference-name': + d['reference_name'] = v[0] + else: + raise CLIError('Unsupported Key {} is provided for parameter managed_virtual_network. All possible ' + 'keys are: reference-name'.format(k)) + return d + + class AddFolder(argparse.Action): def __call__(self, parser, namespace, values, option_string=None): action = self.get_action(values, option_string) @@ -111,7 +162,7 @@ def get_action(self, values, option_string): # pylint: disable=no-self-use d['name'] = v[0] else: raise CLIError('Unsupported Key {} is provided for parameter folder. All possible keys are: name'. 
- format(k)) + format(k)) return d @@ -169,3 +220,32 @@ def get_action(self, values, option_string): # pylint: disable=no-self-use raise CLIError('Unsupported Key {} is provided for parameter order_by. All possible keys are: ' 'order-by, order'.format(k)) return d + + +class AddPrivateLinkServiceConnectionState(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + action = self.get_action(values, option_string) + namespace.private_link_service_connection_state = action + + def get_action(self, values, option_string): # pylint: disable=no-self-use + try: + properties = defaultdict(list) + for (k, v) in (x.split('=', 1) for x in values): + properties[k].append(v) + properties = dict(properties) + except ValueError: + raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + d = {} + for k in properties: + kl = k.lower() + v = properties[k] + if kl == 'status': + d['status'] = v[0] + elif kl == 'description': + d['description'] = v[0] + elif kl == 'actions-required': + d['actions_required'] = v[0] + else: + raise CLIError('Unsupported Key {} is provided for parameter private_link_service_connection_state. ' + 'All possible keys are: status, description, actions-required'.format(k)) + return d diff --git a/src/datafactory/azext_datafactory/generated/commands.py b/src/datafactory/azext_datafactory/generated/commands.py index df59d171a88..f5a297d1496 100644 --- a/src/datafactory/azext_datafactory/generated/commands.py +++ b/src/datafactory/azext_datafactory/generated/commands.py @@ -157,3 +157,33 @@ def load_command_table(self, _): g.custom_command('cancel', 'datafactory_trigger_run_cancel') g.custom_command('query-by-factory', 'datafactory_trigger_run_query_by_factory') g.custom_command('rerun', 'datafactory_trigger_run_rerun') + + from azext_datafactory.generated._client_factory import cf_private_end_point_connection + datafactory_private_end_point_connection = CliCommandType( + operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._private_end_point_connections_operatio' + 'ns#PrivateEndPointConnectionsOperations.{}', + client_factory=cf_private_end_point_connection) + with self.command_group('datafactory private-end-point-connection', datafactory_private_end_point_connection, + client_factory=cf_private_end_point_connection) as g: + g.custom_command('list', 'datafactory_private_end_point_connection_list') + + from azext_datafactory.generated._client_factory import cf_private_endpoint_connection + datafactory_private_endpoint_connection = CliCommandType( + operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._private_endpoint_connection_operations' + '#PrivateEndpointConnectionOperations.{}', + client_factory=cf_private_endpoint_connection) + with self.command_group('datafactory private-endpoint-connection', datafactory_private_endpoint_connection, + client_factory=cf_private_endpoint_connection) as g: + g.custom_show_command('show', 'datafactory_private_endpoint_connection_show') + g.custom_command('create', 'datafactory_private_endpoint_connection_create') + g.custom_command('update', 'datafactory_private_endpoint_connection_update') + g.custom_command('delete', 'datafactory_private_endpoint_connection_delete', confirmation=True) + + from azext_datafactory.generated._client_factory import cf_private_link_resource + datafactory_private_link_resource = CliCommandType( + operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._private_link_resources_operations#Priv' + 
'ateLinkResourcesOperations.{}', + client_factory=cf_private_link_resource) + with self.command_group('datafactory private-link-resource', datafactory_private_link_resource, + client_factory=cf_private_link_resource) as g: + g.custom_show_command('show', 'datafactory_private_link_resource_show') diff --git a/src/datafactory/azext_datafactory/generated/custom.py b/src/datafactory/azext_datafactory/generated/custom.py index c269c1999ff..503a2436998 100644 --- a/src/datafactory/azext_datafactory/generated/custom.py +++ b/src/datafactory/azext_datafactory/generated/custom.py @@ -39,7 +39,14 @@ def datafactory_create(client, tags=None, factory_vsts_configuration=None, factory_git_hub_configuration=None, - global_parameters=None): + global_parameters=None, + public_network_access=None, + key_name=None, + vault_base_url=None, + key_version=None, + identity=None, + type_=None, + user_assigned_identities=None): all_repo_configuration = [] if factory_vsts_configuration is not None: all_repo_configuration.append(factory_vsts_configuration) @@ -54,8 +61,15 @@ def datafactory_create(client, factory['tags'] = tags factory['repo_configuration'] = repo_configuration factory['global_parameters'] = global_parameters + factory['public_network_access'] = public_network_access factory['encryption'] = {} + factory['encryption']['key_name'] = key_name + factory['encryption']['vault_base_url'] = vault_base_url + factory['encryption']['key_version'] = key_version + factory['encryption']['identity'] = identity factory['identity'] = {} + factory['identity']['type'] = type_ + factory['identity']['user_assigned_identities'] = user_assigned_identities return client.create_or_update(resource_group_name=resource_group_name, factory_name=factory_name, if_match=if_match, @@ -65,10 +79,14 @@ def datafactory_create(client, def datafactory_update(client, resource_group_name, factory_name, - tags=None): + tags=None, + type_=None, + user_assigned_identities=None): factory_update_parameters = {} factory_update_parameters['tags'] = tags factory_update_parameters['identity'] = {} + factory_update_parameters['identity']['type'] = type_ + factory_update_parameters['identity']['user_assigned_identities'] = user_assigned_identities return client.update(resource_group_name=resource_group_name, factory_name=factory_name, factory_update_parameters=factory_update_parameters) @@ -179,12 +197,14 @@ def datafactory_integration_runtime_managed_create(client, integration_runtime_name, if_match=None, description=None, + managed_virtual_network=None, compute_properties=None, ssis_properties=None): integration_runtime = {} integration_runtime['properties'] = {} integration_runtime['properties']['type'] = 'Managed' integration_runtime['properties']['description'] = description + integration_runtime['properties']['managed_virtual_network'] = managed_virtual_network integration_runtime['properties']['compute_properties'] = compute_properties integration_runtime['properties']['ssis_properties'] = ssis_properties return client.create_or_update(resource_group_name=resource_group_name, @@ -566,7 +586,7 @@ def datafactory_pipeline_update(instance, annotations=None, run_dimensions=None, duration=None, - folder_name=None): + name=None): if description is not None: instance.description = description if activities is not None: @@ -583,8 +603,8 @@ def datafactory_pipeline_update(instance, instance.run_dimensions = run_dimensions if duration is not None: instance.elapsed_time_metric.duration = duration - if folder_name is not None: - instance.folder.name = 
folder_name + if name is not None: + instance.folder.name = name return instance @@ -841,3 +861,69 @@ def datafactory_trigger_run_rerun(client, factory_name=factory_name, trigger_name=trigger_name, run_id=run_id) + + +def datafactory_private_end_point_connection_list(client, + resource_group_name, + factory_name): + return client.list_by_factory(resource_group_name=resource_group_name, + factory_name=factory_name) + + +def datafactory_private_endpoint_connection_show(client, + resource_group_name, + factory_name, + private_endpoint_connection_name, + if_none_match=None): + return client.get(resource_group_name=resource_group_name, + factory_name=factory_name, + private_endpoint_connection_name=private_endpoint_connection_name, + if_none_match=if_none_match) + + +def datafactory_private_endpoint_connection_create(client, + resource_group_name, + factory_name, + private_endpoint_connection_name, + if_match=None, + private_link_service_connection_state=None): + private_endpoint_wrapper = {} + private_endpoint_wrapper['properties'] = {} + private_endpoint_wrapper['properties']['private_link_service_connection_state'] = private_link_service_connection_state + return client.create_or_update(resource_group_name=resource_group_name, + factory_name=factory_name, + private_endpoint_connection_name=private_endpoint_connection_name, + if_match=if_match, + private_endpoint_wrapper=private_endpoint_wrapper) + + +def datafactory_private_endpoint_connection_update(client, + resource_group_name, + factory_name, + private_endpoint_connection_name, + if_match=None, + private_link_service_connection_state=None): + private_endpoint_wrapper = {} + private_endpoint_wrapper['properties'] = {} + private_endpoint_wrapper['properties']['private_link_service_connection_state'] = private_link_service_connection_state + return client.create_or_update(resource_group_name=resource_group_name, + factory_name=factory_name, + private_endpoint_connection_name=private_endpoint_connection_name, + if_match=if_match, + private_endpoint_wrapper=private_endpoint_wrapper) + + +def datafactory_private_endpoint_connection_delete(client, + resource_group_name, + factory_name, + private_endpoint_connection_name): + return client.delete(resource_group_name=resource_group_name, + factory_name=factory_name, + private_endpoint_connection_name=private_endpoint_connection_name) + + +def datafactory_private_link_resource_show(client, + resource_group_name, + factory_name): + return client.get(resource_group_name=resource_group_name, + factory_name=factory_name) diff --git a/src/datafactory/azext_datafactory/tests/latest/example_steps.py b/src/datafactory/azext_datafactory/tests/latest/example_steps.py index 42222d4e576..791da887268 100644 --- a/src/datafactory/azext_datafactory/tests/latest/example_steps.py +++ b/src/datafactory/azext_datafactory/tests/latest/example_steps.py @@ -581,7 +581,7 @@ def step_pipeline_update(test, rg, checks=None): 'ipeline().parameters.OutputBlobNameList\\"}}}}}}]" ' '--parameters "{{\\"OutputBlobNameList\\":{{\\"type\\":\\"Array\\"}}}}" ' '--duration "0.00:10:00" ' - '--name "{myPipeline}" ' + '--pipeline-name "{myPipeline}" ' '--resource-group "{rg}"', checks=checks) @@ -634,6 +634,66 @@ def step_pipeline_delete(test, rg, checks=None): checks=checks) +# EXAMPLE: /privateEndPointConnections/get/privateEndPointConnections_ListByFactory +@try_manual +def step_private_end_point_connection_list(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory private-end-point-connection list ' + 
'--factory-name "{myFactory}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /PrivateEndpointConnection/put/Approves or rejects a private endpoint connection for a factory. +@try_manual +def step_private_endpoint_connection_create(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory private-endpoint-connection create ' + '--factory-name "{myFactory}" ' + '--name "{myPrivateEndPointConnection}" ' + '--private-link-service-connection-state description="Approved by admin." actions-required="" ' + 'status="Approved" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /PrivateEndpointConnection/get/Get a private endpoint connection for a datafactory. +@try_manual +def step_private_endpoint_connection_show(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory private-endpoint-connection show ' + '--factory-name "{myFactory}" ' + '--name "{myPrivateEndPointConnection}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /PrivateEndpointConnection/delete/Delete a private endpoint connection for a datafactory. +@try_manual +def step_private_endpoint_connection_delete(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory private-endpoint-connection delete -y ' + '--factory-name "{myFactory}" ' + '--name "{myPrivateEndPointConnection}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /privateLinkResources/get/Get private link resources of a site +@try_manual +def step_private_link_resource_show(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory private-link-resource show ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}"', + checks=checks) + + # EXAMPLE: /Triggers/put/Triggers_Create @try_manual def step_trigger_create(test, rg, checks=None): diff --git a/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py b/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py index 517a35650f8..d6795dbdb8b 100644 --- a/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py +++ b/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py @@ -174,6 +174,7 @@ def __init__(self, *args, **kwargs): 'myDataset': self.create_random_name(prefix='exampleDataset'[:7], length=14), 'myPipeline': self.create_random_name(prefix='examplePipeline'[:7], length=15), 'myTrigger': self.create_random_name(prefix='exampleTrigger'[:7], length=14), + 'myPrivateEndPointConnection': 'connection', }) @ResourceGroupPreparer(name_prefix='clitestdatafactory_exampleResourceGroup'[:7], key='rg', parameter_name='rg') diff --git a/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario_coverage.md b/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario_coverage.md deleted file mode 100644 index b7eabe4528a..00000000000 --- a/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario_coverage.md +++ /dev/null @@ -1,48 +0,0 @@ -|Scenario|Result|ErrorMessage|ErrorStack|ErrorNormalized|StartDt|EndDt| -|step_create|successed||||2021-04-26 09:05:32.308913|2021-04-26 09:05:32.501033| -|step_update|successed||||2021-04-26 09:05:22.750754|2021-04-26 09:05:22.880707| -|step_linked_service_create|successed||||2021-04-26 09:05:22.880707|2021-04-26 09:05:23.009706| -|step_linked_service_update|successed||||2021-04-26 09:05:23.010706|2021-04-26 09:05:23.174579| -|step_dataset_create|successed||||2021-04-26 09:05:23.174579|2021-04-26 
09:05:23.317043| -|step_dataset_update|successed||||2021-04-26 09:05:23.318045|2021-04-26 09:05:23.451047| -|step_pipeline_create|successed||||2021-04-26 09:05:23.452049|2021-04-26 09:05:23.575751| -|step_trigger_create|successed||||2021-04-26 09:05:23.703756|2021-04-26 09:05:23.871057| -|step_trigger_update|successed||||2021-04-26 09:05:23.871057|2021-04-26 09:05:24.019053| -|step_integration_runtime_self_hosted_create|successed||||2021-04-26 09:05:24.019053|2021-04-26 09:05:24.155099| -|step_integration_runtime_update|successed||||2021-04-26 09:05:24.155099|2021-04-26 09:05:24.285096| -|step_integration_runtime_show|successed||||2021-04-26 09:05:29.524820|2021-04-26 09:05:29.675815| -|step_linked_service_show|successed||||2021-04-26 09:05:24.582291|2021-04-26 09:05:24.718292| -|step_pipeline_show|successed||||2021-04-26 09:05:24.719291|2021-04-26 09:05:24.872517| -|step_dataset_show|successed||||2021-04-26 09:05:24.873517|2021-04-26 09:05:25.000030| -|step_trigger_show|successed||||2021-04-26 09:05:33.782136|2021-04-26 09:05:33.927138| -|step_integration_runtime_list|successed||||2021-04-26 09:05:25.115003|2021-04-26 09:05:25.253055| -|step_linked_service_list|successed||||2021-04-26 09:05:25.254059|2021-04-26 09:05:25.409635| -|step_pipeline_list|successed||||2021-04-26 09:05:25.409635|2021-04-26 09:05:25.533704| -|step_trigger_list|successed||||2021-04-26 09:05:25.533704|2021-04-26 09:05:25.676865| -|step_dataset_list|successed||||2021-04-26 09:05:25.676865|2021-04-26 09:05:25.810871| -|step_show|successed||||2021-04-26 09:05:25.810871|2021-04-26 09:05:25.938042| -|step_list2|successed||||2021-04-26 09:05:25.938042|2021-04-26 09:05:26.060042| -|step_list|successed||||2021-04-26 09:05:26.060042|2021-04-26 09:05:26.183196| -|step_integration_runtime_regenerate_auth_key|successed||||2021-04-26 09:05:26.184194|2021-04-26 09:05:26.313194| -|step_integration_runtime_sync_credentials|successed||||2021-04-26 09:05:26.314192|2021-04-26 09:05:26.449307| -|step_integration_runtime_get_monitoring_data|successed||||2021-04-26 09:05:26.449307|2021-04-26 09:05:26.636000| -|step_integration_runtime_list_auth_key|successed||||2021-04-26 09:05:26.636000|2021-04-26 09:05:26.790002| -|step_integration_runtime_remove_link|successed||||2021-04-26 09:05:26.791005|2021-04-26 09:05:26.934513| -|step_integration_runtime_get_status|successed||||2021-04-26 09:05:26.935512|2021-04-26 09:05:27.069511| -|step_trigger_get_event_subscription_status|successed||||2021-04-26 09:05:27.069511|2021-04-26 09:05:27.211487| -|step_trigger_unsubscribe_from_event|successed||||2021-04-26 09:05:27.212492|2021-04-26 09:05:27.402802| -|step_trigger_subscribe_to_event|successed||||2021-04-26 09:05:27.402802|2021-04-26 09:05:27.532807| -|step_trigger_start|successed||||2021-04-26 09:05:33.632612|2021-04-26 09:05:33.782136| -|step_trigger_stop|successed||||2021-04-26 09:05:34.611518|2021-04-26 09:05:34.768873| -|step_get_data_plane_access|successed||||2021-04-26 09:05:27.837090|2021-04-26 09:05:27.977072| -|step_configure_factory_repo|successed||||2021-04-26 09:05:28.099075|2021-04-26 09:05:28.288426| -|step_integration_runtime_delete|successed||||2021-04-26 09:05:31.965947|2021-04-26 09:05:32.140944| -|step_trigger_delete|successed||||2021-04-26 09:05:34.768873|2021-04-26 09:05:34.900878| -|step_pipeline_delete|successed||||2021-04-26 09:05:34.900878|2021-04-26 09:05:35.030991| -|step_dataset_delete|successed||||2021-04-26 09:05:28.737334|2021-04-26 09:05:28.861337| -|step_linked_service_delete|successed||||2021-04-26 
09:05:28.861337|2021-04-26 09:05:28.989612| -|step_delete|successed||||2021-04-26 09:05:35.031990|2021-04-26 09:05:35.197507| -|step_integration_runtime_start|successed||||2021-04-26 09:05:29.676815|2021-04-26 09:05:30.373119| -|step_integration_runtime_stop|successed||||2021-04-26 09:05:30.374118|2021-04-26 09:05:31.964925| -|step_activity_run_query_by_pipeline_run|successed||||2021-04-26 09:05:33.012581|2021-04-26 09:05:33.193579| -Coverage: 46/46 diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_data_factory_management_client.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_data_factory_management_client.py index f272437a3e9..9dfe04b82d0 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_data_factory_management_client.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_data_factory_management_client.py @@ -107,7 +107,6 @@ def __init__( client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) - self._serialize.client_side_validation = False self._deserialize = Deserializer(client_models) self.operations = Operations( diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_configuration_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_configuration_async.py deleted file mode 100644 index 411d6c4a66e..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_configuration_async.py +++ /dev/null @@ -1,67 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from typing import Any, TYPE_CHECKING - -from azure.core.configuration import Configuration -from azure.core.pipeline import policies -from azure.mgmt.core.policies import ARMHttpLoggingPolicy - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from azure.core.credentials_async import AsyncTokenCredential - -VERSION = "unknown" - -class DataFactoryManagementClientConfiguration(Configuration): - """Configuration for DataFactoryManagementClient. - - Note that all parameters used to create this instance are saved as instance - attributes. - - :param credential: Credential needed for the client to connect to Azure. - :type credential: ~azure.core.credentials_async.AsyncTokenCredential - :param subscription_id: The subscription identifier. 
- :type subscription_id: str - """ - - def __init__( - self, - credential: "AsyncTokenCredential", - subscription_id: str, - **kwargs: Any - ) -> None: - if credential is None: - raise ValueError("Parameter 'credential' must not be None.") - if subscription_id is None: - raise ValueError("Parameter 'subscription_id' must not be None.") - super(DataFactoryManagementClientConfiguration, self).__init__(**kwargs) - - self.credential = credential - self.subscription_id = subscription_id - self.api_version = "2018-06-01" - self.credential_scopes = ['https://management.azure.com/.default'] - self.credential_scopes.extend(kwargs.pop('credential_scopes', [])) - kwargs.setdefault('sdk_moniker', 'datafactorymanagementclient/{}'.format(VERSION)) - self._configure(**kwargs) - - def _configure( - self, - **kwargs: Any - ) -> None: - self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) - self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) - self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) - self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) - self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) - self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs) - self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) - self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs) - self.authentication_policy = kwargs.get('authentication_policy') - if self.credential and not self.authentication_policy: - self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client.py index 255a1839c21..01497b56d61 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client.py @@ -104,7 +104,6 @@ def __init__( client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) - self._serialize.client_side_validation = False self._deserialize = Deserializer(client_models) self.operations = Operations( diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client_async.py deleted file mode 100644 index b2b322686b8..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client_async.py +++ /dev/null @@ -1,143 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- - -from typing import Any, Optional, TYPE_CHECKING - -from azure.mgmt.core import AsyncARMPipelineClient -from msrest import Deserializer, Serializer - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from azure.core.credentials_async import AsyncTokenCredential - -from ._configuration_async import DataFactoryManagementClientConfiguration -from .operations_async import OperationOperations -from .operations_async import FactoryOperations -from .operations_async import ExposureControlOperations -from .operations_async import IntegrationRuntimeOperations -from .operations_async import IntegrationRuntimeObjectMetadataOperations -from .operations_async import IntegrationRuntimeNodeOperations -from .operations_async import LinkedServiceOperations -from .operations_async import DatasetOperations -from .operations_async import PipelineOperations -from .operations_async import PipelineRunOperations -from .operations_async import ActivityRunOperations -from .operations_async import TriggerOperations -from .operations_async import TriggerRunOperations -from .operations_async import DataFlowOperations -from .operations_async import DataFlowDebugSessionOperations -from .operations_async import ManagedVirtualNetworkOperations -from .operations_async import ManagedPrivateEndpointOperations -from .. import models - - -class DataFactoryManagementClient(object): - """The Azure Data Factory V2 management API provides a RESTful set of web services that interact with Azure Data Factory V2 services. - - :ivar operation: OperationOperations operations - :vartype operation: data_factory_management_client.aio.operations_async.OperationOperations - :ivar factory: FactoryOperations operations - :vartype factory: data_factory_management_client.aio.operations_async.FactoryOperations - :ivar exposure_control: ExposureControlOperations operations - :vartype exposure_control: data_factory_management_client.aio.operations_async.ExposureControlOperations - :ivar integration_runtime: IntegrationRuntimeOperations operations - :vartype integration_runtime: data_factory_management_client.aio.operations_async.IntegrationRuntimeOperations - :ivar integration_runtime_object_metadata: IntegrationRuntimeObjectMetadataOperations operations - :vartype integration_runtime_object_metadata: data_factory_management_client.aio.operations_async.IntegrationRuntimeObjectMetadataOperations - :ivar integration_runtime_node: IntegrationRuntimeNodeOperations operations - :vartype integration_runtime_node: data_factory_management_client.aio.operations_async.IntegrationRuntimeNodeOperations - :ivar linked_service: LinkedServiceOperations operations - :vartype linked_service: data_factory_management_client.aio.operations_async.LinkedServiceOperations - :ivar dataset: DatasetOperations operations - :vartype dataset: data_factory_management_client.aio.operations_async.DatasetOperations - :ivar pipeline: PipelineOperations operations - :vartype pipeline: data_factory_management_client.aio.operations_async.PipelineOperations - :ivar pipeline_run: PipelineRunOperations operations - :vartype pipeline_run: data_factory_management_client.aio.operations_async.PipelineRunOperations - :ivar activity_run: ActivityRunOperations operations - :vartype activity_run: data_factory_management_client.aio.operations_async.ActivityRunOperations - :ivar trigger: TriggerOperations operations - :vartype trigger: 
data_factory_management_client.aio.operations_async.TriggerOperations - :ivar trigger_run: TriggerRunOperations operations - :vartype trigger_run: data_factory_management_client.aio.operations_async.TriggerRunOperations - :ivar data_flow: DataFlowOperations operations - :vartype data_flow: data_factory_management_client.aio.operations_async.DataFlowOperations - :ivar data_flow_debug_session: DataFlowDebugSessionOperations operations - :vartype data_flow_debug_session: data_factory_management_client.aio.operations_async.DataFlowDebugSessionOperations - :ivar managed_virtual_network: ManagedVirtualNetworkOperations operations - :vartype managed_virtual_network: data_factory_management_client.aio.operations_async.ManagedVirtualNetworkOperations - :ivar managed_private_endpoint: ManagedPrivateEndpointOperations operations - :vartype managed_private_endpoint: data_factory_management_client.aio.operations_async.ManagedPrivateEndpointOperations - :param credential: Credential needed for the client to connect to Azure. - :type credential: ~azure.core.credentials_async.AsyncTokenCredential - :param subscription_id: The subscription identifier. - :type subscription_id: str - :param str base_url: Service URL - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - """ - - def __init__( - self, - credential: "AsyncTokenCredential", - subscription_id: str, - base_url: Optional[str] = None, - **kwargs: Any - ) -> None: - if not base_url: - base_url = 'https://management.azure.com' - self._config = DataFactoryManagementClientConfiguration(credential, subscription_id, **kwargs) - self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) - - client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} - self._serialize = Serializer(client_models) - self._deserialize = Deserializer(client_models) - - self.operation = OperationOperations( - self._client, self._config, self._serialize, self._deserialize) - self.factory = FactoryOperations( - self._client, self._config, self._serialize, self._deserialize) - self.exposure_control = ExposureControlOperations( - self._client, self._config, self._serialize, self._deserialize) - self.integration_runtime = IntegrationRuntimeOperations( - self._client, self._config, self._serialize, self._deserialize) - self.integration_runtime_object_metadata = IntegrationRuntimeObjectMetadataOperations( - self._client, self._config, self._serialize, self._deserialize) - self.integration_runtime_node = IntegrationRuntimeNodeOperations( - self._client, self._config, self._serialize, self._deserialize) - self.linked_service = LinkedServiceOperations( - self._client, self._config, self._serialize, self._deserialize) - self.dataset = DatasetOperations( - self._client, self._config, self._serialize, self._deserialize) - self.pipeline = PipelineOperations( - self._client, self._config, self._serialize, self._deserialize) - self.pipeline_run = PipelineRunOperations( - self._client, self._config, self._serialize, self._deserialize) - self.activity_run = ActivityRunOperations( - self._client, self._config, self._serialize, self._deserialize) - self.trigger = TriggerOperations( - self._client, self._config, self._serialize, self._deserialize) - self.trigger_run = TriggerRunOperations( - self._client, self._config, self._serialize, self._deserialize) - self.data_flow = DataFlowOperations( - self._client, self._config, self._serialize, self._deserialize) - 
self.data_flow_debug_session = DataFlowDebugSessionOperations( - self._client, self._config, self._serialize, self._deserialize) - self.managed_virtual_network = ManagedVirtualNetworkOperations( - self._client, self._config, self._serialize, self._deserialize) - self.managed_private_endpoint = ManagedPrivateEndpointOperations( - self._client, self._config, self._serialize, self._deserialize) - - async def close(self) -> None: - await self._client.close() - - async def __aenter__(self) -> "DataFactoryManagementClient": - await self._client.__aenter__() - return self - - async def __aexit__(self, *exc_details) -> None: - await self._client.__aexit__(*exc_details) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/__init__.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/__init__.py deleted file mode 100644 index 554e3ba9232..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/__init__.py +++ /dev/null @@ -1,45 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from ._operation_operations_async import OperationOperations -from ._factory_operations_async import FactoryOperations -from ._exposure_control_operations_async import ExposureControlOperations -from ._integration_runtime_operations_async import IntegrationRuntimeOperations -from ._integration_runtime_object_metadata_operations_async import IntegrationRuntimeObjectMetadataOperations -from ._integration_runtime_node_operations_async import IntegrationRuntimeNodeOperations -from ._linked_service_operations_async import LinkedServiceOperations -from ._dataset_operations_async import DatasetOperations -from ._pipeline_operations_async import PipelineOperations -from ._pipeline_run_operations_async import PipelineRunOperations -from ._activity_run_operations_async import ActivityRunOperations -from ._trigger_operations_async import TriggerOperations -from ._trigger_run_operations_async import TriggerRunOperations -from ._data_flow_operations_async import DataFlowOperations -from ._data_flow_debug_session_operations_async import DataFlowDebugSessionOperations -from ._managed_virtual_network_operations_async import ManagedVirtualNetworkOperations -from ._managed_private_endpoint_operations_async import ManagedPrivateEndpointOperations - -__all__ = [ - 'OperationOperations', - 'FactoryOperations', - 'ExposureControlOperations', - 'IntegrationRuntimeOperations', - 'IntegrationRuntimeObjectMetadataOperations', - 'IntegrationRuntimeNodeOperations', - 'LinkedServiceOperations', - 'DatasetOperations', - 'PipelineOperations', - 'PipelineRunOperations', - 'ActivityRunOperations', - 'TriggerOperations', - 'TriggerRunOperations', - 'DataFlowOperations', - 'DataFlowDebugSessionOperations', - 'ManagedVirtualNetworkOperations', - 'ManagedPrivateEndpointOperations', -] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_activity_run_operations_async.py 
b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_activity_run_operations_async.py deleted file mode 100644 index 0d2e56be08b..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_activity_run_operations_async.py +++ /dev/null @@ -1,127 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -import datetime -from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class ActivityRunOperations: - """ActivityRunOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - async def query_by_pipeline_run( - self, - resource_group_name: str, - factory_name: str, - run_id: str, - last_updated_after: datetime.datetime, - last_updated_before: datetime.datetime, - continuation_token_parameter: Optional[str] = None, - filters: Optional[List["models.RunQueryFilter"]] = None, - order_by: Optional[List["models.RunQueryOrderBy"]] = None, - **kwargs - ) -> "models.ActivityRunsQueryResponse": - """Query activity runs based on input filter conditions. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param run_id: The pipeline run identifier. - :type run_id: str - :param last_updated_after: The time at or after which the run event was updated in 'ISO 8601' - format. - :type last_updated_after: ~datetime.datetime - :param last_updated_before: The time at or before which the run event was updated in 'ISO 8601' - format. - :type last_updated_before: ~datetime.datetime - :param continuation_token_parameter: The continuation token for getting the next page of - results. Null for first page. - :type continuation_token_parameter: str - :param filters: List of filters. - :type filters: list[~data_factory_management_client.models.RunQueryFilter] - :param order_by: List of OrderBy option. 
- :type order_by: list[~data_factory_management_client.models.RunQueryOrderBy] - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ActivityRunsQueryResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ActivityRunsQueryResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ActivityRunsQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.query_by_pipeline_run.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'runId': self._serialize.url("run_id", run_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ActivityRunsQueryResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - query_by_pipeline_run.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/queryActivityruns'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_debug_session_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_debug_session_operations_async.py deleted file mode 100644 index f1bf8ee8f73..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_debug_session_operations_async.py +++ /dev/null @@ -1,551 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. 
All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, List, Optional, TypeVar, Union -import warnings - -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.mgmt.core.exceptions import ARMErrorFormat -from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling - -from ... import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class DataFlowDebugSessionOperations: - """DataFlowDebugSessionOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - async def _create_initial( - self, - resource_group_name: str, - factory_name: str, - compute_type: Optional[str] = None, - core_count: Optional[int] = None, - time_to_live: Optional[int] = None, - name: Optional[str] = None, - properties: Optional["models.IntegrationRuntime"] = None, - **kwargs - ) -> Optional["models.CreateDataFlowDebugSessionResponse"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.CreateDataFlowDebugSessionResponse"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - request = models.CreateDataFlowDebugSessionRequest(compute_type=compute_type, core_count=core_count, time_to_live=time_to_live, name=name, properties=properties) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self._create_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # 
type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(request, 'CreateDataFlowDebugSessionRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - response_headers = {} - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('CreateDataFlowDebugSessionResponse', pipeline_response) - - if response.status_code == 202: - response_headers['location']=self._deserialize('str', response.headers.get('location')) - - if cls: - return cls(pipeline_response, deserialized, response_headers) - - return deserialized - _create_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/createDataFlowDebugSession'} # type: ignore - - async def begin_create( - self, - resource_group_name: str, - factory_name: str, - compute_type: Optional[str] = None, - core_count: Optional[int] = None, - time_to_live: Optional[int] = None, - name: Optional[str] = None, - properties: Optional["models.IntegrationRuntime"] = None, - **kwargs - ) -> AsyncLROPoller["models.CreateDataFlowDebugSessionResponse"]: - """Creates a data flow debug session. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param compute_type: Compute type of the cluster. The value will be overwritten by the same - setting in integration runtime if provided. - :type compute_type: str - :param core_count: Core count of the cluster. The value will be overwritten by the same setting - in integration runtime if provided. - :type core_count: int - :param time_to_live: Time to live setting of the cluster in minutes. - :type time_to_live: int - :param name: The resource name. - :type name: str - :param properties: Integration runtime properties. - :type properties: ~data_factory_management_client.models.IntegrationRuntime - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
- :return: An instance of AsyncLROPoller that returns either CreateDataFlowDebugSessionResponse or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.CreateDataFlowDebugSessionResponse] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.CreateDataFlowDebugSessionResponse"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = await self._create_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - compute_type=compute_type, - core_count=core_count, - time_to_live=time_to_live, - name=name, - properties=properties, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize('CreateDataFlowDebugSessionResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/createDataFlowDebugSession'} # type: ignore - - def query_by_factory( - self, - resource_group_name: str, - factory_name: str, - **kwargs - ) -> AsyncIterable["models.QueryDataFlowDebugSessionsResponse"]: - """Query all active data flow debug sessions. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
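The begin_create method above follows the standard azure-core long-running-operation contract: awaiting the method returns an AsyncLROPoller, and awaiting poller.result() drives AsyncARMPolling until the 202/Location handshake resolves. A hedged usage sketch (resource names and cluster settings are placeholders):

    # `client` is an open DataFactoryManagementClient as sketched earlier.
    poller = await client.data_flow_debug_session.begin_create(
        resource_group_name="my-rg",
        factory_name="myfactory",
        compute_type="General",  # overwritten by the integration runtime setting if one is given
        core_count=8,
        time_to_live=60,         # minutes
    )
    session = await poller.result()  # CreateDataFlowDebugSessionResponse
    print(session.session_id)
    # Passing polling=False instead returns after the initial request, with no polling.
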
- :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either QueryDataFlowDebugSessionsResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.QueryDataFlowDebugSessionsResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.QueryDataFlowDebugSessionsResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.query_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.post(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - async def extract_data(pipeline_response): - deserialized = self._deserialize('QueryDataFlowDebugSessionsResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged( - get_next, extract_data - ) - query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryDataFlowDebugSessions'} # type: ignore - - async def add_data_flow( - self, - resource_group_name: str, - factory_name: str, - session_id: Optional[str] = None, - datasets: Optional[List["models.DatasetDebugResource"]] = None, - linked_services: Optional[List["models.LinkedServiceDebugResource"]] = None, - source_settings: Optional[List["models.DataFlowSourceSetting"]] = None, - parameters: Optional[Dict[str, object]] = None, - dataset_parameters: Optional[object] = None, - folder_path: Optional[object] = None, - reference_name: Optional[str] = None, - name: Optional[str] = None, - properties: Optional["models.DataFlow"] = None, - **kwargs - ) -> "models.AddDataFlowToDebugSessionResponse": - """Add a data flow into debug session. 
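Note that query_by_factory above is not a coroutine: it returns an AsyncItemPaged immediately, and only the per-page fetch inside get_next awaits the pipeline. So the call itself is not awaited; the iteration is (attribute names per the session-info items in QueryDataFlowDebugSessionsResponse.value):

    pager = client.data_flow_debug_session.query_by_factory("my-rg", "myfactory")
    async for session_info in pager:  # pages are fetched lazily as you iterate
        print(session_info.session_id, session_info.compute_type)
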
- - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param session_id: The ID of data flow debug session. - :type session_id: str - :param datasets: List of datasets. - :type datasets: list[~data_factory_management_client.models.DatasetDebugResource] - :param linked_services: List of linked services. - :type linked_services: list[~data_factory_management_client.models.LinkedServiceDebugResource] - :param source_settings: Source setting for data flow debug. - :type source_settings: list[~data_factory_management_client.models.DataFlowSourceSetting] - :param parameters: Data flow parameters. - :type parameters: dict[str, object] - :param dataset_parameters: Parameters for dataset. - :type dataset_parameters: object - :param folder_path: Folder path for staging blob. Type: string (or Expression with resultType - string). - :type folder_path: object - :param reference_name: Reference LinkedService name. - :type reference_name: str - :param name: The resource name. - :type name: str - :param properties: Data flow properties. - :type properties: ~data_factory_management_client.models.DataFlow - :keyword callable cls: A custom type or function that will be passed the direct response - :return: AddDataFlowToDebugSessionResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.AddDataFlowToDebugSessionResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.AddDataFlowToDebugSessionResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - request = models.DataFlowDebugPackage(session_id=session_id, datasets=datasets, linked_services=linked_services, source_settings=source_settings, parameters_debug_settings_parameters=parameters, dataset_parameters=dataset_parameters, folder_path=folder_path, reference_name=reference_name, name=name, properties=properties) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.add_data_flow.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(request, 'DataFlowDebugPackage') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not 
in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('AddDataFlowToDebugSessionResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - add_data_flow.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/addDataFlowToDebugSession'} # type: ignore - - async def delete( - self, - resource_group_name: str, - factory_name: str, - session_id: Optional[str] = None, - **kwargs - ) -> None: - """Deletes a data flow debug session. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param session_id: The ID of data flow debug session. - :type session_id: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - request = models.DeleteDataFlowDebugSessionRequest(session_id=session_id) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(request, 'DeleteDataFlowDebugSessionRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/deleteDataFlowDebugSession'} # type: ignore - - async def _execute_command_initial( - self, - resource_group_name: str, - factory_name: str, - session_id: Optional[str] = None, - command: Optional[Union[str, "models.DataFlowDebugCommandType"]] = None, - command_payload: 
Optional["models.DataFlowDebugCommandPayload"] = None, - **kwargs - ) -> Optional["models.DataFlowDebugCommandResponse"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DataFlowDebugCommandResponse"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - request = models.DataFlowDebugCommandRequest(session_id=session_id, command=command, command_payload=command_payload) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self._execute_command_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(request, 'DataFlowDebugCommandRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - response_headers = {} - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('DataFlowDebugCommandResponse', pipeline_response) - - if response.status_code == 202: - response_headers['location']=self._deserialize('str', response.headers.get('location')) - - if cls: - return cls(pipeline_response, deserialized, response_headers) - - return deserialized - _execute_command_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/executeDataFlowDebugCommand'} # type: ignore - - async def begin_execute_command( - self, - resource_group_name: str, - factory_name: str, - session_id: Optional[str] = None, - command: Optional[Union[str, "models.DataFlowDebugCommandType"]] = None, - command_payload: Optional["models.DataFlowDebugCommandPayload"] = None, - **kwargs - ) -> AsyncLROPoller["models.DataFlowDebugCommandResponse"]: - """Execute a data flow debug command. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param session_id: The ID of data flow debug session. - :type session_id: str - :param command: The command type. 
- :type command: str or ~data_factory_management_client.models.DataFlowDebugCommandType - :param command_payload: The command payload object. - :type command_payload: ~data_factory_management_client.models.DataFlowDebugCommandPayload - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of AsyncLROPoller that returns either DataFlowDebugCommandResponse or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.DataFlowDebugCommandResponse] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowDebugCommandResponse"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = await self._execute_command_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - session_id=session_id, - command=command, - command_payload=command_payload, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize('DataFlowDebugCommandResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_execute_command.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/executeDataFlowDebugCommand'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_operations_async.py deleted file mode 100644 index b5c2e5656ce..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_operations_async.py +++ /dev/null @@ -1,309 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
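begin_execute_command pairs with the debug session created earlier; the command payload names a stream in the debug package. A sketch, assuming the vendored models module is importable as below (the import path and payload values are placeholders, not from this diff):

    from azext_datafactory.vendored_sdks.datafactory import models  # assumed path

    poller = await client.data_flow_debug_session.begin_execute_command(
        resource_group_name="my-rg",
        factory_name="myfactory",
        session_id=session.session_id,   # from the begin_create sketch above
        command="executePreviewQuery",   # a models.DataFlowDebugCommandType value
        command_payload=models.DataFlowDebugCommandPayload(
            stream_name="source1",       # a stream added via add_data_flow
            row_limits=100,
        ),
    )
    result = await poller.result()
    print(result.status, result.data)
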
-# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar -import warnings - -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class DataFlowOperations: - """DataFlowOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - async def create_or_update( - self, - resource_group_name: str, - factory_name: str, - data_flow_name: str, - properties: "models.DataFlow", - if_match: Optional[str] = None, - **kwargs - ) -> "models.DataFlowResource": - """Creates or updates a data flow. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param data_flow_name: The data flow name. - :type data_flow_name: str - :param properties: Data flow properties. - :type properties: ~data_factory_management_client.models.DataFlow - :param if_match: ETag of the data flow entity. Should only be specified for update, for which - it should match existing entity or can be * for unconditional update. 
- :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: DataFlowResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.DataFlowResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - data_flow = models.DataFlowResource(properties=properties) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(data_flow, 'DataFlowResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('DataFlowResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'} # type: ignore - - async def get( - self, - resource_group_name: str, - factory_name: str, - data_flow_name: str, - if_none_match: Optional[str] = None, - **kwargs - ) -> "models.DataFlowResource": - """Gets a data flow. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param data_flow_name: The data flow name. - :type data_flow_name: str - :param if_none_match: ETag of the data flow entity. Should only be specified for get. If the - ETag matches the existing entity tag, or if * was provided, then no content will be returned. 
- :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: DataFlowResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.DataFlowResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('DataFlowResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'} # type: ignore - - async def delete( - self, - resource_group_name: str, - factory_name: str, - data_flow_name: str, - **kwargs - ) -> None: - """Deletes a data flow. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param data_flow_name: The data flow name. 
- :type data_flow_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'} # type: ignore - - def list_by_factory( - self, - resource_group_name: str, - factory_name: str, - **kwargs - ) -> AsyncIterable["models.DataFlowListResponse"]: - """Lists data flows. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
- :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either DataFlowListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.DataFlowListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - async def extract_data(pipeline_response): - deserialized = self._deserialize('DataFlowListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_dataset_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_dataset_operations_async.py deleted file mode 100644 index a8be0369365..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_dataset_operations_async.py +++ /dev/null @@ -1,311 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. 
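The DataFlowOperations group above is plain request/response CRUD: create_or_update wraps the given properties in a DataFlowResource before serializing, and the If-Match/If-None-Match headers carry optimistic-concurrency ETags. A sketch using the MappingDataFlow model, with models imported as in the earlier sketch (names are placeholders):

    flow = await client.data_flow.create_or_update(
        resource_group_name="my-rg",
        factory_name="myfactory",
        data_flow_name="myDataFlow",
        properties=models.MappingDataFlow(description="example flow"),
    )
    # Re-sending with the returned ETag makes the update conditional.
    flow = await client.data_flow.create_or_update(
        "my-rg", "myfactory", "myDataFlow",
        properties=models.MappingDataFlow(description="updated"),
        if_match=flow.etag,
    )
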
-# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar -import warnings - -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class DatasetOperations: - """DatasetOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_factory( - self, - resource_group_name: str, - factory_name: str, - **kwargs - ) -> AsyncIterable["models.DatasetListResponse"]: - """Lists datasets. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
- :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either DatasetListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.DatasetListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - async def extract_data(pipeline_response): - deserialized = self._deserialize('DatasetListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets'} # type: ignore - - async def create_or_update( - self, - resource_group_name: str, - factory_name: str, - dataset_name: str, - properties: "models.Dataset", - if_match: Optional[str] = None, - **kwargs - ) -> "models.DatasetResource": - """Creates or updates a dataset. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param dataset_name: The dataset name. - :type dataset_name: str - :param properties: Dataset properties. - :type properties: ~data_factory_management_client.models.Dataset - :param if_match: ETag of the dataset entity. Should only be specified for update, for which it - should match existing entity or can be * for unconditional update. 
- :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: DatasetResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.DatasetResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - dataset = models.DatasetResource(properties=properties) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(dataset, 'DatasetResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('DatasetResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'} # type: ignore - - async def get( - self, - resource_group_name: str, - factory_name: str, - dataset_name: str, - if_none_match: Optional[str] = None, - **kwargs - ) -> Optional["models.DatasetResource"]: - """Gets a dataset. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param dataset_name: The dataset name. - :type dataset_name: str - :param if_none_match: ETag of the dataset entity. Should only be specified for get. If the ETag - matches the existing entity tag, or if * was provided, then no content will be returned. 
- :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: DatasetResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.DatasetResource or None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DatasetResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 304]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('DatasetResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'} # type: ignore - - async def delete( - self, - resource_group_name: str, - factory_name: str, - dataset_name: str, - **kwargs - ) -> None: - """Deletes a dataset. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param dataset_name: The dataset name. 
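Note the asymmetry in the deleted DatasetOperations.get above: unlike the data-flow get, it whitelists status 304 and returns None instead of raising, so If-None-Match works as a cheap change check. A sketch, with models imported as in the earlier sketch (the linked-service name is a placeholder):

    ds = await client.dataset.create_or_update(
        "my-rg", "myfactory", "myDataset",
        properties=models.AzureBlobDataset(
            linked_service_name=models.LinkedServiceReference(
                type="LinkedServiceReference",  # value per the LinkedServiceReference model
                reference_name="myStorage",
            ),
        ),
    )
    # A 304 Not Modified comes back as None rather than an exception.
    maybe = await client.dataset.get(
        "my-rg", "myfactory", "myDataset", if_none_match=ds.etag,
    )
    if maybe is None:
        print("dataset unchanged since last fetch")
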
- :type dataset_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_exposure_control_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_exposure_control_operations_async.py deleted file mode 100644 index b20acb1c3c8..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_exposure_control_operations_async.py +++ /dev/null @@ -1,241 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... 
import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class ExposureControlOperations: - """ExposureControlOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - async def get_feature_value( - self, - location_id: str, - feature_name: Optional[str] = None, - feature_type: Optional[str] = None, - **kwargs - ) -> "models.ExposureControlResponse": - """Get exposure control feature for specific location. - - :param location_id: The location identifier. - :type location_id: str - :param feature_name: The feature name. - :type feature_name: str - :param feature_type: The feature type. - :type feature_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ExposureControlResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ExposureControlResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ExposureControlResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - exposure_control_request = models.ExposureControlRequest(feature_name=feature_name, feature_type=feature_type) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.get_feature_value.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'locationId': self._serialize.url("location_id", location_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(exposure_control_request, 'ExposureControlRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ExposureControlResponse', pipeline_response) - - if cls: - return 
cls(pipeline_response, deserialized, {}) - - return deserialized - get_feature_value.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/locations/{locationId}/getFeatureValue'} # type: ignore - - async def get_feature_value_by_factory( - self, - resource_group_name: str, - factory_name: str, - feature_name: Optional[str] = None, - feature_type: Optional[str] = None, - **kwargs - ) -> "models.ExposureControlResponse": - """Get exposure control feature for specific factory. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param feature_name: The feature name. - :type feature_name: str - :param feature_type: The feature type. - :type feature_type: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ExposureControlResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ExposureControlResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ExposureControlResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - exposure_control_request = models.ExposureControlRequest(feature_name=feature_name, feature_type=feature_type) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.get_feature_value_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(exposure_control_request, 'ExposureControlRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ExposureControlResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get_feature_value_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getFeatureValue'} # type: ignore - - async def query_feature_value_by_factory( - self, - 
resource_group_name: str, - factory_name: str, - exposure_control_requests: List["models.ExposureControlRequest"], - **kwargs - ) -> "models.ExposureControlBatchResponse": - """Get list of exposure control features for specific factory. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param exposure_control_requests: List of exposure control features. - :type exposure_control_requests: list[~data_factory_management_client.models.ExposureControlRequest] - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ExposureControlBatchResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ExposureControlBatchResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ExposureControlBatchResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - exposure_control_batch_request = models.ExposureControlBatchRequest(exposure_control_requests=exposure_control_requests) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.query_feature_value_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(exposure_control_batch_request, 'ExposureControlBatchRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ExposureControlBatchResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - query_feature_value_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryFeaturesValue'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_factory_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_factory_operations_async.py deleted file mode 
100644 index 46f37c1a6f7..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_factory_operations_async.py +++ /dev/null @@ -1,658 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar -import warnings - -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class FactoryOperations: - """FactoryOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list( - self, - **kwargs - ) -> AsyncIterable["models.FactoryListResponse"]: - """Lists factories under the specified subscription. 
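A consumption sketch, not part of the deleted file: `list` returns an AsyncItemPaged and performs no I/O until iterated; `async for` drives the get_next/extract_data closures below, following `next_link` until it is None. The `factory` attribute name on the client is an assumption.

async def print_factories(client):
    # `client` is an authenticated DataFactoryManagementClient,
    # constructed as in the earlier dataset sketch.
    async for factory in client.factory.list():
        print(factory.name, factory.location)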
- - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either FactoryListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.FactoryListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.FactoryListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - async def extract_data(pipeline_response): - deserialized = self._deserialize('FactoryListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/factories'} # type: ignore - - async def configure_factory_repo( - self, - location_id: str, - factory_resource_id: Optional[str] = None, - repo_configuration: Optional["models.FactoryRepoConfiguration"] = None, - **kwargs - ) -> "models.Factory": - """Updates a factory's repo information. - - :param location_id: The location identifier. - :type location_id: str - :param factory_resource_id: The factory resource id. - :type factory_resource_id: str - :param repo_configuration: Git repo information of the factory. 
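A sketch under stated assumptions: the `models` import path and the FactoryGitHubConfiguration field names follow the public Data Factory API surface and are not confirmed by this patch.

from data_factory_management_client import models  # assumed import path

async def attach_github_repo(client, factory_resource_id: str):
    # A concrete FactoryRepoConfiguration subtype; field names are assumptions.
    repo = models.FactoryGitHubConfiguration(
        account_name="my-gh-account",
        repository_name="adf-pipelines",
        collaboration_branch="main",
        root_folder="/",
    )
    # The flattened arguments are packed into a models.FactoryRepoUpdate body,
    # exactly as the implementation below does before POSTing.
    return await client.factory.configure_factory_repo(
        "eastus", factory_resource_id=factory_resource_id, repo_configuration=repo)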
- :type repo_configuration: ~data_factory_management_client.models.FactoryRepoConfiguration - :keyword callable cls: A custom type or function that will be passed the direct response - :return: Factory, or the result of cls(response) - :rtype: ~data_factory_management_client.models.Factory - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - factory_repo_update = models.FactoryRepoUpdate(factory_resource_id=factory_resource_id, repo_configuration=repo_configuration) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.configure_factory_repo.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'locationId': self._serialize.url("location_id", location_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(factory_repo_update, 'FactoryRepoUpdate') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('Factory', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - configure_factory_repo.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/locations/{locationId}/configureFactoryRepo'} # type: ignore - - def list_by_resource_group( - self, - resource_group_name: str, - **kwargs - ) -> AsyncIterable["models.FactoryListResponse"]: - """Lists factories under the specified resource group. - - :param resource_group_name: The resource group name.
- :type resource_group_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either FactoryListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.FactoryListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.FactoryListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_resource_group.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - async def extract_data(pipeline_response): - deserialized = self._deserialize('FactoryListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged( - get_next, extract_data - ) - list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories'} # type: ignore - - async def create_or_update( - self, - resource_group_name: str, - factory_name: str, - if_match: Optional[str] = None, - location: Optional[str] = None, - tags: Optional[Dict[str, str]] = None, - identity: Optional["models.FactoryIdentity"] = None, - repo_configuration: Optional["models.FactoryRepoConfiguration"] = None, - global_parameters: Optional[Dict[str, "models.GlobalParameterSpecification"]] = None, - **kwargs - ) -> "models.Factory": - """Creates or updates a factory. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param if_match: ETag of the factory entity. Should only be specified for update, for which it - should match existing entity or can be * for unconditional update. - :type if_match: str - :param location: The resource location. - :type location: str - :param tags: The resource tags. 
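A sketch of the flattening documented above, not part of the deleted file: keyword arguments are assembled into a models.Factory body, and omitting if_match makes the PUT unconditional. `client` is assumed as in the earlier dataset sketch.

async def ensure_factory(client):
    return await client.factory.create_or_update(
        "my-rg", "my-factory",
        location="eastus",    # resource location
        tags={"env": "dev"},  # resource tags
    )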
- :type tags: dict[str, str] - :param identity: Managed service identity of the factory. - :type identity: ~data_factory_management_client.models.FactoryIdentity - :param repo_configuration: Git repo information of the factory. - :type repo_configuration: ~data_factory_management_client.models.FactoryRepoConfiguration - :param global_parameters: List of parameters for factory. - :type global_parameters: dict[str, ~data_factory_management_client.models.GlobalParameterSpecification] - :keyword callable cls: A custom type or function that will be passed the direct response - :return: Factory, or the result of cls(response) - :rtype: ~data_factory_management_client.models.Factory - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - factory = models.Factory(location=location, tags=tags, identity=identity, repo_configuration=repo_configuration, global_parameters=global_parameters) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(factory, 'Factory') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('Factory', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore - - async def update( - self, - resource_group_name: str, - factory_name: str, - tags: Optional[Dict[str, str]] = None, - identity: Optional["models.FactoryIdentity"] = None, - **kwargs - ) -> "models.Factory": - """Updates a factory. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
- :type factory_name: str - :param tags: The resource tags. - :type tags: dict[str, str] - :param identity: Managed service identity of the factory. - :type identity: ~data_factory_management_client.models.FactoryIdentity - :keyword callable cls: A custom type or function that will be passed the direct response - :return: Factory, or the result of cls(response) - :rtype: ~data_factory_management_client.models.Factory - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - factory_update_parameters = models.FactoryUpdateParameters(tags=tags, identity=identity) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(factory_update_parameters, 'FactoryUpdateParameters') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('Factory', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore - - async def get( - self, - resource_group_name: str, - factory_name: str, - if_none_match: Optional[str] = None, - **kwargs - ) -> Optional["models.Factory"]: - """Gets a factory. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param if_none_match: ETag of the factory entity. Should only be specified for get. If the ETag - matches the existing entity tag, or if * was provided, then no content will be returned. 
- :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: Factory, or the result of cls(response) - :rtype: ~data_factory_management_client.models.Factory or None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.Factory"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 304]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('Factory', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore - - async def delete( - self, - resource_group_name: str, - factory_name: str, - **kwargs - ) -> None: - """Deletes a factory. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
- :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore - - async def get_git_hub_access_token( - self, - resource_group_name: str, - factory_name: str, - git_hub_access_code: str, - git_hub_access_token_base_url: str, - git_hub_client_id: Optional[str] = None, - **kwargs - ) -> "models.GitHubAccessTokenResponse": - """Get GitHub Access Token. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param git_hub_access_code: GitHub access code. - :type git_hub_access_code: str - :param git_hub_access_token_base_url: GitHub access token base URL. - :type git_hub_access_token_base_url: str - :param git_hub_client_id: GitHub application client ID. 
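A sketch only: the OAuth code would come from GitHub's authorize redirect, and the response attribute name is an assumption based on the GitHubAccessTokenResponse model named below.

async def exchange_github_code(client, oauth_code: str):
    token_response = await client.factory.get_git_hub_access_token(
        "my-rg", "my-factory",
        git_hub_access_code=oauth_code,                      # required
        git_hub_access_token_base_url="https://github.com",  # required
    )
    return token_response.git_hub_access_token  # assumed attribute name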
- :type git_hub_client_id: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: GitHubAccessTokenResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.GitHubAccessTokenResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.GitHubAccessTokenResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - git_hub_access_token_request = models.GitHubAccessTokenRequest(git_hub_access_code=git_hub_access_code, git_hub_client_id=git_hub_client_id, git_hub_access_token_base_url=git_hub_access_token_base_url) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.get_git_hub_access_token.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(git_hub_access_token_request, 'GitHubAccessTokenRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('GitHubAccessTokenResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get_git_hub_access_token.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getGitHubAccessToken'} # type: ignore - - async def get_data_plane_access( - self, - resource_group_name: str, - factory_name: str, - permissions: Optional[str] = None, - access_resource_path: Optional[str] = None, - profile_name: Optional[str] = None, - start_time: Optional[str] = None, - expire_time: Optional[str] = None, - **kwargs - ) -> "models.AccessPolicyResponse": - """Get Data Plane access. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param permissions: The string with permissions for Data Plane access. Currently only 'r' is - supported which grants read only access. 
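A sketch respecting the constraints documented here, not part of the deleted file: only 'r' is accepted for permissions, and only the empty resource path (the factory itself) is supported. The AccessPolicyResponse attribute name is an assumption.

async def get_read_token(client):
    policy = await client.factory.get_data_plane_access(
        "my-rg", "my-factory",
        permissions="r",          # read-only; the only supported value
        access_resource_path="",  # factory-level scope; the only supported value
        profile_name="DefaultProfile",
    )
    return policy.access_token  # assumed attribute name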
- :type permissions: str - :param access_resource_path: The resource path to get access relative to factory. Currently - only empty string is supported which corresponds to the factory resource. - :type access_resource_path: str - :param profile_name: The name of the profile. Currently only the default is supported. The - default value is DefaultProfile. - :type profile_name: str - :param start_time: Start time for the token. If not specified the current time will be used. - :type start_time: str - :param expire_time: Expiration time for the token. Maximum duration for the token is eight - hours and by default the token will expire in eight hours. - :type expire_time: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: AccessPolicyResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.AccessPolicyResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.AccessPolicyResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - policy = models.UserAccessPolicy(permissions=permissions, access_resource_path=access_resource_path, profile_name=profile_name, start_time=start_time, expire_time=expire_time) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.get_data_plane_access.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(policy, 'UserAccessPolicy') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('AccessPolicyResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get_data_plane_access.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getDataPlaneAccess'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_node_operations_async.py 
b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_node_operations_async.py deleted file mode 100644 index a6022196653..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_node_operations_async.py +++ /dev/null @@ -1,301 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Generic, Optional, TypeVar -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class IntegrationRuntimeNodeOperations: - """IntegrationRuntimeNodeOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - async def get( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - node_name: str, - **kwargs - ) -> "models.SelfHostedIntegrationRuntimeNode": - """Gets a self-hosted integration runtime node. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param node_name: The integration runtime node name. 
- :type node_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: SelfHostedIntegrationRuntimeNode, or the result of cls(response) - :rtype: ~data_factory_management_client.models.SelfHostedIntegrationRuntimeNode - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.SelfHostedIntegrationRuntimeNode"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('SelfHostedIntegrationRuntimeNode', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} # type: ignore - - async def delete( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - node_name: str, - **kwargs - ) -> None: - """Deletes a self-hosted integration runtime node. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param node_name: The integration runtime node name. 
- :type node_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} # type: ignore - - async def update( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - node_name: str, - concurrent_jobs_limit: Optional[int] = None, - **kwargs - ) -> "models.SelfHostedIntegrationRuntimeNode": - """Updates a self-hosted integration runtime node. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param node_name: The integration runtime node name. - :type node_name: str - :param concurrent_jobs_limit: The number of concurrent jobs permitted to run on the integration - runtime node. Values between 1 and maxConcurrentJobs(inclusive) are allowed. 
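A sketch, not part of the deleted file; `client` is assumed as earlier, and the runtime and node names are placeholders chosen to satisfy the URL patterns enforced below.

async def throttle_node(client):
    # concurrent_jobs_limit must lie between 1 and the node's
    # maxConcurrentJobs, per the parameter description above.
    return await client.integration_runtime_node.update(
        "my-rg", "my-factory", "my-shir", "Node_1",
        concurrent_jobs_limit=4,
    )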
- :type concurrent_jobs_limit: int - :keyword callable cls: A custom type or function that will be passed the direct response - :return: SelfHostedIntegrationRuntimeNode, or the result of cls(response) - :rtype: ~data_factory_management_client.models.SelfHostedIntegrationRuntimeNode - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.SelfHostedIntegrationRuntimeNode"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - update_integration_runtime_node_request = models.UpdateIntegrationRuntimeNodeRequest(concurrent_jobs_limit=concurrent_jobs_limit) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(update_integration_runtime_node_request, 'UpdateIntegrationRuntimeNodeRequest') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('SelfHostedIntegrationRuntimeNode', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} # type: ignore - - async def get_ip_address( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - node_name: str, - **kwargs - ) -> "models.IntegrationRuntimeNodeIpAddress": - """Get the IP address of self-hosted integration runtime node. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
- :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param node_name: The integration runtime node name. - :type node_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeNodeIpAddress, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeNodeIpAddress - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeNodeIpAddress"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get_ip_address.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeNodeIpAddress', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get_ip_address.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}/ipAddress'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_object_metadata_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_object_metadata_operations_async.py deleted file mode 100644 index 70df0716c21..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_object_metadata_operations_async.py +++ /dev/null @@ -1,230 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.mgmt.core.exceptions import ARMErrorFormat -from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling - -from ... import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class IntegrationRuntimeObjectMetadataOperations: - """IntegrationRuntimeObjectMetadataOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - async def _refresh_initial( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs - ) -> Optional["models.SsisObjectMetadataStatusResponse"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.SsisObjectMetadataStatusResponse"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self._refresh_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - 
map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('SsisObjectMetadataStatusResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - _refresh_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/refreshObjectMetadata'} # type: ignore - - async def begin_refresh( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs - ) -> AsyncLROPoller["models.SsisObjectMetadataStatusResponse"]: - """Refresh the object metadata of an SSIS integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of AsyncLROPoller that returns either SsisObjectMetadataStatusResponse or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.SsisObjectMetadataStatusResponse] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.SsisObjectMetadataStatusResponse"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = await self._refresh_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - integration_runtime_name=integration_runtime_name, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize('SsisObjectMetadataStatusResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_refresh.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/refreshObjectMetadata'} # type: ignore - - async def get( -
self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - metadata_path: Optional[str] = None, - **kwargs - ) -> "models.SsisObjectMetadataListResponse": - """Get an SSIS integration runtime's object metadata under a specified path. The returned - metadata list is pageable. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param metadata_path: Metadata path. - :type metadata_path: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: SsisObjectMetadataListResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.SsisObjectMetadataListResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.SsisObjectMetadataListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - get_metadata_request = models.GetSsisObjectMetadataRequest(metadata_path=metadata_path) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - if get_metadata_request is not None: - body_content = self._serialize.body(get_metadata_request, 'GetSsisObjectMetadataRequest') - else: - body_content = None - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('SsisObjectMetadataListResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getObjectMetadata'} # type:
ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_operations_async.py deleted file mode 100644 index 82b285c7a74..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_operations_async.py +++ /dev/null @@ -1,1176 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union -import warnings - -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.mgmt.core.exceptions import ARMErrorFormat -from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling - -from ... import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class IntegrationRuntimeOperations: - """IntegrationRuntimeOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_factory( - self, - resource_group_name: str, - factory_name: str, - **kwargs - ) -> AsyncIterable["models.IntegrationRuntimeListResponse"]: - """Lists integration runtimes. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
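Because list_by_factory returns an AsyncItemPaged rather than a coroutine, callers iterate it with `async for` and paging follows next_link transparently; a minimal sketch, assuming the group is mounted as `client.integration_runtime` on the same hypothetical client as before:

    # Each item is an IntegrationRuntimeResource; no explicit page handling is needed.
    async for runtime in client.integration_runtime.list_by_factory("rg", "adf"):
        print(runtime.name, runtime.properties.type)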
- :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either IntegrationRuntimeListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.IntegrationRuntimeListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - async def extract_data(pipeline_response): - deserialized = self._deserialize('IntegrationRuntimeListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes'} # type: ignore - - async def create_or_update( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - properties: "models.IntegrationRuntime", - if_match: Optional[str] = None, - **kwargs - ) -> "models.IntegrationRuntimeResource": - """Creates or updates an integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param properties: Integration runtime properties. - :type properties: ~data_factory_management_client.models.IntegrationRuntime - :param if_match: ETag of the integration runtime entity. 
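A sketch of create_or_update with a self-hosted runtime, assuming the vendored `models` module is in scope; note the method itself wraps `properties` in an IntegrationRuntimeResource before issuing the PUT:

    # SelfHostedIntegrationRuntime is one concrete subtype of models.IntegrationRuntime.
    ir = await client.integration_runtime.create_or_update(
        resource_group_name="rg", factory_name="adf", integration_runtime_name="shir",
        properties=models.SelfHostedIntegrationRuntime(description="demo runtime"),
    )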
Should only be specified for update, - for which it should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - integration_runtime = models.IntegrationRuntimeResource(properties=properties) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(integration_runtime, 'IntegrationRuntimeResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore - - async def get( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - if_none_match: Optional[str] = None, - **kwargs - ) -> Optional["models.IntegrationRuntimeResource"]: - """Gets an integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
- :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param if_none_match: ETag of the integration runtime entity. Should only be specified for get. - If the ETag matches the existing entity tag, or if * was provided, then no content will be - returned. - :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource or None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.IntegrationRuntimeResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 304]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore - - async def update( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - auto_update: Optional[Union[str, "models.IntegrationRuntimeAutoUpdate"]] = None, - update_delay_offset: Optional[str] = None, - **kwargs - ) -> "models.IntegrationRuntimeResource": - """Updates an integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. 
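A sketch of this PATCH-style update, which only adjusts the auto-update behavior of a self-hosted runtime (values hypothetical; the string form of the enum is accepted):

    updated = await client.integration_runtime.update(
        "rg", "adf", "shir",
        auto_update="On",             # str or models.IntegrationRuntimeAutoUpdate
        update_delay_offset="PT03H",  # daily offset at which auto-update runs
    )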
- :type integration_runtime_name: str - :param auto_update: Enables or disables the auto-update feature of the self-hosted integration - runtime. See https://go.microsoft.com/fwlink/?linkid=854189. - :type auto_update: str or ~data_factory_management_client.models.IntegrationRuntimeAutoUpdate - :param update_delay_offset: The time offset (in hours) in the day, e.g., PT03H is 3 hours. The - integration runtime auto-update will happen at that time. - :type update_delay_offset: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - update_integration_runtime_request = models.UpdateIntegrationRuntimeRequest(auto_update=auto_update, update_delay_offset=update_delay_offset) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(update_integration_runtime_request, 'UpdateIntegrationRuntimeRequest') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore - - async def delete( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs - ) -> None: - """Deletes an
integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore - - async def get_status( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs - ) -> "models.IntegrationRuntimeStatusResponse": - """Gets detailed status information for an integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. 
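get_status issues a POST and reports the live state of the runtime rather than its resource definition; a minimal sketch on the same hypothetical client:

    status = await client.integration_runtime.get_status("rg", "adf", "shir")
    print(status.name, status.properties.state)  # e.g. "Online" once a node is connected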
- :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeStatusResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeStatusResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get_status.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getStatus'} # type: ignore - - async def get_connection_info( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs - ) -> "models.IntegrationRuntimeConnectionInfo": - """Gets the on-premises integration runtime connection information for encrypting the on-premises - data source credentials. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. 
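The connection info returned here is what on-premises tooling uses to encrypt data source credentials locally before they ever reach the service; retrieval itself is a single POST:

    # info exposes fields such as service_token and public_key (names per the SDK models).
    info = await client.integration_runtime.get_connection_info("rg", "adf", "shir")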
- :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeConnectionInfo, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeConnectionInfo - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeConnectionInfo"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get_connection_info.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeConnectionInfo', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get_connection_info.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getConnectionInfo'} # type: ignore - - async def regenerate_auth_key( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - key_name: Optional[Union[str, "models.IntegrationRuntimeAuthKeyName"]] = None, - **kwargs - ) -> "models.IntegrationRuntimeAuthKeys": - """Regenerates the authentication key for an integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param key_name: The name of the authentication key to regenerate. 
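regenerate_auth_key returns the refreshed IntegrationRuntimeAuthKeys pair, and list_auth_key re-reads it; a sketch, passing key_name as a plain string since the parameter accepts str or the enum:

    keys = await client.integration_runtime.regenerate_auth_key(
        "rg", "adf", "shir", key_name="authKey2",
    )
    keys = await client.integration_runtime.list_auth_key("rg", "adf", "shir")
    print(keys.auth_key1, keys.auth_key2)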
- :type key_name: str or ~data_factory_management_client.models.IntegrationRuntimeAuthKeyName - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeAuthKeys, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeAuthKeys - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeAuthKeys"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - regenerate_key_parameters = models.IntegrationRuntimeRegenerateKeyParameters(key_name=key_name) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.regenerate_auth_key.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(regenerate_key_parameters, 'IntegrationRuntimeRegenerateKeyParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeAuthKeys', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - regenerate_auth_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/regenerateAuthKey'} # type: ignore - - async def list_auth_key( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs - ) -> "models.IntegrationRuntimeAuthKeys": - """Retrieves the authentication keys for an integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. 
- :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeAuthKeys, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeAuthKeys - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeAuthKeys"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.list_auth_key.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeAuthKeys', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - list_auth_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/listAuthKeys'} # type: ignore - - async def _start_initial( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs - ) -> Optional["models.IntegrationRuntimeStatusResponse"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.IntegrationRuntimeStatusResponse"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self._start_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, 
pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start'} # type: ignore - - async def begin_start( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs - ) -> AsyncLROPoller["models.IntegrationRuntimeStatusResponse"]: - """Starts a ManagedReserved type integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
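begin_start hands back an AsyncLROPoller, so a caller awaits the coroutine to obtain the poller and then awaits its result; a minimal sketch on the same hypothetical client (passing polling=False would skip polling entirely):

    poller = await client.integration_runtime.begin_start("rg", "adf", "managed-ir")
    status = await poller.result()  # suspends until the long-running start completes
    print(status.properties.state)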
- :return: An instance of AsyncLROPoller that returns either IntegrationRuntimeStatusResponse or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.IntegrationRuntimeStatusResponse] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = await self._start_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - integration_runtime_name=integration_runtime_name, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start'} # type: ignore - - async def _stop_initial( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs - ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self._stop_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - 
raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop'} # type: ignore - - async def begin_stop( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs - ) -> AsyncLROPoller[None]: - """Stops a ManagedReserved type integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = await self._stop_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - integration_runtime_name=integration_runtime_name, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - if cls: - return cls(pipeline_response, None, {}) - - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop'} # type: ignore - - async def sync_credentials( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs - ) -> None: - """Force the integration runtime to synchronize credentials across its nodes; this overrides the - credentials on all worker nodes with those available on the dispatcher node. If you already - have the latest credential backup file, importing it manually on a self-hosted integration - runtime node is preferred to calling this API directly. - - :param resource_group_name: The resource group name.
- :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.sync_credentials.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - sync_credentials.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/syncCredentials'} # type: ignore - - async def get_monitoring_data( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs - ) -> "models.IntegrationRuntimeMonitoringData": - """Get the integration runtime monitoring data, which includes the monitor data for all the nodes - under this integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. 
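get_monitoring_data aggregates per-node metrics for a self-hosted runtime; a sketch that walks the node list (attribute names as modeled by IntegrationRuntimeNodeMonitoringData, values hypothetical):

    data = await client.integration_runtime.get_monitoring_data("rg", "adf", "shir")
    for node in data.nodes or []:
        print(node.node_name, node.concurrent_jobs_running)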
- :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeMonitoringData, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeMonitoringData - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeMonitoringData"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get_monitoring_data.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeMonitoringData', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get_monitoring_data.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/monitoringData'} # type: ignore - - async def upgrade( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - **kwargs - ) -> None: - """Upgrade the self-hosted integration runtime to the latest version, if one is available. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name.
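Both upgrade and the sync_credentials call above are bare POSTs that return None on success; a minimal sketch:

    await client.integration_runtime.sync_credentials("rg", "adf", "shir")
    await client.integration_runtime.upgrade("rg", "adf", "shir")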
-        :type integration_runtime_name: str
-        :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: None, or the result of cls(response)
-        :rtype: None
-        :raises: ~azure.core.exceptions.HttpResponseError
-        """
-        cls = kwargs.pop('cls', None)  # type: ClsType[None]
-        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
-        error_map.update(kwargs.pop('error_map', {}))
-        api_version = "2018-06-01"
-
-        # Construct URL
-        url = self.upgrade.metadata['url']  # type: ignore
-        path_format_arguments = {
-            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
-            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
-            'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
-            'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
-        }
-        url = self._client.format_url(url, **path_format_arguments)
-
-        # Construct parameters
-        query_parameters = {}  # type: Dict[str, Any]
-        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
-        # Construct headers
-        header_parameters = {}  # type: Dict[str, Any]
-
-        request = self._client.post(url, query_parameters, header_parameters)
-        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
-        response = pipeline_response.http_response
-
-        if response.status_code not in [200]:
-            map_error(status_code=response.status_code, response=response, error_map=error_map)
-            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
-        if cls:
-            return cls(pipeline_response, None, {})
-
-    upgrade.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/upgrade'}  # type: ignore
-
-    async def remove_link(
-        self,
-        resource_group_name: str,
-        factory_name: str,
-        integration_runtime_name: str,
-        linked_factory_name: str,
-        **kwargs
-    ) -> None:
-        """Remove all linked integration runtimes under a specific data factory in a self-hosted
-        integration runtime.
-
-        :param resource_group_name: The resource group name.
-        :type resource_group_name: str
-        :param factory_name: The factory name.
-        :type factory_name: str
-        :param integration_runtime_name: The integration runtime name.
-        :type integration_runtime_name: str
-        :param linked_factory_name: The data factory name for linked integration runtime.
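For orientation, this is how the node-level operations above (upgrade, get_monitoring_data, remove_link) would be driven from the vendored async client. A minimal sketch, assuming the client class is DataFactoryManagementClient, that the operation group is exposed as an integration_runtime attribute, and that azure-identity supplies the credential; the import path shown is illustrative only:

    import asyncio

    from azure.identity.aio import DefaultAzureCredential
    from data_factory_management_client.aio import DataFactoryManagementClient  # assumed path

    async def refresh_ir(rg: str, factory: str, ir_name: str) -> None:
        async with DefaultAzureCredential() as credential:
            client = DataFactoryManagementClient(credential, subscription_id="<subscription-id>")
            # POST .../integrationRuntimes/{name}/upgrade; a 200 yields None.
            await client.integration_runtime.upgrade(rg, factory, ir_name)
            # POST .../integrationRuntimes/{name}/monitoringData; typed response.
            data = await client.integration_runtime.get_monitoring_data(rg, factory, ir_name)
            for node in data.nodes or []:
                print(node.node_name, node.available_memory_in_mb)

    asyncio.run(refresh_ir("my-rg", "my-factory", "my-ir"))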
- :type linked_factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - linked_integration_runtime_request = models.LinkedIntegrationRuntimeRequest(linked_factory_name=linked_factory_name) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.remove_link.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(linked_integration_runtime_request, 'LinkedIntegrationRuntimeRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - remove_link.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/removeLinks'} # type: ignore - - async def create_linked_integration_runtime( - self, - resource_group_name: str, - factory_name: str, - integration_runtime_name: str, - name: Optional[str] = None, - subscription_id: Optional[str] = None, - data_factory_name: Optional[str] = None, - data_factory_location: Optional[str] = None, - **kwargs - ) -> "models.IntegrationRuntimeStatusResponse": - """Create a linked integration runtime entry in a shared integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param name: The name of the linked integration runtime. - :type name: str - :param subscription_id: The ID of the subscription that the linked integration runtime belongs - to. 
- :type subscription_id: str - :param data_factory_name: The name of the data factory that the linked integration runtime - belongs to. - :type data_factory_name: str - :param data_factory_location: The location of the data factory that the linked integration - runtime belongs to. - :type data_factory_location: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeStatusResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeStatusResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - create_linked_integration_runtime_request = models.CreateLinkedIntegrationRuntimeRequest(name=name, subscription_id=subscription_id, data_factory_name=data_factory_name, data_factory_location=data_factory_location) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_linked_integration_runtime.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(create_linked_integration_runtime_request, 'CreateLinkedIntegrationRuntimeRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_linked_integration_runtime.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/linkedIntegrationRuntime'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_linked_service_operations_async.py 
b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_linked_service_operations_async.py deleted file mode 100644 index 56e9e6f663a..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_linked_service_operations_async.py +++ /dev/null @@ -1,312 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar -import warnings - -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class LinkedServiceOperations: - """LinkedServiceOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_factory( - self, - resource_group_name: str, - factory_name: str, - **kwargs - ) -> AsyncIterable["models.LinkedServiceListResponse"]: - """Lists linked services. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
- :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either LinkedServiceListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.LinkedServiceListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - async def extract_data(pipeline_response): - deserialized = self._deserialize('LinkedServiceListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices'} # type: ignore - - async def create_or_update( - self, - resource_group_name: str, - factory_name: str, - linked_service_name: str, - properties: "models.LinkedService", - if_match: Optional[str] = None, - **kwargs - ) -> "models.LinkedServiceResource": - """Creates or updates a linked service. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param linked_service_name: The linked service name. - :type linked_service_name: str - :param properties: Properties of linked service. - :type properties: ~data_factory_management_client.models.LinkedService - :param if_match: ETag of the linkedService entity. 
Should only be specified for update, for - which it should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: LinkedServiceResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.LinkedServiceResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - linked_service = models.LinkedServiceResource(properties=properties) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(linked_service, 'LinkedServiceResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('LinkedServiceResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} # type: ignore - - async def get( - self, - resource_group_name: str, - factory_name: str, - linked_service_name: str, - if_none_match: Optional[str] = None, - **kwargs - ) -> Optional["models.LinkedServiceResource"]: - """Gets a linked service. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param linked_service_name: The linked service name. 
- :type linked_service_name: str - :param if_none_match: ETag of the linked service entity. Should only be specified for get. If - the ETag matches the existing entity tag, or if * was provided, then no content will be - returned. - :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: LinkedServiceResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.LinkedServiceResource or None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.LinkedServiceResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 304]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('LinkedServiceResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} # type: ignore - - async def delete( - self, - resource_group_name: str, - factory_name: str, - linked_service_name: str, - **kwargs - ) -> None: - """Deletes a linked service. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param linked_service_name: The linked service name. 
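The get operation a few lines up is a conditional read: the [200, 304] status check plus the Optional return type mean that when if_none_match carries a previously seen ETag and the entity is unchanged, the service answers 304 Not Modified and the method returns None rather than a deserialized body. A sketch of the polling pattern this enables, reusing the assumed client from the earlier example (the linked_service attribute name is likewise an assumption):

    # First read: fetch the resource and remember its ETag.
    current = await client.linked_service.get("my-rg", "my-factory", "my-ls")
    etag = current.etag
    # Later reads are cheap no-ops while nothing changes server-side.
    maybe_new = await client.linked_service.get(
        "my-rg", "my-factory", "my-ls", if_none_match=etag)
    if maybe_new is None:
        print("unchanged")         # 304: no body was returned
    else:
        etag = maybe_new.etag      # resource changed; track the new ETag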
- :type linked_service_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_private_endpoint_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_private_endpoint_operations_async.py deleted file mode 100644 index 3a899779963..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_private_endpoint_operations_async.py +++ /dev/null @@ -1,336 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, List, Optional, TypeVar -import warnings - -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... 
import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class ManagedPrivateEndpointOperations: - """ManagedPrivateEndpointOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_factory( - self, - resource_group_name: str, - factory_name: str, - managed_virtual_network_name: str, - **kwargs - ) -> AsyncIterable["models.ManagedPrivateEndpointListResponse"]: - """Lists managed private endpoints. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. - :type managed_virtual_network_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ManagedPrivateEndpointListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.ManagedPrivateEndpointListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - async def extract_data(pipeline_response): - deserialized = 
self._deserialize('ManagedPrivateEndpointListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints'} # type: ignore - - async def create_or_update( - self, - resource_group_name: str, - factory_name: str, - managed_virtual_network_name: str, - managed_private_endpoint_name: str, - if_match: Optional[str] = None, - connection_state: Optional["models.ConnectionStateProperties"] = None, - fqdns: Optional[List[str]] = None, - group_id: Optional[str] = None, - private_link_resource_id: Optional[str] = None, - **kwargs - ) -> "models.ManagedPrivateEndpointResource": - """Creates or updates a managed private endpoint. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. - :type managed_virtual_network_name: str - :param managed_private_endpoint_name: Managed private endpoint name. - :type managed_private_endpoint_name: str - :param if_match: ETag of the managed private endpoint entity. Should only be specified for - update, for which it should match existing entity or can be * for unconditional update. - :type if_match: str - :param connection_state: The managed private endpoint connection state. - :type connection_state: ~data_factory_management_client.models.ConnectionStateProperties - :param fqdns: Fully qualified domain names. - :type fqdns: list[str] - :param group_id: The groupId to which the managed private endpoint is created. - :type group_id: str - :param private_link_resource_id: The ARM resource ID of the resource to which the managed - private endpoint is created. 
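Because the request body is flattened into keyword arguments here, creating a managed private endpoint is a single call: the method assembles a ManagedPrivateEndpointResource internally from connection_state, fqdns, group_id, and private_link_resource_id before issuing the PUT. A hedged sketch, with the managed_private_endpoint attribute name and all resource names and IDs purely illustrative:

    endpoint = await client.managed_private_endpoint.create_or_update(
        "my-rg", "my-factory",
        "default",      # managed virtual network name
        "my-endpoint",  # managed private endpoint name
        group_id="blob",
        private_link_resource_id=(
            "/subscriptions/<subscription-id>/resourceGroups/my-rg"
            "/providers/Microsoft.Storage/storageAccounts/mystorageacct"),
    )
    print(endpoint.id)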
- :type private_link_resource_id: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ManagedPrivateEndpointResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ManagedPrivateEndpointResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - managed_private_endpoint = models.ManagedPrivateEndpointResource(connection_state=connection_state, fqdns=fqdns, group_id=group_id, private_link_resource_id=private_link_resource_id) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(managed_private_endpoint, 'ManagedPrivateEndpointResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ManagedPrivateEndpointResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore - - async def get( - self, - resource_group_name: str, - factory_name: str, - 
managed_virtual_network_name: str, - managed_private_endpoint_name: str, - if_none_match: Optional[str] = None, - **kwargs - ) -> "models.ManagedPrivateEndpointResource": - """Gets a managed private endpoint. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. - :type managed_virtual_network_name: str - :param managed_private_endpoint_name: Managed private endpoint name. - :type managed_private_endpoint_name: str - :param if_none_match: ETag of the managed private endpoint entity. Should only be specified for - get. If the ETag matches the existing entity tag, or if * was provided, then no content will be - returned. - :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ManagedPrivateEndpointResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ManagedPrivateEndpointResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ManagedPrivateEndpointResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore - - async def delete( - self, - resource_group_name: str, - factory_name: str, - managed_virtual_network_name: str, - managed_private_endpoint_name: str, - **kwargs - ) -> None: - """Deletes a managed private endpoint. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. - :type managed_virtual_network_name: str - :param managed_private_endpoint_name: Managed private endpoint name. - :type managed_private_endpoint_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_virtual_network_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_virtual_network_operations_async.py deleted file 
mode 100644 index 2152988d7ef..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_virtual_network_operations_async.py +++ /dev/null @@ -1,255 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar -import warnings - -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class ManagedVirtualNetworkOperations: - """ManagedVirtualNetworkOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_factory( - self, - resource_group_name: str, - factory_name: str, - **kwargs - ) -> AsyncIterable["models.ManagedVirtualNetworkListResponse"]: - """Lists managed Virtual Networks. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
- :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ManagedVirtualNetworkListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.ManagedVirtualNetworkListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - async def extract_data(pipeline_response): - deserialized = self._deserialize('ManagedVirtualNetworkListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks'} # type: ignore - - async def create_or_update( - self, - resource_group_name: str, - factory_name: str, - managed_virtual_network_name: str, - properties: "models.ManagedVirtualNetwork", - if_match: Optional[str] = None, - **kwargs - ) -> "models.ManagedVirtualNetworkResource": - """Creates or updates a managed Virtual Network. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. - :type managed_virtual_network_name: str - :param properties: Managed Virtual Network properties. 
- :type properties: ~data_factory_management_client.models.ManagedVirtualNetwork - :param if_match: ETag of the managed Virtual Network entity. Should only be specified for - update, for which it should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ManagedVirtualNetworkResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ManagedVirtualNetworkResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - managed_virtual_network = models.ManagedVirtualNetworkResource(properties=properties) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(managed_virtual_network, 'ManagedVirtualNetworkResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ManagedVirtualNetworkResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}'} # type: ignore - - async def get( - self, - resource_group_name: str, - factory_name: str, - managed_virtual_network_name: str, - if_none_match: Optional[str] = None, - **kwargs - ) -> "models.ManagedVirtualNetworkResource": - """Gets a managed 
Virtual Network. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. - :type managed_virtual_network_name: str - :param if_none_match: ETag of the managed Virtual Network entity. Should only be specified for - get. If the ETag matches the existing entity tag, or if * was provided, then no content will be - returned. - :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ManagedVirtualNetworkResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ManagedVirtualNetworkResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ManagedVirtualNetworkResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_operation_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_operation_operations_async.py deleted file mode 100644 index 83206d77039..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_operation_operations_async.py +++ /dev/null @@ -1,101 +0,0 @@ -# coding=utf-8 -# 
-------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar -import warnings - -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class OperationOperations: - """OperationOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list( - self, - **kwargs - ) -> AsyncIterable["models.OperationListResponse"]: - """Lists the available Azure Data Factory API operations. 
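Every list operation in this client is built from the same three closures that follow: prepare_request builds the first request (or reuses next_link verbatim for subsequent pages), extract_data deserializes one page into its items plus the next-page link, and get_next performs a single page round trip. AsyncItemPaged stitches them into one async iterator, so callers never touch paging explicitly. A sketch, assuming the group is exposed as an operation attribute on the client:

    # Walks nextLink pages transparently; each `op` is an Operation model.
    async for op in client.operation.list():
        print(op.name)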
- - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either OperationListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.OperationListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.OperationListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list.metadata['url'] # type: ignore - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - async def extract_data(pipeline_response): - deserialized = self._deserialize('OperationListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': '/providers/Microsoft.DataFactory/operations'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_operations_async.py deleted file mode 100644 index 34c7453f951..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_operations_async.py +++ /dev/null @@ -1,405 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar -import warnings - -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... 
import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class PipelineOperations: - """PipelineOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_factory( - self, - resource_group_name: str, - factory_name: str, - **kwargs - ) -> AsyncIterable["models.PipelineListResponse"]: - """Lists pipelines. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either PipelineListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.PipelineListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - async def extract_data(pipeline_response): - deserialized = self._deserialize('PipelineListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - 
map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines'} # type: ignore - - async def create_or_update( - self, - resource_group_name: str, - factory_name: str, - pipeline_name: str, - pipeline: "models.PipelineResource", - if_match: Optional[str] = None, - **kwargs - ) -> "models.PipelineResource": - """Creates or updates a pipeline. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param pipeline_name: The pipeline name. - :type pipeline_name: str - :param pipeline: Pipeline resource definition. - :type pipeline: ~data_factory_management_client.models.PipelineResource - :param if_match: ETag of the pipeline entity. Should only be specified for update, for which - it should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: PipelineResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.PipelineResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(pipeline, 'PipelineResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, 
error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('PipelineResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} # type: ignore - - async def get( - self, - resource_group_name: str, - factory_name: str, - pipeline_name: str, - if_none_match: Optional[str] = None, - **kwargs - ) -> Optional["models.PipelineResource"]: - """Gets a pipeline. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param pipeline_name: The pipeline name. - :type pipeline_name: str - :param if_none_match: ETag of the pipeline entity. Should only be specified for get. If the - ETag matches the existing entity tag, or if * was provided, then no content will be returned. - :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: PipelineResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.PipelineResource or None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.PipelineResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 304]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('PipelineResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} # type: ignore - - async def delete( - self, - resource_group_name: str, - factory_name: str, - pipeline_name: str, - **kwargs - ) -> None: - """Deletes a pipeline. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param pipeline_name: The pipeline name. - :type pipeline_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} # type: ignore - - async def create_run( - self, - resource_group_name: str, - factory_name: str, - pipeline_name: str, - reference_pipeline_run_id: Optional[str] = None, - is_recovery: Optional[bool] = None, - start_activity_name: Optional[str] = None, - start_from_failure: Optional[bool] = None, - parameters: Optional[Dict[str, object]] = None, - **kwargs - ) -> "models.CreateRunResponse": - """Creates a run of a pipeline. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param pipeline_name: The pipeline name. - :type pipeline_name: str - :param reference_pipeline_run_id: The pipeline run identifier. If run ID is specified the - parameters of the specified run will be used to create a new run. - :type reference_pipeline_run_id: str - :param is_recovery: Recovery mode flag. 
If recovery mode is set to true, the specified - referenced pipeline run and the new run will be grouped under the same groupId. - :type is_recovery: bool - :param start_activity_name: In recovery mode, the rerun will start from this activity. If not - specified, all activities will run. - :type start_activity_name: str - :param start_from_failure: In recovery mode, if set to true, the rerun will start from failed - activities. The property will be used only if startActivityName is not specified. - :type start_from_failure: bool - :param parameters: Parameters of the pipeline run. These parameters will be used only if the - runId is not specified. - :type parameters: dict[str, object] - :keyword callable cls: A custom type or function that will be passed the direct response - :return: CreateRunResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.CreateRunResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.CreateRunResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_run.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - if reference_pipeline_run_id is not None: - query_parameters['referencePipelineRunId'] = self._serialize.query("reference_pipeline_run_id", reference_pipeline_run_id, 'str') - if is_recovery is not None: - query_parameters['isRecovery'] = self._serialize.query("is_recovery", is_recovery, 'bool') - if start_activity_name is not None: - query_parameters['startActivityName'] = self._serialize.query("start_activity_name", start_activity_name, 'str') - if start_from_failure is not None: - query_parameters['startFromFailure'] = self._serialize.query("start_from_failure", start_from_failure, 'bool') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - if parameters is not None: - body_content = self._serialize.body(parameters, '{object}') - else: - body_content = None - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, 
error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('CreateRunResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_run.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}/createRun'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_run_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_run_operations_async.py deleted file mode 100644 index 5cdfd09fe01..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_run_operations_async.py +++ /dev/null @@ -1,243 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -import datetime -from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class PipelineRunOperations: - """PipelineRunOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - async def query_by_factory( - self, - resource_group_name: str, - factory_name: str, - last_updated_after: datetime.datetime, - last_updated_before: datetime.datetime, - continuation_token_parameter: Optional[str] = None, - filters: Optional[List["models.RunQueryFilter"]] = None, - order_by: Optional[List["models.RunQueryOrderBy"]] = None, - **kwargs - ) -> "models.PipelineRunsQueryResponse": - """Query pipeline runs in the factory based on input filter conditions. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param last_updated_after: The time at or after which the run event was updated in 'ISO 8601' - format. 
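create_run, just above, exposes the rerun controls as plain query parameters, so a failed run can be retried in recovery mode starting from its failed activities. A minimal sketch, assuming the operation group is surfaced as client.pipeline (the attribute name is not fixed by this diff):

async def rerun_from_failure(client, rg, factory, pipeline_name, failed_run_id):
    # Group the rerun with the original run and restart at the failed activities.
    response = await client.pipeline.create_run(
        rg,
        factory,
        pipeline_name,
        reference_pipeline_run_id=failed_run_id,
        is_recovery=True,
        start_from_failure=True,
    )
    return response.run_id  # CreateRunResponse carries the new run identifier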
- :type last_updated_after: ~datetime.datetime - :param last_updated_before: The time at or before which the run event was updated in 'ISO 8601' - format. - :type last_updated_before: ~datetime.datetime - :param continuation_token_parameter: The continuation token for getting the next page of - results. Null for first page. - :type continuation_token_parameter: str - :param filters: List of filters. - :type filters: list[~data_factory_management_client.models.RunQueryFilter] - :param order_by: List of OrderBy option. - :type order_by: list[~data_factory_management_client.models.RunQueryOrderBy] - :keyword callable cls: A custom type or function that will be passed the direct response - :return: PipelineRunsQueryResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.PipelineRunsQueryResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineRunsQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.query_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('PipelineRunsQueryResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryPipelineRuns'} # type: ignore - - async def get( - self, - resource_group_name: str, - factory_name: str, - run_id: str, - **kwargs - ) -> "models.PipelineRun": - """Get a pipeline run by its run ID. 
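query_by_factory, above, folds its keyword arguments into a single RunFilterParameters request body. A hedged sketch of a typical call: timezone-aware datetimes serialize cleanly to the ISO 8601 window, and the RunQueryFilter/RunQueryOrderBy field values follow the public azure-mgmt-datafactory models (the vendored import path and client attribute are assumptions):

import datetime
from data_factory_management_client import models  # vendored namespace; path assumed

async def failed_runs_last_day(client, rg, factory):
    now = datetime.datetime.now(datetime.timezone.utc)
    # Failed runs updated in the last 24 hours, newest first.
    return await client.pipeline_run.query_by_factory(
        rg,
        factory,
        last_updated_after=now - datetime.timedelta(days=1),
        last_updated_before=now,
        filters=[models.RunQueryFilter(operand="Status", operator="Equals", values=["Failed"])],
        order_by=[models.RunQueryOrderBy(order_by="RunEnd", order="DESC")],
    )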
- - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param run_id: The pipeline run identifier. - :type run_id: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: PipelineRun, or the result of cls(response) - :rtype: ~data_factory_management_client.models.PipelineRun - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineRun"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'runId': self._serialize.url("run_id", run_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('PipelineRun', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}'} # type: ignore - - async def cancel( - self, - resource_group_name: str, - factory_name: str, - run_id: str, - is_recursive: Optional[bool] = None, - **kwargs - ) -> None: - """Cancel a pipeline run by its run ID. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param run_id: The pipeline run identifier. - :type run_id: str - :param is_recursive: If true, cancel all the Child pipelines that are triggered by the current - pipeline. 
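Cancellation is a single POST; isRecursive rides along as a query parameter and extends the cancel to any child pipelines the run triggered. A one-call sketch under the same naming assumptions:

async def cancel_run_tree(client, rg, factory, run_id):
    # The service answers 200 with no body; the SDK returns None.
    await client.pipeline_run.cancel(rg, factory, run_id, is_recursive=True)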
- :type is_recursive: bool - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.cancel.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'runId': self._serialize.url("run_id", run_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if is_recursive is not None: - query_parameters['isRecursive'] = self._serialize.query("is_recursive", is_recursive, 'bool') - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - cancel.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/cancel'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_operations_async.py deleted file mode 100644 index f4669b45bc2..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_operations_async.py +++ /dev/null @@ -1,877 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
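Taken together, the pipeline-run operations above support a simple monitoring loop: start a run, then poll get() until the status is terminal. A sketch under the same naming assumptions (PipelineRun.status is a plain string such as 'InProgress' or 'Succeeded' in the public models):

import asyncio

TERMINAL = {"Succeeded", "Failed", "Cancelled"}

async def run_and_wait(client, rg, factory, pipeline_name, poll_seconds=15):
    run_id = (await client.pipeline.create_run(rg, factory, pipeline_name)).run_id
    while True:
        run = await client.pipeline_run.get(rg, factory, run_id)
        if run.status in TERMINAL:
            return run
        await asyncio.sleep(poll_seconds)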
-# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union -import warnings - -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.mgmt.core.exceptions import ARMErrorFormat -from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling - -from ... import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class TriggerOperations: - """TriggerOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_factory( - self, - resource_group_name: str, - factory_name: str, - **kwargs - ) -> AsyncIterable["models.TriggerListResponse"]: - """Lists triggers. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
- :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either TriggerListResponse or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.TriggerListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - async def extract_data(pipeline_response): - deserialized = self._deserialize('TriggerListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers'} # type: ignore - - async def query_by_factory( - self, - resource_group_name: str, - factory_name: str, - continuation_token_parameter: Optional[str] = None, - parent_trigger_name: Optional[str] = None, - **kwargs - ) -> "models.TriggerQueryResponse": - """Query triggers. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param continuation_token_parameter: The continuation token for getting the next page of - results. Null for first page. - :type continuation_token_parameter: str - :param parent_trigger_name: The name of the parent TumblingWindowTrigger to get the child rerun - triggers. 
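query_by_factory is the non-paged counterpart of list_by_factory: it posts a TriggerFilterParameters body and returns one page plus a continuation token, which the caller must thread through manually. A sketch that walks the rerun children of a tumbling window trigger (naming assumptions as before; TriggerQueryResponse exposes value and continuation_token in the public models):

async def list_rerun_children(client, rg, factory, parent_name):
    token = None
    while True:
        page = await client.trigger.query_by_factory(
            rg,
            factory,
            continuation_token_parameter=token,
            parent_trigger_name=parent_name,
        )
        for trigger in page.value:
            print(trigger.name)
        token = page.continuation_token
        if not token:  # no token means the last page
            return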
- :type parent_trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerQueryResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.TriggerQueryResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - filter_parameters = models.TriggerFilterParameters(continuation_token=continuation_token_parameter, parent_trigger_name=parent_trigger_name) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.query_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(filter_parameters, 'TriggerFilterParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('TriggerQueryResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/querytriggers'} # type: ignore - - async def create_or_update( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - properties: "models.Trigger", - if_match: Optional[str] = None, - **kwargs - ) -> "models.TriggerResource": - """Creates or updates a trigger. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :param properties: Properties of the trigger. - :type properties: ~data_factory_management_client.models.Trigger - :param if_match: ETag of the trigger entity. Should only be specified for update, for which it - should match existing entity or can be * for unconditional update. 
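Note that this create_or_update takes the bare Trigger properties and wraps them into a TriggerResource itself before serialization, as the body below shows. A sketch of upserting an hourly schedule trigger; the ScheduleTrigger/ScheduleTriggerRecurrence shapes follow the public azure-mgmt-datafactory models and should be treated as assumptions here:

import datetime
from data_factory_management_client import models  # vendored namespace; path assumed

async def upsert_hourly_trigger(client, rg, factory, trigger_name, pipeline_name):
    properties = models.ScheduleTrigger(
        recurrence=models.ScheduleTriggerRecurrence(
            frequency="Hour",
            interval=1,
            start_time=datetime.datetime.now(datetime.timezone.utc),
            time_zone="UTC",
        ),
        pipelines=[models.TriggerPipelineReference(
            pipeline_reference=models.PipelineReference(reference_name=pipeline_name),
        )],
    )
    # if_match left unset: create the trigger, or update it unconditionally.
    return await client.trigger.create_or_update(rg, factory, trigger_name, properties)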
- :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.TriggerResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - trigger = models.TriggerResource(properties=properties) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(trigger, 'TriggerResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('TriggerResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore - - async def get( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - if_none_match: Optional[str] = None, - **kwargs - ) -> Optional["models.TriggerResource"]: - """Gets a trigger. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :param if_none_match: ETag of the trigger entity. Should only be specified for get. If the ETag - matches the existing entity tag, or if * was provided, then no content will be returned. 
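Unlike most operations in this file, get accepts 304 and hands back None when the entity is unchanged, so an ETag-aware read can skip deserialization entirely. A small caching sketch (resources inherit a server-populated .etag in the public models; the client attribute name is assumed):

async def refresh_trigger(client, rg, factory, name, cached=None):
    etag = cached.etag if cached is not None else None
    fresh = await client.trigger.get(rg, factory, name, if_none_match=etag)
    # None means HTTP 304: the cached copy is still current.
    return cached if fresh is None else fresh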
- :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.TriggerResource or None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 304]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('TriggerResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore - - async def delete( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs - ) -> None: - """Deletes a trigger. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. 
- :type trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore - - async def _subscribe_to_event_initial( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs - ) -> Optional["models.TriggerSubscriptionOperationStatus"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self._subscribe_to_event_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - 
request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - _subscribe_to_event_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore - - async def begin_subscribe_to_event( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs - ) -> AsyncLROPoller["models.TriggerSubscriptionOperationStatus"]: - """Subscribe event trigger to events. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
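The begin_* wrapper turns the 200/202 initial call above into an AsyncLROPoller that polls until the subscription operation completes. A usage sketch; the vendored import path, client constructor, and attribute name are assumptions, while the credential flow follows the standard azure-identity pattern:

from azure.identity.aio import DefaultAzureCredential
from data_factory_management_client.aio import DataFactoryManagementClient  # path assumed

async def subscribe(rg, factory, trigger_name, subscription_id):
    async with DefaultAzureCredential() as credential:
        async with DataFactoryManagementClient(credential, subscription_id) as client:
            poller = await client.trigger.begin_subscribe_to_event(rg, factory, trigger_name)
            # result() awaits completion and deserializes the final response.
            status = await poller.result()  # TriggerSubscriptionOperationStatus
            return status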
- :return: An instance of AsyncLROPoller that returns either TriggerSubscriptionOperationStatus or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.TriggerSubscriptionOperationStatus] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = await self._subscribe_to_event_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_subscribe_to_event.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore - - async def get_event_subscription_status( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs - ) -> "models.TriggerSubscriptionOperationStatus": - """Get a trigger's event subscription status. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. 
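get_event_subscription_status is the point-in-time probe behind that poller; its status field moves through values like 'Provisioning' toward 'Enabled'. A sketch that waits for activation (the status strings follow the public EventSubscriptionStatus enum; an assumption here):

import asyncio

async def wait_until_enabled(client, rg, factory, trigger_name, poll_seconds=10):
    while True:
        status = await client.trigger.get_event_subscription_status(rg, factory, trigger_name)
        if status.status == "Enabled":
            return status
        await asyncio.sleep(poll_seconds)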
- :type trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerSubscriptionOperationStatus, or the result of cls(response) - :rtype: ~data_factory_management_client.models.TriggerSubscriptionOperationStatus - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get_event_subscription_status.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get_event_subscription_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/getEventSubscriptionStatus'} # type: ignore - - async def _unsubscribe_from_event_initial( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs - ) -> Optional["models.TriggerSubscriptionOperationStatus"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self._unsubscribe_from_event_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, 
pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - _unsubscribe_from_event_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore - - async def begin_unsubscribe_from_event( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs - ) -> AsyncLROPoller["models.TriggerSubscriptionOperationStatus"]: - """Unsubscribe event trigger from events. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
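For orientation, the docstring above describes the standard azure-core async LRO contract: every begin_* method here returns an AsyncLROPoller, and result() awaits the terminal state. A minimal caller-side sketch, assuming an already-constructed client from this vendored SDK and that the trigger operation group is exposed as client.trigger (the attribute name is an assumption, not confirmed by this diff):

# Hypothetical usage sketch for the begin_* trigger operations above.
async def unsubscribe_trigger(client, rg: str, factory: str, trigger: str) -> None:
    poller = await client.trigger.begin_unsubscribe_from_event(
        resource_group_name=rg,
        factory_name=factory,
        trigger_name=trigger,
    )
    # result() polls until the LRO reaches a terminal state and returns the
    # deserialized TriggerSubscriptionOperationStatus.
    status = await poller.result()
    print(status.status)
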
- :return: An instance of AsyncLROPoller that returns either TriggerSubscriptionOperationStatus or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.TriggerSubscriptionOperationStatus] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = await self._unsubscribe_from_event_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_unsubscribe_from_event.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore - - async def _start_initial( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs - ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self._start_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise 
HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start'} # type: ignore - - async def begin_start( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs - ) -> AsyncLROPoller[None]: - """Starts a trigger. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = await self._start_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - if cls: - return cls(pipeline_response, None, {}) - - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start'} # type: ignore - - async def _stop_initial( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs - ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self._stop_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', 
max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop'} # type: ignore - - async def begin_stop( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - **kwargs - ) -> AsyncLROPoller[None]: - """Stops a trigger. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
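The continuation_token branch implemented in each of these pollers lets a caller detach from an in-flight operation and resume it later, possibly from another process. A sketch of that round trip, under the same client-attribute assumptions as the earlier example:

# Persist the poller state, then resume without re-issuing the initial POST.
async def start_and_detach(client, rg: str, factory: str, trigger: str) -> str:
    poller = await client.trigger.begin_start(
        resource_group_name=rg, factory_name=factory, trigger_name=trigger,
    )
    return poller.continuation_token()  # opaque string, safe to persist

async def resume_start(client, rg: str, factory: str, trigger: str, token: str) -> None:
    # Passing continuation_token skips _start_initial and rebuilds the poller
    # via AsyncLROPoller.from_continuation_token, exactly as coded above.
    poller = await client.trigger.begin_start(
        resource_group_name=rg, factory_name=factory, trigger_name=trigger,
        continuation_token=token,
    )
    await poller.result()
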
- :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = await self._stop_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - if cls: - return cls(pipeline_response, None, {}) - - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_run_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_run_operations_async.py deleted file mode 100644 index 3401f9c95c1..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_run_operations_async.py +++ /dev/null @@ -1,241 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -import datetime -from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class TriggerRunOperations: - """TriggerRunOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. 
- :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - async def rerun( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - run_id: str, - **kwargs - ) -> None: - """Rerun a single trigger instance by runId. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :param run_id: The trigger run identifier. - :type run_id: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.rerun.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - 'runId': self._serialize.url("run_id", run_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - rerun.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/rerun'} # type: ignore - - async def cancel( - self, - resource_group_name: str, - factory_name: str, - trigger_name: str, - run_id: str, - **kwargs - ) -> None: - """Cancel a single trigger instance by runId. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :param run_id: The trigger run identifier.
- :type run_id: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.cancel.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - 'runId': self._serialize.url("run_id", run_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - cancel.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/cancel'} # type: ignore - - async def query_by_factory( - self, - resource_group_name: str, - factory_name: str, - last_updated_after: datetime.datetime, - last_updated_before: datetime.datetime, - continuation_token_parameter: Optional[str] = None, - filters: Optional[List["models.RunQueryFilter"]] = None, - order_by: Optional[List["models.RunQueryOrderBy"]] = None, - **kwargs - ) -> "models.TriggerRunsQueryResponse": - """Query trigger runs. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param last_updated_after: The time at or after which the run event was updated in 'ISO 8601' - format. - :type last_updated_after: ~datetime.datetime - :param last_updated_before: The time at or before which the run event was updated in 'ISO 8601' - format. - :type last_updated_before: ~datetime.datetime - :param continuation_token_parameter: The continuation token for getting the next page of - results. Null for first page. - :type continuation_token_parameter: str - :param filters: List of filters. - :type filters: list[~data_factory_management_client.models.RunQueryFilter] - :param order_by: List of OrderBy option. 
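Taken together, query_by_factory plus rerun support a "retry everything that failed" loop. A hedged sketch: trigger_run as the operation-group attribute and the "Status"/"Equals"/"Failed" filter values are assumptions based on the run-query models, not confirmed by this diff:

import datetime

async def rerun_failed(client, models, rg: str, factory: str) -> None:
    now = datetime.datetime.utcnow()
    page = await client.trigger_run.query_by_factory(
        resource_group_name=rg,
        factory_name=factory,
        last_updated_after=now - datetime.timedelta(days=1),
        last_updated_before=now,
        filters=[models.RunQueryFilter(
            operand="Status", operator="Equals", values=["Failed"],
        )],
    )
    for run in page.value:
        # rerun() takes the trigger run's own identifier.
        await client.trigger_run.rerun(
            resource_group_name=rg,
            factory_name=factory,
            trigger_name=run.trigger_name,
            run_id=run.trigger_run_id,
        )
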
- :type order_by: list[~data_factory_management_client.models.RunQueryOrderBy] - :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerRunsQueryResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.TriggerRunsQueryResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerRunsQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.query_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('TriggerRunsQueryResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryTriggerRuns'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py index 1f1ab102631..915283971fa 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py @@ -390,12 +390,14 @@ from ._models_py3 import MarketoLinkedService from ._models_py3 import MarketoObjectDataset from ._models_py3 import MarketoSource + from ._models_py3 import MetadataItem from ._models_py3 import MicrosoftAccessLinkedService from ._models_py3 import MicrosoftAccessSink from ._models_py3 import MicrosoftAccessSource 
from ._models_py3 import MicrosoftAccessTableDataset from ._models_py3 import MongoDbAtlasCollectionDataset from ._models_py3 import MongoDbAtlasLinkedService + from ._models_py3 import MongoDbAtlasSink from ._models_py3 import MongoDbAtlasSource from ._models_py3 import MongoDbCollectionDataset from ._models_py3 import MongoDbCursorMethodsProperties @@ -403,6 +405,7 @@ from ._models_py3 import MongoDbSource from ._models_py3 import MongoDbV2CollectionDataset from ._models_py3 import MongoDbV2LinkedService + from ._models_py3 import MongoDbV2Sink from ._models_py3 import MongoDbV2Source from ._models_py3 import MultiplePipelineTrigger from ._models_py3 import MySqlLinkedService @@ -1055,12 +1058,14 @@ from ._models import MarketoLinkedService # type: ignore from ._models import MarketoObjectDataset # type: ignore from ._models import MarketoSource # type: ignore + from ._models import MetadataItem # type: ignore from ._models import MicrosoftAccessLinkedService # type: ignore from ._models import MicrosoftAccessSink # type: ignore from ._models import MicrosoftAccessSource # type: ignore from ._models import MicrosoftAccessTableDataset # type: ignore from ._models import MongoDbAtlasCollectionDataset # type: ignore from ._models import MongoDbAtlasLinkedService # type: ignore + from ._models import MongoDbAtlasSink # type: ignore from ._models import MongoDbAtlasSource # type: ignore from ._models import MongoDbCollectionDataset # type: ignore from ._models import MongoDbCursorMethodsProperties # type: ignore @@ -1068,6 +1073,7 @@ from ._models import MongoDbSource # type: ignore from ._models import MongoDbV2CollectionDataset # type: ignore from ._models import MongoDbV2LinkedService # type: ignore + from ._models import MongoDbV2Sink # type: ignore from ._models import MongoDbV2Source # type: ignore from ._models import MultiplePipelineTrigger # type: ignore from ._models import MySqlLinkedService # type: ignore @@ -1356,7 +1362,6 @@ DependencyCondition, DynamicsAuthenticationType, DynamicsDeploymentType, - DynamicsServicePrincipalCredentialType, DynamicsSinkWriteBehavior, EventSubscriptionStatus, FactoryIdentityType, @@ -1410,6 +1415,7 @@ SapTablePartitionOption, SelfHostedIntegrationRuntimeNodeStatus, ServiceNowAuthenticationType, + ServicePrincipalCredentialType, SftpAuthenticationType, SparkAuthenticationType, SparkServerType, @@ -1816,12 +1822,14 @@ 'MarketoLinkedService', 'MarketoObjectDataset', 'MarketoSource', + 'MetadataItem', 'MicrosoftAccessLinkedService', 'MicrosoftAccessSink', 'MicrosoftAccessSource', 'MicrosoftAccessTableDataset', 'MongoDbAtlasCollectionDataset', 'MongoDbAtlasLinkedService', + 'MongoDbAtlasSink', 'MongoDbAtlasSource', 'MongoDbCollectionDataset', 'MongoDbCursorMethodsProperties', @@ -1829,6 +1837,7 @@ 'MongoDbSource', 'MongoDbV2CollectionDataset', 'MongoDbV2LinkedService', + 'MongoDbV2Sink', 'MongoDbV2Source', 'MultiplePipelineTrigger', 'MySqlLinkedService', @@ -2115,7 +2124,6 @@ 'DependencyCondition', 'DynamicsAuthenticationType', 'DynamicsDeploymentType', - 'DynamicsServicePrincipalCredentialType', 'DynamicsSinkWriteBehavior', 'EventSubscriptionStatus', 'FactoryIdentityType', @@ -2169,6 +2177,7 @@ 'SapTablePartitionOption', 'SelfHostedIntegrationRuntimeNodeStatus', 'ServiceNowAuthenticationType', + 'ServicePrincipalCredentialType', 'SftpAuthenticationType', 'SparkAuthenticationType', 'SparkServerType', diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py 
b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py index 1e1c0d92c7d..e5d4b5d0f99 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py @@ -77,14 +77,16 @@ class CassandraSourceReadConsistencyLevels(with_metaclass(_CaseInsensitiveEnumMe LOCAL_SERIAL = "LOCAL_SERIAL" class CompressionCodec(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """All available compressionCodec values. + """ NONE = "none" - GZIP = "gzip" - SNAPPY = "snappy" LZO = "lzo" BZIP2 = "bzip2" + GZIP = "gzip" DEFLATE = "deflate" ZIP_DEFLATE = "zipDeflate" + SNAPPY = "snappy" LZ4 = "lz4" TAR = "tar" TAR_G_ZIP = "tarGZip" @@ -174,9 +176,7 @@ class DependencyCondition(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): COMPLETED = "Completed" class DynamicsAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """The authentication type to connect to Dynamics server. 'Office365' for online scenario, 'Ifd' - for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in - online scenario. Type: string (or Expression with resultType string). + """All available dynamicsAuthenticationType values. """ OFFICE365 = "Office365" @@ -184,23 +184,12 @@ class DynamicsAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, E AAD_SERVICE_PRINCIPAL = "AADServicePrincipal" class DynamicsDeploymentType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """The deployment type of the Dynamics instance. 'Online' for Dynamics Online and - 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or Expression with - resultType string). + """All available dynamicsDeploymentType values. """ ONLINE = "Online" ON_PREMISES_WITH_IFD = "OnPremisesWithIfd" -class DynamicsServicePrincipalCredentialType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """The service principal credential type to use in Server-To-Server authentication. - 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or - Expression with resultType string). - """ - - SERVICE_PRINCIPAL_KEY = "ServicePrincipalKey" - SERVICE_PRINCIPAL_CERT = "ServicePrincipalCert" - class DynamicsSinkWriteBehavior(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Defines values for DynamicsSinkWriteBehavior. """ @@ -267,7 +256,7 @@ class HBaseAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum BASIC = "Basic" class HdiNodeTypes(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """The node types on which the script action should be executed. + """All available HdiNodeTypes values. """ HEADNODE = "Headnode" @@ -352,6 +341,7 @@ class IntegrationRuntimeEntityReferenceType(with_metaclass(_CaseInsensitiveEnumM INTEGRATION_RUNTIME_REFERENCE = "IntegrationRuntimeReference" LINKED_SERVICE_REFERENCE = "LinkedServiceReference" + CREDENTIAL_REFERENCE = "CredentialReference" class IntegrationRuntimeInternalChannelEncryptionMode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """It is used to set the encryption mode for node-node communication channel (when more than 2 @@ -417,8 +407,7 @@ class JsonFormatFilePattern(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)) ARRAY_OF_OBJECTS = "arrayOfObjects" class JsonWriteFilePattern(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """File pattern of JSON. 
This setting controls the way a collection of JSON objects will be - treated. The default value is 'setOfObjects'. It is case-sensitive. + """All available filePatterns. """ SET_OF_OBJECTS = "setOfObjects" @@ -661,6 +650,13 @@ class ServiceNowAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, BASIC = "Basic" O_AUTH2 = "OAuth2" +class ServicePrincipalCredentialType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """All available servicePrincipalCredentialType values. + """ + + SERVICE_PRINCIPAL_KEY = "ServicePrincipalKey" + SERVICE_PRINCIPAL_CERT = "ServicePrincipalCert" + class SftpAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication type to be used to connect to the FTP server. """ diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py index e97fd0ab305..ae5284391fd 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py @@ -634,6 +634,9 @@ class CopySource(msrest.serialization.Model): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object """ _validation = { @@ -646,6 +649,7 @@ class CopySource(msrest.serialization.Model): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, } _subtype_map = { @@ -662,6 +666,7 @@ def __init__( self.source_retry_count = kwargs.get('source_retry_count', None) self.source_retry_wait = kwargs.get('source_retry_wait', None) self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) + self.disable_metrics_collection = kwargs.get('disable_metrics_collection', None) class TabularSource(CopySource): @@ -686,6 +691,9 @@ class TabularSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
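Back in the enums hunk above: these generated enums use a case-insensitive metaclass, and the removed Dynamics-specific credential enum is replaced by the more general ServicePrincipalCredentialType with the same two members. A small illustration of the case-insensitive lookup (the import path is an assumption based on this repo's layout):

# Assumed import path, following the vendored-SDK layout in this diff.
from azext_datafactory.vendored_sdks.datafactory import models

# _CaseInsensitiveEnumMeta makes member-name lookups case-insensitive:
assert models.CompressionCodec["gzip"] is models.CompressionCodec.GZIP
assert (models.ServicePrincipalCredentialType["service_principal_cert"]
        is models.ServicePrincipalCredentialType.SERVICE_PRINCIPAL_CERT)
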
:type query_timeout: object @@ -704,6 +712,7 @@ class TabularSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -741,6 +750,9 @@ class AmazonMwsSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -762,6 +774,7 @@ class AmazonMwsSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -868,6 +881,9 @@ class AmazonRedshiftSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -892,6 +908,7 @@ class AmazonRedshiftSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -1156,6 +1173,9 @@ class StoreReadSettings(msrest.serialization.Model): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
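A pattern worth calling out: this hunk threads the new disableMetricsCollection property through every CopySource, CopySink, StoreReadSettings, and StoreWriteSettings subtype. In pipeline JSON it is an optional boolean-or-expression sitting next to maxConcurrentConnections; a sketch of the payload shape, with property names taken from the serialization keys in this diff:

# Copy-activity source payload sketch; keys mirror the serialization maps above.
blob_source = {
    "type": "BlobSource",
    "maxConcurrentConnections": 4,
    # New optional flag: disable data store metrics collection (default false).
    "disableMetricsCollection": True,
}

# Because the field is typed `object`, an ADF expression is also accepted:
blob_source["disableMetricsCollection"] = {
    "type": "Expression",
    "value": "@pipeline().parameters.disableMetrics",
}
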
+ :type disable_metrics_collection: object """ _validation = { @@ -1166,6 +1186,7 @@ class StoreReadSettings(msrest.serialization.Model): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, } _subtype_map = { @@ -1180,6 +1201,7 @@ def __init__( self.additional_properties = kwargs.get('additional_properties', None) self.type = 'StoreReadSettings' # type: str self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) + self.disable_metrics_collection = kwargs.get('disable_metrics_collection', None) class AmazonS3CompatibleReadSettings(StoreReadSettings): @@ -1195,6 +1217,9 @@ class AmazonS3CompatibleReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -1235,6 +1260,7 @@ class AmazonS3CompatibleReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -1490,6 +1516,9 @@ class AmazonS3ReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -1530,6 +1559,7 @@ class AmazonS3ReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -1664,10 +1694,9 @@ class AvroDataset(Dataset): :type folder: ~data_factory_management_client.models.DatasetFolder :param location: The location of the avro storage. :type location: ~data_factory_management_client.models.DatasetLocation - :param avro_compression_codec: Possible values include: "none", "deflate", "snappy", "xz", - "bzip2". 
- :type avro_compression_codec: str or - ~data_factory_management_client.models.AvroCompressionCodec + :param avro_compression_codec: The data avroCompressionCodec. Type: string (or Expression with + resultType string). + :type avro_compression_codec: object :param avro_compression_level: :type avro_compression_level: int """ @@ -1689,7 +1718,7 @@ class AvroDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'str'}, + 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'object'}, 'avro_compression_level': {'key': 'typeProperties.avroCompressionLevel', 'type': 'int'}, } @@ -1788,7 +1817,7 @@ class CopySink(msrest.serialization.Model): """A copy activity sink. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroSink, AzureBlobFsSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureDatabricksDeltaLakeSink, AzureMySqlSink, AzurePostgreSqlSink, AzureQueueSink, AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDbMongoDbApiSink, CosmosDbSqlApiSink, DelimitedTextSink, DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SqlDwSink, SqlMiSink, SqlServerSink, SqlSink. + sub-classes are: AvroSink, AzureBlobFsSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureDatabricksDeltaLakeSink, AzureMySqlSink, AzurePostgreSqlSink, AzureQueueSink, AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDbMongoDbApiSink, CosmosDbSqlApiSink, DelimitedTextSink, DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, MongoDbAtlasSink, MongoDbV2Sink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SqlDwSink, SqlMiSink, SqlServerSink, SqlSink. All required parameters must be populated in order to send to Azure. @@ -1812,6 +1841,9 @@ class CopySink(msrest.serialization.Model): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
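This hunk is the change named in the PR title: avroCompressionCodec loosens from the AvroCompressionCodec enum to object (orcCompressionCodec gets the same treatment elsewhere in the diff), so the codec can be parameterized instead of fixed at authoring time. A sketch of the two payload shapes the new type admits:

# Dataset typeProperties sketch for an Avro dataset.
avro_type_properties = {
    "location": {"type": "AzureBlobStorageLocation", "container": "data"},
    # Still valid: a literal codec string, as the old enum allowed.
    "avroCompressionCodec": "snappy",
}

# Newly possible with the object type: resolve the codec at runtime.
avro_type_properties["avroCompressionCodec"] = {
    "type": "Expression",
    "value": "@dataset().compressionCodec",
}
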
+ :type disable_metrics_collection: object """ _validation = { @@ -1826,10 +1858,11 @@ class CopySink(msrest.serialization.Model): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, } _subtype_map = { - 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFsSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureDatabricksDeltaLakeSink': 'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSqlSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'RestSink': 'RestSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDwSink', 'SqlMISink': 'SqlMiSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink'} + 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFsSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureDatabricksDeltaLakeSink': 'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSqlSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'MongoDbAtlasSink': 'MongoDbAtlasSink', 'MongoDbV2Sink': 'MongoDbV2Sink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'RestSink': 'RestSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDwSink', 'SqlMISink': 'SqlMiSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink'} } def __init__( @@ -1844,6 +1877,7 @@ def __init__( self.sink_retry_count = kwargs.get('sink_retry_count', None) self.sink_retry_wait = kwargs.get('sink_retry_wait', None) self.max_concurrent_connections = 
kwargs.get('max_concurrent_connections', None) + self.disable_metrics_collection = kwargs.get('disable_metrics_collection', None) class AvroSink(CopySink): @@ -1871,6 +1905,9 @@ class AvroSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Avro store settings. :type store_settings: ~data_factory_management_client.models.StoreWriteSettings :param format_settings: Avro format settings. @@ -1889,6 +1926,7 @@ class AvroSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'AvroWriteSettings'}, } @@ -1922,6 +1960,9 @@ class AvroSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Avro store settings. :type store_settings: ~data_factory_management_client.models.StoreReadSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: @@ -1939,6 +1980,7 @@ class AvroSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -2467,6 +2509,9 @@ class AzureBlobFsReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). 
:type recursive: object @@ -2504,6 +2549,7 @@ class AzureBlobFsReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -2557,8 +2603,14 @@ class AzureBlobFsSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object + :param metadata: Specify the custom metadata to be added to sink data. Type: array of objects + (or Expression with resultType array of objects). + :type metadata: list[~data_factory_management_client.models.MetadataItem] """ _validation = { @@ -2573,7 +2625,9 @@ class AzureBlobFsSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'metadata': {'key': 'metadata', 'type': '[MetadataItem]'}, } def __init__( @@ -2583,6 +2637,7 @@ def __init__( super(AzureBlobFsSink, self).__init__(**kwargs) self.type = 'AzureBlobFSSink' # type: str self.copy_behavior = kwargs.get('copy_behavior', None) + self.metadata = kwargs.get('metadata', None) class AzureBlobFsSource(CopySource): @@ -2604,6 +2659,9 @@ class AzureBlobFsSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType boolean). :type treat_empty_as_null: object @@ -2625,6 +2683,7 @@ class AzureBlobFsSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, @@ -2657,6 +2716,9 @@ class StoreWriteSettings(msrest.serialization.Model): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
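MetadataItem, added to the model exports earlier in this diff, is the element type of the new metadata list on blob sinks such as AzureBlobFSSink: name/value pairs (both typed object, so expressions work) attached to the data the sink writes. A payload sketch:

# Copy-activity sink payload sketch with the new metadata property.
blob_fs_sink = {
    "type": "AzureBlobFSSink",
    "copyBehavior": "PreserveHierarchy",
    "metadata": [
        {"name": "source", "value": "adf-copy"},
        # Values are typed `object`, so expressions are allowed too:
        {"name": "ingestedOn", "value": {"type": "Expression", "value": "@utcnow()"}},
    ],
}
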
:type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object """ @@ -2669,6 +2731,7 @@ class StoreWriteSettings(msrest.serialization.Model): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } @@ -2684,6 +2747,7 @@ def __init__( self.additional_properties = kwargs.get('additional_properties', None) self.type = 'StoreWriteSettings' # type: str self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) + self.disable_metrics_collection = kwargs.get('disable_metrics_collection', None) self.copy_behavior = kwargs.get('copy_behavior', None) @@ -2700,6 +2764,9 @@ class AzureBlobFsWriteSettings(StoreWriteSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object :param block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer @@ -2715,6 +2782,7 @@ class AzureBlobFsWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, } @@ -2881,6 +2949,9 @@ class AzureBlobStorageReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). 
:type recursive: object @@ -2921,6 +2992,7 @@ class AzureBlobStorageReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -2964,6 +3036,9 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object :param block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer @@ -2979,6 +3054,7 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, } @@ -3296,6 +3372,9 @@ class AzureDatabricksDeltaLakeSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). :type pre_copy_script: object @@ -3316,6 +3395,7 @@ class AzureDatabricksDeltaLakeSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'import_settings': {'key': 'importSettings', 'type': 'AzureDatabricksDeltaLakeImportCommand'}, } @@ -3349,6 +3429,9 @@ class AzureDatabricksDeltaLakeSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: Azure Databricks Delta Lake Sql query. Type: string (or Expression with resultType string). 
:type query: object @@ -3367,6 +3450,7 @@ class AzureDatabricksDeltaLakeSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'export_settings': {'key': 'exportSettings', 'type': 'AzureDatabricksDeltaLakeExportCommand'}, } @@ -3734,6 +3818,9 @@ class AzureDataExplorerSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param ingestion_mapping_name: A name of a pre-created csv mapping that was defined on the target Kusto table. Type: string. :type ingestion_mapping_name: object @@ -3757,6 +3844,7 @@ class AzureDataExplorerSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'}, 'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'}, 'flush_immediately': {'key': 'flushImmediately', 'type': 'object'}, @@ -3792,6 +3880,9 @@ class AzureDataExplorerSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: Required. Database query. Should be a Kusto Query Language (KQL) query. Type: string (or Expression with resultType string). :type query: object @@ -3817,6 +3908,7 @@ class AzureDataExplorerSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'no_truncation': {'key': 'noTruncation', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, @@ -4190,6 +4282,9 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). 
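AzureDataExplorerSource keeps query as its only required type-specific property (a KQL statement, per the docstring above); a sketch of the new flag alongside it:

    from azext_datafactory.vendored_sdks.datafactory import models  # import path assumed

    adx_source = models.AzureDataExplorerSource(
        query='StormEvents | take 100',    # required; Kusto Query Language
        no_truncation=True,
        disable_metrics_collection=False,
    )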
:type recursive: object @@ -4235,6 +4330,7 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -4292,6 +4388,9 @@ class AzureDataLakeStoreSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object :param enable_adls_single_file_parallel: Single File Parallel. @@ -4310,6 +4409,7 @@ class AzureDataLakeStoreSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'}, } @@ -4343,6 +4443,9 @@ class AzureDataLakeStoreSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -4358,6 +4461,7 @@ class AzureDataLakeStoreSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, } @@ -4383,6 +4487,9 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object :param expiry_date_time: Specifies the expiry time of the written files. 
The time is applied to @@ -4399,6 +4506,7 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'expiry_date_time': {'key': 'expiryDateTime', 'type': 'object'}, } @@ -4550,6 +4658,9 @@ class AzureFileStorageReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -4590,6 +4701,7 @@ class AzureFileStorageReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -4633,6 +4745,9 @@ class AzureFileStorageWriteSettings(StoreWriteSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object """ @@ -4645,6 +4760,7 @@ class AzureFileStorageWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } @@ -4979,6 +5095,9 @@ class AzureMariaDbSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -5000,6 +5119,7 @@ class AzureMariaDbSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -5580,6 +5700,9 @@ class AzureMySqlSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). :type pre_copy_script: object @@ -5597,6 +5720,7 @@ class AzureMySqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -5628,6 +5752,9 @@ class AzureMySqlSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -5648,6 +5775,7 @@ class AzureMySqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -5807,6 +5935,9 @@ class AzurePostgreSqlSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). 
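The queryTimeout pattern repeated through these docstrings, ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])), is the usual ADF timespan string: an optional day count, then hh:mm:ss. A sketch against AzureMySqlSource:

    from azext_datafactory.vendored_sdks.datafactory import models  # import path assumed

    mysql_source = models.AzureMySqlSource(
        query='SELECT * FROM sales',
        query_timeout='02:00:00',          # '1.02:00:00' would add a day component
        disable_metrics_collection=True,
    )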
:type pre_copy_script: object @@ -5824,6 +5955,7 @@ class AzurePostgreSqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -5855,6 +5987,9 @@ class AzurePostgreSqlSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -5876,6 +6011,7 @@ class AzurePostgreSqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -5984,6 +6120,9 @@ class AzureQueueSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object """ _validation = { @@ -5998,6 +6137,7 @@ class AzureQueueSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, } def __init__( @@ -6093,6 +6233,9 @@ class AzureSearchIndexSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: Specify the write behavior when upserting documents into Azure Search Index. Possible values include: "Merge", "Upload". 
:type write_behavior: str or @@ -6111,6 +6254,7 @@ class AzureSearchIndexSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, } @@ -6587,6 +6731,9 @@ class AzureSqlSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). :type sql_writer_stored_procedure_name: object @@ -6619,6 +6766,7 @@ class AzureSqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, @@ -6660,6 +6808,9 @@ class AzureSqlSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -6695,6 +6846,7 @@ class AzureSqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, @@ -6938,6 +7090,9 @@ class AzureTableSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param azure_table_default_partition_key_value: Azure Table default partition key value. Type: string (or Expression with resultType string). 
:type azure_table_default_partition_key_value: object @@ -6964,6 +7119,7 @@ class AzureTableSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'azure_table_default_partition_key_value': {'key': 'azureTableDefaultPartitionKeyValue', 'type': 'object'}, 'azure_table_partition_key_name': {'key': 'azureTablePartitionKeyName', 'type': 'object'}, 'azure_table_row_key_name': {'key': 'azureTableRowKeyName', 'type': 'object'}, @@ -7001,6 +7157,9 @@ class AzureTableSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -7025,6 +7184,7 @@ class AzureTableSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'}, @@ -7263,6 +7423,9 @@ class BinarySink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Binary store settings. :type store_settings: ~data_factory_management_client.models.StoreWriteSettings """ @@ -7279,6 +7442,7 @@ class BinarySink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, } @@ -7310,6 +7474,9 @@ class BinarySource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Binary store settings. :type store_settings: ~data_factory_management_client.models.StoreReadSettings :param format_settings: Binary format settings. 
@@ -7326,6 +7493,7 @@ class BinarySource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'BinaryReadSettings'}, } @@ -7543,6 +7711,9 @@ class BlobSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param blob_writer_overwrite_files: Blob writer overwrite files. Type: boolean (or Expression with resultType boolean). :type blob_writer_overwrite_files: object @@ -7554,6 +7725,9 @@ class BlobSink(CopySink): :type blob_writer_add_header: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object + :param metadata: Specify the custom metadata to be added to sink data. Type: array of objects + (or Expression with resultType array of objects). + :type metadata: list[~data_factory_management_client.models.MetadataItem] """ _validation = { @@ -7568,10 +7742,12 @@ class BlobSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'}, 'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'}, 'blob_writer_add_header': {'key': 'blobWriterAddHeader', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'metadata': {'key': 'metadata', 'type': '[MetadataItem]'}, } def __init__( @@ -7584,6 +7760,7 @@ def __init__( self.blob_writer_date_time_format = kwargs.get('blob_writer_date_time_format', None) self.blob_writer_add_header = kwargs.get('blob_writer_add_header', None) self.copy_behavior = kwargs.get('copy_behavior', None) + self.metadata = kwargs.get('metadata', None) class BlobSource(CopySource): @@ -7605,6 +7782,9 @@ class BlobSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType boolean). 
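BinarySource is one of the types that composes the settings models above: store_settings accepts any StoreReadSettings subtype, so the new flag can be set on the settings object as well as on the source itself. A sketch:

    from azext_datafactory.vendored_sdks.datafactory import models  # import path assumed

    binary_source = models.BinarySource(
        store_settings=models.AzureBlobFsReadSettings(
            recursive=True,
            wildcard_file_name='*.bin',
            disable_metrics_collection=True,
        ),
        # format_settings=models.BinaryReadSettings(...)  # optional; not shown in this hunk
    )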
:type treat_empty_as_null: object @@ -7626,6 +7806,7 @@ class BlobSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, @@ -7794,6 +7975,9 @@ class CassandraSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -7823,6 +8007,7 @@ class CassandraSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -8140,8 +8325,8 @@ class CommonDataServiceForAppsLinkedService(LinkedService): :param deployment_type: Required. The deployment type of the Common Data Service for Apps instance. 'Online' for Common Data Service for Apps Online and 'OnPremisesWithIfd' for Common Data Service for Apps on-premises with Ifd. Type: string (or Expression with resultType - string). Possible values include: "Online", "OnPremisesWithIfd". - :type deployment_type: str or ~data_factory_management_client.models.DynamicsDeploymentType + string). + :type deployment_type: object :param host_name: The host name of the on-premises Common Data Service for Apps server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). @@ -8162,10 +8347,8 @@ class CommonDataServiceForAppsLinkedService(LinkedService): :param authentication_type: Required. The authentication type to connect to Common Data Service for Apps server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario. 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or - Expression with resultType string). Possible values include: "Office365", "Ifd", - "AADServicePrincipal". - :type authentication_type: str or - ~data_factory_management_client.models.DynamicsAuthenticationType + Expression with resultType string). + :type authentication_type: object :param username: User name to access the Common Data Service for Apps instance. Type: string (or Expression with resultType string). 
:type username: object @@ -8176,10 +8359,8 @@ class CommonDataServiceForAppsLinkedService(LinkedService): :type service_principal_id: object :param service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). Possible values include: - "ServicePrincipalKey", "ServicePrincipalCert". - :type service_principal_credential_type: str or - ~data_factory_management_client.models.DynamicsServicePrincipalCredentialType + for certificate. Type: string (or Expression with resultType string). + :type service_principal_credential_type: object :param service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If @@ -8205,16 +8386,16 @@ class CommonDataServiceForAppsLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'}, 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, 'port': {'key': 'typeProperties.port', 'type': 'object'}, 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'object'}, 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -8264,6 +8445,9 @@ class CommonDataServiceForAppsSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: Required. The write behavior for the operation. Possible values include: "Upsert". 
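The hunks above relax deploymentType, authenticationType and servicePrincipalCredentialType from enum-backed strings to object, so callers may pass either the documented literals or an ADF Expression. A sketch, with SecureString taken from the existing SecretBase hierarchy:

    from azext_datafactory.vendored_sdks.datafactory import models  # import path assumed

    cds_ls = models.CommonDataServiceForAppsLinkedService(
        deployment_type='Online',          # formerly a DynamicsDeploymentType enum value
        service_uri='https://contoso.crm.dynamics.com',
        authentication_type={'type': 'Expression',
                             'value': "@linkedService().authType"},
        username='adf-user@contoso.com',
        password=models.SecureString(value='<secret>'),
    )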
:type write_behavior: str or ~data_factory_management_client.models.DynamicsSinkWriteBehavior @@ -8289,6 +8473,7 @@ class CommonDataServiceForAppsSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, @@ -8324,6 +8509,9 @@ class CommonDataServiceForAppsSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: FetchXML is a proprietary query language that is used in Microsoft Common Data Service for Apps (online & on-premises). Type: string (or Expression with resultType string). :type query: object @@ -8342,6 +8530,7 @@ class CommonDataServiceForAppsSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -8587,6 +8776,9 @@ class ConcurSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -8608,6 +8800,7 @@ class ConcurSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -9130,6 +9323,9 @@ class CosmosDbMongoDbApiSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :type disable_metrics_collection: object :param write_behavior: Specifies whether a document with the same key should be overwritten (upsert) rather than raise an exception (insert). The default value is "insert". Type: string (or Expression with resultType string). :type write_behavior: object @@ -9148,6 +9344,7 @@ 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } @@ -9179,6 +9376,9 @@ class CosmosDbMongoDbApiSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param filter: Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). @@ -9208,6 +9408,7 @@ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'filter': {'key': 'filter', 'type': 'object'}, 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, @@ -9313,6 +9514,9 @@ class CosmosDbSqlApiSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: Describes how to write data to Azure Cosmos DB. Type: string (or Expression with resultType string). Allowed values: insert and upsert. :type write_behavior: object @@ -9330,6 +9534,7 @@ 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } @@ -9361,6 +9566,9 @@ class CosmosDbSqlApiSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: SQL API query.
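For the Cosmos DB Mongo API sink, writeBehavior stays an object-typed string; a sketch of the upsert mode named in the docstring:

    from azext_datafactory.vendored_sdks.datafactory import models  # import path assumed

    mongo_sink = models.CosmosDbMongoDbApiSink(
        write_behavior='upsert',           # default is 'insert'
        disable_metrics_collection=False,
    )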
Type: string (or Expression with resultType string). :type query: object :param page_size: Page size of the result. Type: integer (or Expression with resultType @@ -9387,6 +9595,7 @@ class CosmosDbSqlApiSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'page_size': {'key': 'pageSize', 'type': 'object'}, 'preferred_regions': {'key': 'preferredRegions', 'type': 'object'}, @@ -9482,6 +9691,9 @@ class CouchbaseSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -9503,6 +9715,7 @@ class CouchbaseSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -10999,8 +11212,8 @@ class DatasetDeflateCompression(DatasetCompression): :type additional_properties: dict[str, object] :param type: Required. Type of dataset compression.Constant filled by server. :type type: str - :param level: The Deflate compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel + :param level: The Deflate compression level. + :type level: object """ _validation = { @@ -11010,7 +11223,7 @@ class DatasetDeflateCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__( @@ -11051,8 +11264,8 @@ class DatasetGZipCompression(DatasetCompression): :type additional_properties: dict[str, object] :param type: Required. Type of dataset compression.Constant filled by server. :type type: str - :param level: The GZip compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel + :param level: The GZip compression level. 
+ :type level: object """ _validation = { @@ -11062,7 +11275,7 @@ class DatasetGZipCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__( @@ -11250,8 +11463,8 @@ class DatasetTarGZipCompression(DatasetCompression): :type additional_properties: dict[str, object] :param type: Required. Type of dataset compression.Constant filled by server. :type type: str - :param level: The TarGZip compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel + :param level: The TarGZip compression level. + :type level: object """ _validation = { @@ -11261,7 +11474,7 @@ class DatasetTarGZipCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__( @@ -11283,8 +11496,8 @@ class DatasetZipDeflateCompression(DatasetCompression): :type additional_properties: dict[str, object] :param type: Required. Type of dataset compression.Constant filled by server. :type type: str - :param level: The ZipDeflate compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel + :param level: The ZipDeflate compression level. + :type level: object """ _validation = { @@ -11294,7 +11507,7 @@ class DatasetZipDeflateCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__( @@ -11413,6 +11626,9 @@ class Db2Source(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -11433,6 +11649,7 @@ class Db2Source(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -11652,12 +11869,11 @@ class DelimitedTextDataset(Dataset): https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). :type encoding_name: object - :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo", "bzip2", - "deflate", "zipDeflate", "lz4", "tar", "tarGZip". 
- :type compression_codec: str or ~data_factory_management_client.models.CompressionCodec - :param compression_level: The data compression method used for DelimitedText. Possible values - include: "Optimal", "Fastest". - :type compression_level: str or ~data_factory_management_client.models.DatasetCompressionLevel + :param compression_codec: The data compression codec. Type: string (or Expression with + resultType string). + :type compression_codec: object + :param compression_level: The data compression method used for DelimitedText. + :type compression_level: object :param quote_char: The quote character. Type: string (or Expression with resultType string). :type quote_char: object :param escape_char: The escape character. Type: string (or Expression with resultType string). @@ -11689,8 +11905,8 @@ 'column_delimiter': {'key': 'typeProperties.columnDelimiter', 'type': 'object'}, 'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'}, 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, - 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'}, - 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'str'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, + 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'object'}, 'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'}, 'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'}, 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, @@ -11778,6 +11994,9 @@ class DelimitedTextSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: DelimitedText store settings. :type store_settings: ~data_factory_management_client.models.StoreWriteSettings :param format_settings: DelimitedText format settings. @@ -11796,6 +12015,7 @@ 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSettings'}, } @@ -11829,6 +12049,9 @@ class DelimitedTextSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: DelimitedText store settings. :type store_settings: ~data_factory_management_client.models.StoreReadSettings :param format_settings: DelimitedText format settings.
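The compression changes follow the PR's stated theme: level and compressionCodec drop their enums (DatasetCompressionLevel, CompressionCodec) in favor of object, so an Expression becomes legal where only "Optimal"/"Fastest" or the fixed codec names were accepted before. A sketch:

    from azext_datafactory.vendored_sdks.datafactory import models  # import path assumed

    gzip_compression = models.DatasetGZipCompression(
        level={'type': 'Expression', 'value': "@dataset().compressionLevel"},
    )
    # DelimitedTextDataset.compression_codec / compression_level are likewise plain
    # objects now, e.g. compression_codec='gzip', compression_level='Fastest'.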
@@ -11848,6 +12071,7 @@ class DelimitedTextSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSettings'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, @@ -12070,6 +12294,9 @@ class DocumentDbCollectionSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param nesting_separator: Nested properties separator. Default is . (dot). Type: string (or Expression with resultType string). :type nesting_separator: object @@ -12090,6 +12317,7 @@ class DocumentDbCollectionSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } @@ -12123,6 +12351,9 @@ class DocumentDbCollectionSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: Documents query. Type: string (or Expression with resultType string). :type query: object :param nesting_separator: Nested properties separator. Type: string (or Expression with @@ -12146,6 +12377,7 @@ class DocumentDbCollectionSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, @@ -12239,6 +12471,9 @@ class DrillSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -12260,6 +12495,7 @@ class DrillSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -12552,6 +12788,9 @@ class DynamicsAxSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -12578,6 +12817,7 @@ class DynamicsAxSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -12673,9 +12913,8 @@ class DynamicsCrmLinkedService(LinkedService): :type annotations: list[object] :param deployment_type: Required. The deployment type of the Dynamics CRM instance. 'Online' for Dynamics CRM Online and 'OnPremisesWithIfd' for Dynamics CRM on-premises with Ifd. Type: - string (or Expression with resultType string). Possible values include: "Online", - "OnPremisesWithIfd". - :type deployment_type: str or ~data_factory_management_client.models.DynamicsDeploymentType + string (or Expression with resultType string). + :type deployment_type: object :param host_name: The host name of the on-premises Dynamics CRM server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). @@ -12694,10 +12933,8 @@ class DynamicsCrmLinkedService(LinkedService): :param authentication_type: Required. The authentication type to connect to Dynamics CRM server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or - Expression with resultType string). Possible values include: "Office365", "Ifd", - "AADServicePrincipal". - :type authentication_type: str or - ~data_factory_management_client.models.DynamicsAuthenticationType + Expression with resultType string). + :type authentication_type: object :param username: User name to access the Dynamics CRM instance. Type: string (or Expression with resultType string). 
:type username: object @@ -12708,10 +12945,8 @@ class DynamicsCrmLinkedService(LinkedService): :type service_principal_id: object :param service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). Possible values include: - "ServicePrincipalKey", "ServicePrincipalCert". - :type service_principal_credential_type: str or - ~data_factory_management_client.models.DynamicsServicePrincipalCredentialType + for certificate. Type: string (or Expression with resultType string). + :type service_principal_credential_type: object :param service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If @@ -12737,16 +12972,16 @@ class DynamicsCrmLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'}, 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, 'port': {'key': 'typeProperties.port', 'type': 'object'}, 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'object'}, 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -12796,6 +13031,9 @@ class DynamicsCrmSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: Required. The write behavior for the operation. Possible values include: "Upsert". 
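With deploymentType and authenticationType loosened to object above, a Dynamics CRM linked service can defer either value to an Expression. A hypothetical sketch (host, user, and secret values are placeholders; deployment_type and authentication_type are the two required properties per the docstring):

```python
# Sketch only: all concrete values below are placeholders.
from azext_datafactory.vendored_sdks.datafactory.models import (
    DynamicsCrmLinkedService,
    SecureString,
)

crm_ls = DynamicsCrmLinkedService(
    # Resolved at runtime now that the property is object, not an enum:
    deployment_type={"type": "Expression", "value": "@linkedService().deployType"},
    authentication_type="Office365",
    service_uri="https://contoso.crm.dynamics.com",
    username="etl-user@contoso.com",
    password=SecureString(value="<placeholder-secret>"),
)
```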
:type write_behavior: str or ~data_factory_management_client.models.DynamicsSinkWriteBehavior @@ -12821,6 +13059,7 @@ class DynamicsCrmSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, @@ -12856,6 +13095,9 @@ class DynamicsCrmSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics CRM (online & on-premises). Type: string (or Expression with resultType string). :type query: object @@ -12874,6 +13116,7 @@ class DynamicsCrmSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -12967,8 +13210,8 @@ class DynamicsLinkedService(LinkedService): :type annotations: list[object] :param deployment_type: Required. The deployment type of the Dynamics instance. 'Online' for Dynamics Online and 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or - Expression with resultType string). Possible values include: "Online", "OnPremisesWithIfd". - :type deployment_type: str or ~data_factory_management_client.models.DynamicsDeploymentType + Expression with resultType string). + :type deployment_type: object :param host_name: The host name of the on-premises Dynamics server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). :type host_name: object @@ -12986,9 +13229,8 @@ class DynamicsLinkedService(LinkedService): :param authentication_type: Required. The authentication type to connect to Dynamics server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or Expression with - resultType string). Possible values include: "Office365", "Ifd", "AADServicePrincipal". - :type authentication_type: str or - ~data_factory_management_client.models.DynamicsAuthenticationType + resultType string). + :type authentication_type: object :param username: User name to access the Dynamics instance. Type: string (or Expression with resultType string). :type username: object @@ -12999,10 +13241,8 @@ class DynamicsLinkedService(LinkedService): :type service_principal_id: object :param service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 
'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). Possible values include: - "ServicePrincipalKey", "ServicePrincipalCert". - :type service_principal_credential_type: str or - ~data_factory_management_client.models.DynamicsServicePrincipalCredentialType + for certificate. Type: string (or Expression with resultType string). + :type service_principal_credential_type: str :param service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If @@ -13028,12 +13268,12 @@ class DynamicsLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'}, 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, 'port': {'key': 'typeProperties.port', 'type': 'object'}, 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, @@ -13087,6 +13327,9 @@ class DynamicsSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: Required. The write behavior for the operation. Possible values include: "Upsert". :type write_behavior: str or ~data_factory_management_client.models.DynamicsSinkWriteBehavior @@ -13112,6 +13355,7 @@ class DynamicsSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, @@ -13147,6 +13391,9 @@ class DynamicsSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :type disable_metrics_collection: object :param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics (online & on-premises). Type: string (or Expression with resultType string). :type query: object @@ -13165,6 +13412,7 @@ class DynamicsSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -13334,6 +13582,9 @@ class EloquaSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -13355,6 +13606,7 @@ class EloquaSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -13414,7 +13666,7 @@ class EntityReference(msrest.serialization.Model): """The entity reference. :param type: The type of this referenced entity. Possible values include: - "IntegrationRuntimeReference", "LinkedServiceReference". + "IntegrationRuntimeReference", "LinkedServiceReference", "CredentialReference". :type type: str or ~data_factory_management_client.models.IntegrationRuntimeEntityReferenceType :param reference_name: The name of this referenced entity. :type reference_name: str @@ -13498,9 +13750,12 @@ class ExcelDataset(Dataset): :type folder: ~data_factory_management_client.models.DatasetFolder :param location: The location of the excel storage. :type location: ~data_factory_management_client.models.DatasetLocation - :param sheet_name: The sheet of excel file. Type: string (or Expression with resultType + :param sheet_name: The sheet name of the Excel file. Type: string (or Expression with resultType string). :type sheet_name: object + :param sheet_index: The sheet index of the Excel file; the default value is 0. Type: integer (or + Expression with resultType integer). + :type sheet_index: object :param range: The partial data of one sheet. Type: string (or Expression with resultType string).
:type range: object @@ -13531,6 +13786,7 @@ class ExcelDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, 'sheet_name': {'key': 'typeProperties.sheetName', 'type': 'object'}, + 'sheet_index': {'key': 'typeProperties.sheetIndex', 'type': 'object'}, 'range': {'key': 'typeProperties.range', 'type': 'object'}, 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, @@ -13545,6 +13801,7 @@ def __init__( self.type = 'Excel' # type: str self.location = kwargs.get('location', None) self.sheet_name = kwargs.get('sheet_name', None) + self.sheet_index = kwargs.get('sheet_index', None) self.range = kwargs.get('range', None) self.first_row_as_header = kwargs.get('first_row_as_header', None) self.compression = kwargs.get('compression', None) @@ -13570,6 +13827,9 @@ class ExcelSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Excel store settings. :type store_settings: ~data_factory_management_client.models.StoreReadSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: @@ -13587,6 +13847,7 @@ class ExcelSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -14527,6 +14788,9 @@ class FileServerReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -14567,6 +14831,7 @@ class FileServerReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -14610,6 +14875,9 @@ class FileServerWriteSettings(StoreWriteSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. 
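The new sheetIndex gives Excel datasets a positional alternative to sheetName. A short sketch, reusing the assumed import path and placeholder names from the earlier examples:

```python
# Sketch only: container and file names are illustrative.
from azext_datafactory.vendored_sdks.datafactory.models import (
    AzureBlobStorageLocation,
    ExcelDataset,
    LinkedServiceReference,
)

excel = ExcelDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference", reference_name="MyBlobStorage"
    ),
    location=AzureBlobStorageLocation(container="reports", file_name="q2.xlsx"),
    sheet_index=2,  # third sheet, counted from 0; alternative to sheet_name
    first_row_as_header=True,
)
assert excel.serialize()["typeProperties"]["sheetIndex"] == 2
```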
Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object """ @@ -14622,6 +14890,7 @@ class FileServerWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } @@ -14745,6 +15014,9 @@ class FileSystemSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object """ @@ -14761,6 +15033,7 @@ class FileSystemSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } @@ -14792,6 +15065,9 @@ class FileSystemSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -14810,6 +15086,7 @@ class FileSystemSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -14951,6 +15228,9 @@ class FtpReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). 
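The read/write settings classes in this stretch (FileServerReadSettings and friends) are polymorphic StoreReadSettings/StoreWriteSettings payloads that plug into the sources and sinks. A sketch of how one attaches and serializes, with illustrative values:

```python
# Sketch only: wildcard and connection values are illustrative.
from azext_datafactory.vendored_sdks.datafactory.models import (
    DelimitedTextSource,
    FileServerReadSettings,
)

read_settings = FileServerReadSettings(
    recursive=True,
    wildcard_file_name="*.csv",
    max_concurrent_connections=4,
    disable_metrics_collection=False,
)
source = DelimitedTextSource(store_settings=read_settings)

# The polymorphic discriminator is written by the model itself.
assert source.serialize()["storeSettings"]["type"] == "FileServerReadSettings"
```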
:type recursive: object @@ -14984,6 +15264,7 @@ class FtpReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -15502,6 +15783,9 @@ class GoogleAdWordsSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -15523,6 +15807,7 @@ class GoogleAdWordsSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -15731,6 +16016,9 @@ class GoogleBigQuerySource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -15752,6 +16040,7 @@ class GoogleBigQuerySource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -15890,6 +16179,9 @@ class GoogleCloudStorageReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. 
Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -15930,6 +16222,7 @@ class GoogleCloudStorageReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -16035,6 +16328,9 @@ class GreenplumSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -16056,6 +16352,7 @@ class GreenplumSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -16313,6 +16610,9 @@ class HBaseSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -16334,6 +16634,7 @@ class HBaseSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -16465,6 +16766,9 @@ class HdfsReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -16504,6 +16808,7 @@ class HdfsReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -16553,6 +16858,9 @@ class HdfsSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -16570,6 +16878,7 @@ class HdfsSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, } @@ -17530,6 +17839,9 @@ class HiveSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -17551,6 +17863,7 @@ class HiveSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -17755,6 +18068,9 @@ class HttpReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :type disable_metrics_collection: object :param request_method: The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). :type request_method: object @@ -17782,6 +18098,7 @@ class HttpReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'request_method': {'key': 'requestMethod', 'type': 'object'}, 'request_body': {'key': 'requestBody', 'type': 'object'}, 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, @@ -17865,6 +18182,9 @@ class HttpSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param http_request_timeout: Specifies the timeout for a HTTP client to get HTTP response from HTTP server. The default value is equivalent to System.Net.HttpWebRequest.Timeout. Type: string (or Expression with resultType string), pattern: @@ -17882,6 +18202,7 @@ class HttpSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } @@ -18053,6 +18374,9 @@ class HubspotSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -18074,6 +18398,7 @@ class HubspotSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -18335,6 +18660,9 @@ class ImpalaSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -18356,6 +18684,7 @@ class ImpalaSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -18469,6 +18798,9 @@ class InformixSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). :type pre_copy_script: object @@ -18486,6 +18818,7 @@ class InformixSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -18517,6 +18850,9 @@ class InformixSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -18537,6 +18873,7 @@ class InformixSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -19225,6 +19562,8 @@ class IntegrationRuntimeSsisProperties(msrest.serialization.Model): list[~data_factory_management_client.models.CustomSetupBase] :param package_stores: Package stores for the SSIS Integration Runtime. :type package_stores: list[~data_factory_management_client.models.PackageStore] + :param managed_credential: The user-assigned managed identity reference. 
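Every TabularSource in this file (ImpalaSource and InformixSource above included) shares queryTimeout, a timespan string matching the pattern quoted in the docstrings, and additionalColumns. A sketch with illustrative names:

```python
# Sketch only: query, timeout, and column names are illustrative.
from azext_datafactory.vendored_sdks.datafactory.models import (
    AdditionalColumns,
    ImpalaSource,
)

source = ImpalaSource(
    query="SELECT * FROM sales",
    query_timeout="02:00:00",  # matches the documented timespan pattern
    additional_columns=[
        AdditionalColumns(
            name="ingested_at",
            value={"type": "Expression", "value": "@utcnow()"},
        )
    ],
)
```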
+ :type managed_credential: ~data_factory_management_client.models.EntityReference """ _attribute_map = { @@ -19236,6 +19575,7 @@ class IntegrationRuntimeSsisProperties(msrest.serialization.Model): 'edition': {'key': 'edition', 'type': 'str'}, 'express_custom_setup_properties': {'key': 'expressCustomSetupProperties', 'type': '[CustomSetupBase]'}, 'package_stores': {'key': 'packageStores', 'type': '[PackageStore]'}, + 'managed_credential': {'key': 'managedCredential', 'type': 'EntityReference'}, } def __init__( @@ -19251,6 +19591,7 @@ def __init__( self.edition = kwargs.get('edition', None) self.express_custom_setup_properties = kwargs.get('express_custom_setup_properties', None) self.package_stores = kwargs.get('package_stores', None) + self.managed_credential = kwargs.get('managed_credential', None) class IntegrationRuntimeStatus(msrest.serialization.Model): @@ -19560,6 +19901,9 @@ class JiraSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -19581,6 +19925,7 @@ class JiraSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -19680,9 +20025,8 @@ class JsonFormat(DatasetStorageFormat): :param deserializer: Deserializer. Type: string (or Expression with resultType string). :type deserializer: object :param file_pattern: File pattern of JSON. To be more specific, the way of separating a - collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive. Possible - values include: "setOfObjects", "arrayOfObjects". - :type file_pattern: str or ~data_factory_management_client.models.JsonFormatFilePattern + collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive. + :type file_pattern: object :param nesting_separator: The character used to separate nesting levels. Default value is '.' (dot). Type: string (or Expression with resultType string). 
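The new managedCredential property ties back to the EntityReference change earlier in this patch, which added "CredentialReference" to the allowed reference types. A sketch with a placeholder credential name:

```python
# Sketch only: the credential name is a placeholder.
from azext_datafactory.vendored_sdks.datafactory.models import (
    EntityReference,
    IntegrationRuntimeSsisProperties,
)

ssis_props = IntegrationRuntimeSsisProperties(
    edition="Standard",
    managed_credential=EntityReference(
        type="CredentialReference",
        reference_name="myUserAssignedIdentityCredential",
    ),
)
assert ssis_props.serialize()["managedCredential"]["type"] == "CredentialReference"
```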
:type nesting_separator: object @@ -19712,7 +20056,7 @@ class JsonFormat(DatasetStorageFormat): 'type': {'key': 'type', 'type': 'str'}, 'serializer': {'key': 'serializer', 'type': 'object'}, 'deserializer': {'key': 'deserializer', 'type': 'object'}, - 'file_pattern': {'key': 'filePattern', 'type': 'str'}, + 'file_pattern': {'key': 'filePattern', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, 'encoding_name': {'key': 'encodingName', 'type': 'object'}, 'json_node_reference': {'key': 'jsonNodeReference', 'type': 'object'}, @@ -19790,6 +20134,9 @@ class JsonSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Json store settings. :type store_settings: ~data_factory_management_client.models.StoreWriteSettings :param format_settings: Json format settings. @@ -19808,6 +20155,7 @@ class JsonSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'JsonWriteSettings'}, } @@ -19841,6 +20189,9 @@ class JsonSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Json store settings. :type store_settings: ~data_factory_management_client.models.StoreReadSettings :param format_settings: Json format settings. @@ -19860,6 +20211,7 @@ class JsonSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'JsonReadSettings'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, @@ -19887,9 +20239,8 @@ class JsonWriteSettings(FormatWriteSettings): :param type: Required. The write setting type.Constant filled by server. :type type: str :param file_pattern: File pattern of JSON. This setting controls the way a collection of JSON - objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. Possible - values include: "setOfObjects", "arrayOfObjects". - :type file_pattern: str or ~data_factory_management_client.models.JsonWriteFilePattern + objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. 
+ :type file_pattern: object """ _validation = { @@ -19899,7 +20250,7 @@ class JsonWriteSettings(FormatWriteSettings): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'file_pattern': {'key': 'filePattern', 'type': 'str'}, + 'file_pattern': {'key': 'filePattern', 'type': 'object'}, } def __init__( @@ -20538,6 +20889,9 @@ class MagentoSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -20559,6 +20913,7 @@ class MagentoSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -21219,6 +21574,9 @@ class MariaDbSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -21240,6 +21598,7 @@ class MariaDbSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -21466,6 +21825,9 @@ class MarketoSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
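Because filePattern on JsonWriteSettings (and on JsonFormat above) is now object, the two documented literals and an Expression are all accepted. A brief sketch:

```python
# Sketch only: the pipeline parameter name is illustrative.
from azext_datafactory.vendored_sdks.datafactory.models import JsonWriteSettings

literal = JsonWriteSettings(file_pattern="arrayOfObjects")
dynamic = JsonWriteSettings(
    file_pattern={"type": "Expression", "value": "@pipeline().parameters.pattern"}
)
assert literal.serialize()["filePattern"] == "arrayOfObjects"
```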
:type query_timeout: object @@ -21487,6 +21849,7 @@ class MarketoSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -21501,6 +21864,29 @@ def __init__( self.query = kwargs.get('query', None) + +class MetadataItem(msrest.serialization.Model): + """Specifies the name and value of a custom metadata item. + + :param name: Metadata item key name. Type: string (or Expression with resultType string). + :type name: object + :param value: Metadata item value. Type: string (or Expression with resultType string). + :type value: object + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'object'}, + 'value': {'key': 'value', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MetadataItem, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.value = kwargs.get('value', None) + + class MicrosoftAccessLinkedService(LinkedService): """Microsoft Access linked service. @@ -21600,6 +21986,9 @@ class MicrosoftAccessSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). :type pre_copy_script: object @@ -21617,6 +22006,7 @@ class MicrosoftAccessSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -21648,6 +22038,9 @@ class MicrosoftAccessSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object :param additional_columns: Specifies the additional columns to be added to source data.
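MetadataItem is a plain two-field model; with both fields typed object, custom metadata can mix static values with per-run Expressions. A sketch with illustrative keys:

```python
# Sketch only: metadata keys and values are illustrative.
from azext_datafactory.vendored_sdks.datafactory.models import MetadataItem

items = [
    MetadataItem(name="source_system", value="sap"),
    MetadataItem(
        name="run_id",
        value={"type": "Expression", "value": "@pipeline().RunId"},
    ),
]
assert items[0].serialize() == {"name": "source_system", "value": "sap"}
```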
Type: @@ -21665,6 +22058,7 @@ class MicrosoftAccessSource(CopySource): 'additional_properties': {'key': '', 'type': '{object}'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -21852,6 +22246,65 @@ def __init__( self.database = kwargs['database'] + +class MongoDbAtlasSink(CopySink): + """A copy activity MongoDB Atlas sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object + :param write_behavior: Specifies whether the document with the same key should be overwritten + (upsert) rather than raise an exception (insert). The default value is "insert". Type: string + (or Expression with resultType string). + :type write_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MongoDbAtlasSink, self).__init__(**kwargs) + self.type = 'MongoDbAtlasSink' # type: str + self.write_behavior = kwargs.get('write_behavior', None) + + class MongoDbAtlasSource(CopySource): """A copy activity source for a MongoDB Atlas database. @@ -21871,6 +22324,9 @@ class MongoDbAtlasSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store.
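MongoDbAtlasSink above (mirrored by MongoDbV2Sink later in the file) adds write support with a single writeBehavior knob; "upsert" swaps insert-or-fail for overwrite-by-key semantics. A sketch:

```python
# Sketch only: the batch size is illustrative.
from azext_datafactory.vendored_sdks.datafactory.models import MongoDbAtlasSink

sink = MongoDbAtlasSink(
    write_behavior="upsert",  # default is "insert"
    write_batch_size=1000,
)
assert sink.serialize()["writeBehavior"] == "upsert"
```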
Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param filter: Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). @@ -21900,6 +22356,7 @@ class MongoDbAtlasSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'filter': {'key': 'filter', 'type': 'object'}, 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, @@ -22136,6 +22593,9 @@ class MongoDbSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: Database query. Should be a SQL-92 query expression. Type: string (or Expression with resultType string). :type query: object @@ -22154,6 +22614,7 @@ class MongoDbSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -22281,6 +22742,65 @@ def __init__( self.database = kwargs['database'] +class MongoDbV2Sink(CopySink): + """A copy activity MongoDB sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type. Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer).
+ :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object + :param write_behavior: Specifies whether the document with the same key should be overwritten + (upsert) rather than throw an exception (insert). The default value is "insert". Type: string + (or Expression with resultType string). + :type write_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MongoDbV2Sink, self).__init__(**kwargs) + self.type = 'MongoDbV2Sink' # type: str + self.write_behavior = kwargs.get('write_behavior', None) + + class MongoDbV2Source(CopySource): """A copy activity source for a MongoDB database. @@ -22300,6 +22820,9 @@ class MongoDbV2Source(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param filter: Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). @@ -22329,6 +22852,7 @@ class MongoDbV2Source(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'filter': {'key': 'filter', 'type': 'object'}, 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, @@ -22424,6 +22948,9 @@ class MySqlSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
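# --- Editor's note: a hedged sketch of the two sinks introduced above (MongoDbAtlasSink
# and MongoDbV2Sink share the same shape). Not part of the generated patch; the import
# path assumes the vendored SDK namespace and the values are illustrative.
from azext_datafactory.vendored_sdks.datafactory.models import MongoDbAtlasSink

# write_behavior is typed 'object', so it may be a literal string or an ADF expression;
# 'upsert' switches the sink from the default 'insert' behavior.
sink = MongoDbAtlasSink(write_behavior='upsert', write_batch_size=1000)
assert sink.type == 'MongoDbAtlasSink'  # discriminator constant set by __init__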
:type query_timeout: object @@ -22444,6 +22971,7 @@ class MySqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -22623,6 +23151,9 @@ class NetezzaSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -22649,6 +23180,7 @@ class NetezzaSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -22932,6 +23464,9 @@ class ODataSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: OData query. For example, "$top=1". Type: string (or Expression with resultType string). :type query: object @@ -22955,6 +23490,7 @@ class ODataSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, @@ -23069,6 +23605,9 @@ class OdbcSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). 
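# --- Editor's note: the bulk of this patch adds the same optional
# 'disable_metrics_collection' property to every copy source, copy sink, and store
# read/write settings model. A hedged sketch using OdbcSink (documented above) as a
# representative; the values are illustrative and not part of the patch.
from azext_datafactory.vendored_sdks.datafactory.models import OdbcSink

sink = OdbcSink(
    pre_copy_script='TRUNCATE TABLE staging_orders',  # runs before the copy starts
    # Typed 'object': accepts a plain bool or an ADF expression payload;
    # the service default is false when the property is omitted.
    disable_metrics_collection=True,
)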
:type pre_copy_script: object @@ -23086,6 +23625,7 @@ class OdbcSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -23117,6 +23657,9 @@ class OdbcSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -23137,6 +23680,7 @@ class OdbcSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -23363,6 +23907,9 @@ class Office365Source(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param allowed_groups: The groups containing all the users. Type: array of strings (or Expression with resultType array of strings). :type allowed_groups: object @@ -23394,6 +23941,7 @@ class Office365Source(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'allowed_groups': {'key': 'allowedGroups', 'type': 'object'}, 'user_scope_filter_uri': {'key': 'userScopeFilterUri', 'type': 'object'}, 'date_filter_column': {'key': 'dateFilterColumn', 'type': 'object'}, @@ -23783,6 +24331,9 @@ class OracleCloudStorageReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). 
:type recursive: object @@ -23823,6 +24374,7 @@ class OracleCloudStorageReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -24103,6 +24655,9 @@ class OracleServiceCloudSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -24124,6 +24679,7 @@ class OracleServiceCloudSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -24163,6 +24719,9 @@ class OracleSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). :type pre_copy_script: object @@ -24180,6 +24739,7 @@ class OracleSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -24211,6 +24771,9 @@ class OracleSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param oracle_reader_query: Oracle reader query. Type: string (or Expression with resultType string). 
:type oracle_reader_query: object @@ -24237,6 +24800,7 @@ class OracleSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'object'}, @@ -24355,8 +24919,9 @@ class OrcDataset(Dataset): :type folder: ~data_factory_management_client.models.DatasetFolder :param location: The location of the ORC data storage. :type location: ~data_factory_management_client.models.DatasetLocation - :param orc_compression_codec: Possible values include: "none", "zlib", "snappy", "lzo". - :type orc_compression_codec: str or ~data_factory_management_client.models.OrcCompressionCodec + :param orc_compression_codec: The data orcCompressionCodec. Type: string (or Expression with + resultType string). + :type orc_compression_codec: object """ _validation = { @@ -24375,7 +24940,7 @@ class OrcDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'orc_compression_codec': {'key': 'typeProperties.orcCompressionCodec', 'type': 'str'}, + 'orc_compression_codec': {'key': 'typeProperties.orcCompressionCodec', 'type': 'object'}, } def __init__( @@ -24448,6 +25013,9 @@ class OrcSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: ORC store settings. :type store_settings: ~data_factory_management_client.models.StoreWriteSettings :param format_settings: ORC format settings. @@ -24466,6 +25034,7 @@ class OrcSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'OrcWriteSettings'}, } @@ -24499,6 +25068,9 @@ class OrcSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: ORC store settings. :type store_settings: ~data_factory_management_client.models.StoreReadSettings :param additional_columns: Specifies the additional columns to be added to source data. 
Type: @@ -24516,6 +25088,7 @@ class OrcSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -24659,9 +25232,9 @@ class ParquetDataset(Dataset): :type folder: ~data_factory_management_client.models.DatasetFolder :param location: The location of the parquet storage. :type location: ~data_factory_management_client.models.DatasetLocation - :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo", "bzip2", - "deflate", "zipDeflate", "lz4", "tar", "tarGZip". - :type compression_codec: str or ~data_factory_management_client.models.CompressionCodec + :param compression_codec: The data compressionCodec. Type: string (or Expression with + resultType string). + :type compression_codec: object """ _validation = { @@ -24680,7 +25253,7 @@ class ParquetDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, } def __init__( @@ -24753,6 +25326,9 @@ class ParquetSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Parquet store settings. :type store_settings: ~data_factory_management_client.models.StoreWriteSettings :param format_settings: Parquet format settings. @@ -24771,6 +25347,7 @@ class ParquetSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'ParquetWriteSettings'}, } @@ -24804,6 +25381,9 @@ class ParquetSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Parquet store settings. :type store_settings: ~data_factory_management_client.models.StoreReadSettings :param additional_columns: Specifies the additional columns to be added to source data. 
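# --- Editor's note: this is the headline change of PR 14770. OrcDataset's
# orcCompressionCodec and ParquetDataset's compressionCodec (diffed above) switch from
# an enum-constrained 'str' to 'object', so a pipeline expression is now accepted as
# well as a literal codec name. A hedged sketch; the reference and location values are
# placeholders, and DatasetLocation would normally be a storage-specific subclass.
from azext_datafactory.vendored_sdks.datafactory.models import (
    DatasetLocation, LinkedServiceReference, OrcDataset, ParquetDataset,
)

ls_ref = LinkedServiceReference(type='LinkedServiceReference', reference_name='ls_blob')

# A literal codec string still serializes exactly as before...
orc = OrcDataset(
    linked_service_name=ls_ref,
    location=DatasetLocation(folder_path='raw', file_name='data.orc'),
    orc_compression_codec='snappy',
)

# ...and an ADF expression payload is now representable too.
parquet = ParquetDataset(
    linked_service_name=ls_ref,
    location=DatasetLocation(folder_path='raw', file_name='data.parquet'),
    compression_codec={'value': '@dataset().codec', 'type': 'Expression'},
)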
Type: @@ -24821,6 +25401,7 @@ class ParquetSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -25029,6 +25610,9 @@ class PaypalSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -25050,6 +25634,7 @@ class PaypalSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -25258,6 +25843,9 @@ class PhoenixSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -25279,6 +25867,7 @@ class PhoenixSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -25768,6 +26357,9 @@ class PostgreSqlSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -25788,6 +26380,7 @@ class PostgreSqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -26072,6 +26665,9 @@ class PrestoSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -26093,6 +26689,7 @@ class PrestoSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -26544,6 +27141,9 @@ class QuickBooksSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -26565,6 +27165,7 @@ class QuickBooksSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -26739,6 +27340,9 @@ class RelationalSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: Database query. Type: string (or Expression with resultType string). 
:type query: object :param additional_columns: Specifies the additional columns to be added to source data. Type: @@ -26756,6 +27360,7 @@ class RelationalSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -27085,6 +27690,9 @@ class ResponsysSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -27106,6 +27714,7 @@ class ResponsysSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -27328,6 +27937,9 @@ class RestSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param request_method: The HTTP method used to call the RESTful API. The default is POST. Type: string (or Expression with resultType string). :type request_method: object @@ -27358,6 +27970,7 @@ class RestSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'request_method': {'key': 'requestMethod', 'type': 'object'}, 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, @@ -27397,6 +28010,9 @@ class RestSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param request_method: The HTTP method used to call the RESTful API. The default is GET. 
Type: string (or Expression with resultType string). :type request_method: object @@ -27431,6 +28047,7 @@ class RestSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'request_method': {'key': 'requestMethod', 'type': 'object'}, 'request_body': {'key': 'requestBody', 'type': 'object'}, 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, @@ -27832,6 +28449,9 @@ class SalesforceMarketingCloudSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -27853,6 +28473,7 @@ class SalesforceMarketingCloudSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -28087,6 +28708,9 @@ class SalesforceServiceCloudSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: The write behavior for the operation. Default is Insert. Possible values include: "Insert", "Upsert". :type write_behavior: str or ~data_factory_management_client.models.SalesforceSinkWriteBehavior @@ -28114,6 +28738,7 @@ class SalesforceServiceCloudSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, @@ -28149,6 +28774,9 @@ class SalesforceServiceCloudSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :type disable_metrics_collection: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object :param read_behavior: The read behavior for the operation. Default is Query. Possible values @@ -28169,6 +28797,7 @@ class SalesforceServiceCloudSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, @@ -28210,6 +28839,9 @@ class SalesforceSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: The write behavior for the operation. Default is Insert. Possible values include: "Insert", "Upsert". :type write_behavior: str or ~data_factory_management_client.models.SalesforceSinkWriteBehavior @@ -28237,6 +28869,7 @@ class SalesforceSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, @@ -28272,6 +28905,9 @@ class SalesforceSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -28295,6 +28931,7 @@ class SalesforceSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -28458,6 +29095,9 @@ class SapBwSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
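# --- Editor's note: a hedged sketch of the upsert configuration described in the
# SalesforceSink docstring above; the external-ID field name is illustrative and the
# snippet is not part of the generated patch.
from azext_datafactory.vendored_sdks.datafactory.models import SalesforceSink

sink = SalesforceSink(
    write_behavior='Upsert',                     # default is 'Insert'
    external_id_field_name='Account_Ext_Id__c',  # Salesforce upserts key off an external ID field
    ignore_null_values=False,                    # False: source nulls overwrite sink values
    disable_metrics_collection=True,             # the new optional flag added by this patch
)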
:type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -28478,6 +29118,7 @@ class SapBwSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -28640,6 +29281,9 @@ class SapCloudForCustomerSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: The write behavior for the operation. Default is 'Insert'. Possible values include: "Insert", "Update". :type write_behavior: str or @@ -28663,6 +29307,7 @@ class SapCloudForCustomerSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } @@ -28696,6 +29341,9 @@ class SapCloudForCustomerSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -28722,6 +29370,7 @@ class SapCloudForCustomerSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -28880,6 +29529,9 @@ class SapEccSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
:type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -28906,6 +29558,7 @@ class SapEccSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -29033,6 +29686,9 @@ class SapHanaSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -29062,6 +29718,7 @@ class SapHanaSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -29263,6 +29920,9 @@ class SapOpenHubSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -29295,6 +29955,7 @@ class SapOpenHubSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'exclude_last_request': {'key': 'excludeLastRequest', 'type': 'object'}, @@ -29625,6 +30286,9 @@ class SapTableSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -29672,6 +30336,7 @@ class SapTableSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'row_count': {'key': 'rowCount', 'type': 'object'}, @@ -29806,9 +30471,8 @@ class ScriptAction(msrest.serialization.Model): :type name: str :param uri: Required. The URI for the script action. :type uri: str - :param roles: Required. The node types on which the script action should be executed. Possible - values include: "Headnode", "Workernode", "Zookeeper". - :type roles: str or ~data_factory_management_client.models.HdiNodeTypes + :param roles: Required. The node types on which the script action should be executed. + :type roles: str :param parameters: The parameters for the script action. :type parameters: str """ @@ -30364,6 +31028,9 @@ class ServiceNowSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
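# --- Editor's note: ScriptAction.roles (diffed above) is relaxed from the HdiNodeTypes
# enum to a plain 'str' in this patch. A hedged sketch; the script name and URI are
# placeholders.
from azext_datafactory.vendored_sdks.datafactory.models import ScriptAction

action = ScriptAction(
    name='install-deps',
    uri='https://example.blob.core.windows.net/scripts/install.sh',
    roles='Headnode',  # formerly limited to "Headnode"/"Workernode"/"Zookeeper"
)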
:type query_timeout: object @@ -30385,6 +31052,7 @@ class ServiceNowSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -30499,6 +31167,9 @@ class SftpReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -30536,6 +31207,7 @@ class SftpReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -30678,6 +31350,9 @@ class SftpWriteSettings(StoreWriteSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object :param operation_timeout: Specifies the timeout for writing each chunk to SFTP server. Default @@ -30697,6 +31372,7 @@ class SftpWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'operation_timeout': {'key': 'operationTimeout', 'type': 'object'}, 'use_temp_file_rename': {'key': 'useTempFileRename', 'type': 'object'}, @@ -30864,6 +31540,9 @@ class SharePointOnlineListSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: The OData query to filter the data in SharePoint Online list. For example, "$top=1". 
Type: string (or Expression with resultType string). :type query: object @@ -30883,6 +31562,7 @@ class SharePointOnlineListSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } @@ -31047,6 +31727,9 @@ class ShopifySource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -31068,6 +31751,7 @@ class ShopifySource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -31339,6 +32023,9 @@ class SnowflakeSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). :type pre_copy_script: object @@ -31358,6 +32045,7 @@ class SnowflakeSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'import_settings': {'key': 'importSettings', 'type': 'SnowflakeImportCopyCommand'}, } @@ -31391,6 +32079,9 @@ class SnowflakeSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: Snowflake Sql query. Type: string (or Expression with resultType string). :type query: object :param export_settings: Snowflake export settings. 
@@ -31407,6 +32098,7 @@ class SnowflakeSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'export_settings': {'key': 'exportSettings', 'type': 'SnowflakeExportCopyCommand'}, } @@ -31624,6 +32316,9 @@ class SparkSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -31645,6 +32340,7 @@ class SparkSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -31722,6 +32418,9 @@ class SqlDwSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). :type pre_copy_script: object @@ -31753,6 +32452,7 @@ class SqlDwSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'}, 'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'}, @@ -31794,6 +32494,9 @@ class SqlDwSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -31828,6 +32531,7 @@ class SqlDwSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, @@ -31875,6 +32579,9 @@ class SqlMiSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). :type sql_writer_stored_procedure_name: object @@ -31907,6 +32614,7 @@ class SqlMiSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, @@ -31948,6 +32656,9 @@ class SqlMiSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -31983,6 +32694,7 @@ class SqlMiSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, @@ -32134,6 +32846,9 @@ class SqlServerSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param sql_writer_stored_procedure_name: SQL writer stored procedure name. 
Type: string (or Expression with resultType string). :type sql_writer_stored_procedure_name: object @@ -32166,6 +32881,7 @@ class SqlServerSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, @@ -32207,6 +32923,9 @@ class SqlServerSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -32242,6 +32961,7 @@ class SqlServerSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, @@ -32420,6 +33140,9 @@ class SqlSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). :type sql_writer_stored_procedure_name: object @@ -32452,6 +33175,7 @@ class SqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, @@ -32493,6 +33217,9 @@ class SqlSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -32530,6 +33257,7 @@ class SqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, @@ -32716,6 +33444,9 @@ class SquareSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -32737,6 +33468,7 @@ class SquareSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -33679,6 +34411,9 @@ class SybaseSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -33699,6 +34434,7 @@ class SybaseSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -34030,6 +34766,9 @@ class TeradataSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. 
Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -34056,6 +34795,7 @@ class TeradataSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -35107,6 +35847,9 @@ class VerticaSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -35128,6 +35871,7 @@ class VerticaSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -35699,6 +36443,9 @@ class WebSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] @@ -35714,6 +36461,7 @@ class WebSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -35950,6 +36698,9 @@ class XeroSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. 
Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -35971,6 +36722,7 @@ class XeroSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -36133,6 +36885,9 @@ class XmlSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Xml store settings. :type store_settings: ~data_factory_management_client.models.StoreReadSettings :param format_settings: Xml format settings. @@ -36152,6 +36907,7 @@ class XmlSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'XmlReadSettings'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, @@ -36355,6 +37111,9 @@ class ZohoSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -36376,6 +37135,7 @@ class ZohoSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py index f6ebc8328ae..e548b5abb6c 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py @@ -711,6 +711,9 @@ class CopySource(msrest.serialization.Model): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object """ _validation = { @@ -723,6 +726,7 @@ class CopySource(msrest.serialization.Model): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, } _subtype_map = { @@ -736,6 +740,7 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, **kwargs ): super(CopySource, self).__init__(**kwargs) @@ -744,6 +749,7 @@ def __init__( self.source_retry_count = source_retry_count self.source_retry_wait = source_retry_wait self.max_concurrent_connections = max_concurrent_connections + self.disable_metrics_collection = disable_metrics_collection class TabularSource(CopySource): @@ -768,6 +774,9 @@ class TabularSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -786,6 +795,7 @@ class TabularSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -801,11 +811,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(TabularSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(TabularSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'TabularSource' # type: str self.query_timeout = query_timeout self.additional_columns = additional_columns @@ -830,6 +841,9 @@ class AmazonMwsSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -851,6 +865,7 @@ class AmazonMwsSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -863,12 +878,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(AmazonMwsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(AmazonMwsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AmazonMWSSource' # type: str self.query = query @@ -977,6 +993,9 @@ class AmazonRedshiftSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -1001,6 +1020,7 @@ class AmazonRedshiftSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -1014,13 +1034,14 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, redshift_unload_settings: Optional["RedshiftUnloadSettings"] = None, **kwargs ): - super(AmazonRedshiftSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(AmazonRedshiftSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AmazonRedshiftSource' # type: str self.query = query self.redshift_unload_settings = redshift_unload_settings @@ -1307,6 +1328,9 @@ class StoreReadSettings(msrest.serialization.Model): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object """ _validation = { @@ -1317,6 +1341,7 @@ class StoreReadSettings(msrest.serialization.Model): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, } _subtype_map = { @@ -1328,12 +1353,14 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, **kwargs ): super(StoreReadSettings, self).__init__(**kwargs) self.additional_properties = additional_properties self.type = 'StoreReadSettings' # type: str self.max_concurrent_connections = max_concurrent_connections + self.disable_metrics_collection = disable_metrics_collection class AmazonS3CompatibleReadSettings(StoreReadSettings): @@ -1349,6 +1376,9 @@ class AmazonS3CompatibleReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
:type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -1389,6 +1419,7 @@ class AmazonS3CompatibleReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -1406,6 +1437,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, @@ -1418,7 +1450,7 @@ def __init__( modified_datetime_end: Optional[object] = None, **kwargs ): - super(AmazonS3CompatibleReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AmazonS3CompatibleReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AmazonS3CompatibleReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -1692,6 +1724,9 @@ class AmazonS3ReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). 
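
Each new `_attribute_map` entry pairs the snake_case Python attribute with its camelCase wire name, so no custom serialization code is needed. A small sketch of the round trip, assuming the msrest base model's serialize() helper behaves as usual:

from azext_datafactory.vendored_sdks.datafactory import models

settings = models.AmazonS3CompatibleReadSettings(
    recursive=True,
    disable_metrics_collection=True,
)

# msrest walks _attribute_map, so the payload uses the wire names, e.g.
# {'type': 'AmazonS3CompatibleReadSettings', 'recursive': True,
#  'disableMetricsCollection': True}
print(settings.serialize())
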
:type recursive: object @@ -1732,6 +1767,7 @@ class AmazonS3ReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -1749,6 +1785,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, @@ -1761,7 +1798,7 @@ def __init__( modified_datetime_end: Optional[object] = None, **kwargs ): - super(AmazonS3ReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AmazonS3ReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AmazonS3ReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -1887,10 +1924,9 @@ class AvroDataset(Dataset): :type folder: ~data_factory_management_client.models.DatasetFolder :param location: The location of the avro storage. :type location: ~data_factory_management_client.models.DatasetLocation - :param avro_compression_codec: Possible values include: "none", "deflate", "snappy", "xz", - "bzip2". - :type avro_compression_codec: str or - ~data_factory_management_client.models.AvroCompressionCodec + :param avro_compression_codec: The data avroCompressionCodec. Type: string (or Expression with + resultType string). + :type avro_compression_codec: object :param avro_compression_level: :type avro_compression_level: int """ @@ -1912,7 +1948,7 @@ class AvroDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'str'}, + 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'object'}, 'avro_compression_level': {'key': 'typeProperties.avroCompressionLevel', 'type': 'int'}, } @@ -1928,7 +1964,7 @@ def __init__( annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, location: Optional["DatasetLocation"] = None, - avro_compression_codec: Optional[Union[str, "AvroCompressionCodec"]] = None, + avro_compression_codec: Optional[object] = None, avro_compression_level: Optional[int] = None, **kwargs ): @@ -2031,7 +2067,7 @@ class CopySink(msrest.serialization.Model): """A copy activity sink. You probably want to use the sub-classes and not this class directly. 
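
The loosening of avro_compression_codec from the AvroCompressionCodec string enum to plain object is the headline change of this PR: the dataset now takes either a literal codec name or an expression. A hedged sketch — the reference_name-only construction of LinkedServiceReference is an assumption for brevity; adjust to the actual signature in this models module:

from azext_datafactory.vendored_sdks.datafactory import models

dataset = models.AvroDataset(
    # assumed minimal construction; "LS_AzureBlob" is a sample name
    linked_service_name=models.LinkedServiceReference(reference_name="LS_AzureBlob"),
    avro_compression_codec="snappy",  # a plain codec string still works
)

# New with this patch: an expression object that resolves to one of the
# former enum values ("none", "deflate", "snappy", "xz", "bzip2") at run time.
dataset.avro_compression_codec = {
    "type": "Expression",
    "value": "@dataset().codec",
}
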
Known - sub-classes are: AvroSink, AzureBlobFsSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureDatabricksDeltaLakeSink, AzureMySqlSink, AzurePostgreSqlSink, AzureQueueSink, AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDbMongoDbApiSink, CosmosDbSqlApiSink, DelimitedTextSink, DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SqlDwSink, SqlMiSink, SqlServerSink, SqlSink. + sub-classes are: AvroSink, AzureBlobFsSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureDatabricksDeltaLakeSink, AzureMySqlSink, AzurePostgreSqlSink, AzureQueueSink, AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDbMongoDbApiSink, CosmosDbSqlApiSink, DelimitedTextSink, DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, MongoDbAtlasSink, MongoDbV2Sink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SqlDwSink, SqlMiSink, SqlServerSink, SqlSink. All required parameters must be populated in order to send to Azure. @@ -2055,6 +2091,9 @@ class CopySink(msrest.serialization.Model): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :type disable_metrics_collection: object """ _validation = { @@ -2069,10 +2108,11 @@ class CopySink(msrest.serialization.Model): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, } _subtype_map = { - 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFsSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureDatabricksDeltaLakeSink': 'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSqlSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'RestSink': 'RestSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDwSink', 'SqlMISink': 'SqlMiSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink'} + 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFsSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureDatabricksDeltaLakeSink': 'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSqlSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'MongoDbAtlasSink': 'MongoDbAtlasSink', 'MongoDbV2Sink': 'MongoDbV2Sink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'RestSink': 'RestSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDwSink', 'SqlMISink': 'SqlMiSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink'} } def __init__( @@ -2084,6 +2124,7 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + 
disable_metrics_collection: Optional[object] = None, **kwargs ): super(CopySink, self).__init__(**kwargs) @@ -2094,6 +2135,7 @@ def __init__( self.sink_retry_count = sink_retry_count self.sink_retry_wait = sink_retry_wait self.max_concurrent_connections = max_concurrent_connections + self.disable_metrics_collection = disable_metrics_collection class AvroSink(CopySink): @@ -2121,6 +2163,9 @@ class AvroSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Avro store settings. :type store_settings: ~data_factory_management_client.models.StoreWriteSettings :param format_settings: Avro format settings. @@ -2139,6 +2184,7 @@ class AvroSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'AvroWriteSettings'}, } @@ -2152,11 +2198,12 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, store_settings: Optional["StoreWriteSettings"] = None, format_settings: Optional["AvroWriteSettings"] = None, **kwargs ): - super(AvroSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AvroSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AvroSink' # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -2181,6 +2228,9 @@ class AvroSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Avro store settings. :type store_settings: ~data_factory_management_client.models.StoreReadSettings :param additional_columns: Specifies the additional columns to be added to source data. 
Type: @@ -2198,6 +2248,7 @@ class AvroSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -2209,11 +2260,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, store_settings: Optional["StoreReadSettings"] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(AvroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AvroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AvroSource' # type: str self.store_settings = store_settings self.additional_columns = additional_columns @@ -2802,6 +2854,9 @@ class AzureBlobFsReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). 
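
The enlarged _subtype_map on CopySink a few hunks above (now listing MongoDbAtlasSink and MongoDbV2Sink) is what lets msrest route a raw payload to the concrete subclass via its type discriminator. A sketch that drives msrest's Deserializer directly; the model-registry construction mirrors what the generated client does internally:

from msrest import Deserializer

from azext_datafactory.vendored_sdks.datafactory import models

client_models = {
    name: cls for name, cls in vars(models).items() if isinstance(cls, type)
}
deserialize = Deserializer(client_models)

payload = {"type": "MongoDbAtlasSink", "disableMetricsCollection": True}
sink = deserialize("CopySink", payload)
print(type(sink).__name__)  # MongoDbAtlasSink, resolved via _subtype_map
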
:type recursive: object @@ -2839,6 +2894,7 @@ class AzureBlobFsReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -2855,6 +2911,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, @@ -2866,7 +2923,7 @@ def __init__( modified_datetime_end: Optional[object] = None, **kwargs ): - super(AzureBlobFsReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureBlobFsReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureBlobFSReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -2904,8 +2961,14 @@ class AzureBlobFsSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object + :param metadata: Specify the custom metadata to be added to sink data. Type: array of objects + (or Expression with resultType array of objects). 
+ :type metadata: list[~data_factory_management_client.models.MetadataItem] """ _validation = { @@ -2920,7 +2983,9 @@ class AzureBlobFsSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'metadata': {'key': 'metadata', 'type': '[MetadataItem]'}, } def __init__( @@ -2932,12 +2997,15 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, copy_behavior: Optional[object] = None, + metadata: Optional[List["MetadataItem"]] = None, **kwargs ): - super(AzureBlobFsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureBlobFsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureBlobFSSink' # type: str self.copy_behavior = copy_behavior + self.metadata = metadata class AzureBlobFsSource(CopySource): @@ -2959,6 +3027,9 @@ class AzureBlobFsSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType boolean). 
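
Besides disable_metrics_collection, AzureBlobFsSink also picks up a metadata list, letting the copy activity stamp custom metadata onto written files. A minimal sketch, assuming MetadataItem exposes the name/value pair its wire format suggests; the metadata names and values are illustrative:

from azext_datafactory.vendored_sdks.datafactory import models

sink = models.AzureBlobFsSink(
    copy_behavior="PreserveHierarchy",
    metadata=[
        # name/value are object-typed, so expressions are allowed too
        models.MetadataItem(name="sourceSystem", value="sap"),
        models.MetadataItem(
            name="loadedAt",
            value={"type": "Expression", "value": "@utcnow()"},
        ),
    ],
)
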
:type treat_empty_as_null: object @@ -2980,6 +3051,7 @@ class AzureBlobFsSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, @@ -2992,12 +3064,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, treat_empty_as_null: Optional[object] = None, skip_header_line_count: Optional[object] = None, recursive: Optional[object] = None, **kwargs ): - super(AzureBlobFsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureBlobFsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureBlobFSSource' # type: str self.treat_empty_as_null = treat_empty_as_null self.skip_header_line_count = skip_header_line_count @@ -3020,6 +3093,9 @@ class StoreWriteSettings(msrest.serialization.Model): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object """ @@ -3032,6 +3108,7 @@ class StoreWriteSettings(msrest.serialization.Model): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } @@ -3044,6 +3121,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, copy_behavior: Optional[object] = None, **kwargs ): @@ -3051,6 +3129,7 @@ def __init__( self.additional_properties = additional_properties self.type = 'StoreWriteSettings' # type: str self.max_concurrent_connections = max_concurrent_connections + self.disable_metrics_collection = disable_metrics_collection self.copy_behavior = copy_behavior @@ -3067,6 +3146,9 @@ class AzureBlobFsWriteSettings(StoreWriteSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. 
Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object :param block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer @@ -3082,6 +3164,7 @@ class AzureBlobFsWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, } @@ -3091,11 +3174,12 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, copy_behavior: Optional[object] = None, block_size_in_mb: Optional[object] = None, **kwargs ): - super(AzureBlobFsWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + super(AzureBlobFsWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs) self.type = 'AzureBlobFSWriteSettings' # type: str self.block_size_in_mb = block_size_in_mb @@ -3275,6 +3359,9 @@ class AzureBlobStorageReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). 
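
StoreWriteSettings threads the new flag through the same super().__init__ chain as the read settings, so subclasses such as AzureBlobFsWriteSettings accept it next to their own fields:

from azext_datafactory.vendored_sdks.datafactory import models

write_settings = models.AzureBlobFsWriteSettings(
    copy_behavior="FlattenHierarchy",
    block_size_in_mb=8,               # integer or expression object
    disable_metrics_collection=True,  # forwarded to StoreWriteSettings
)
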
:type recursive: object @@ -3315,6 +3402,7 @@ class AzureBlobStorageReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -3332,6 +3420,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, @@ -3344,7 +3433,7 @@ def __init__( modified_datetime_end: Optional[object] = None, **kwargs ): - super(AzureBlobStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureBlobStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureBlobStorageReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -3371,6 +3460,9 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object :param block_size_in_mb: Indicates the block size(MB) when writing data to blob. 
Type: integer @@ -3386,6 +3478,7 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'}, } @@ -3395,11 +3488,12 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, copy_behavior: Optional[object] = None, block_size_in_mb: Optional[object] = None, **kwargs ): - super(AzureBlobStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + super(AzureBlobStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs) self.type = 'AzureBlobStorageWriteSettings' # type: str self.block_size_in_mb = block_size_in_mb @@ -3741,6 +3835,9 @@ class AzureDatabricksDeltaLakeSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). 
:type pre_copy_script: object @@ -3761,6 +3858,7 @@ 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'import_settings': {'key': 'importSettings', 'type': 'AzureDatabricksDeltaLakeImportCommand'}, } @@ -3774,11 +3872,12 @@ def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, write_batch_size: Optional[object] = None, write_batch_timeout: Optional[object] = None, sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, pre_copy_script: Optional[object] = None, import_settings: Optional["AzureDatabricksDeltaLakeImportCommand"] = None, **kwargs ): - super(AzureDatabricksDeltaLakeSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureDatabricksDeltaLakeSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureDatabricksDeltaLakeSink' # type: str self.pre_copy_script = pre_copy_script self.import_settings = import_settings @@ -3803,6 +3902,9 @@ class AzureDatabricksDeltaLakeSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: Azure Databricks Delta Lake SQL query. Type: string (or Expression with resultType string).
:type query: object @@ -3821,6 +3923,7 @@ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'export_settings': {'key': 'exportSettings', 'type': 'AzureDatabricksDeltaLakeExportCommand'}, } @@ -3832,11 +3935,12 @@ def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query: Optional[object] = None, export_settings: Optional["AzureDatabricksDeltaLakeExportCommand"] = None, **kwargs ): - super(AzureDatabricksDeltaLakeSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureDatabricksDeltaLakeSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureDatabricksDeltaLakeSource' # type: str self.query = query self.export_settings = export_settings @@ -4248,6 +4352,9 @@ class AzureDataExplorerSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param ingestion_mapping_name: A name of a pre-created CSV mapping that was defined on the target Kusto table. Type: string.
:type ingestion_mapping_name: object @@ -4271,6 +4378,7 @@ class AzureDataExplorerSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'}, 'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'}, 'flush_immediately': {'key': 'flushImmediately', 'type': 'object'}, @@ -4285,12 +4393,13 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, ingestion_mapping_name: Optional[object] = None, ingestion_mapping_as_json: Optional[object] = None, flush_immediately: Optional[object] = None, **kwargs ): - super(AzureDataExplorerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureDataExplorerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureDataExplorerSink' # type: str self.ingestion_mapping_name = ingestion_mapping_name self.ingestion_mapping_as_json = ingestion_mapping_as_json @@ -4316,6 +4425,9 @@ class AzureDataExplorerSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: Required. Database query. Should be a Kusto Query Language (KQL) query. Type: string (or Expression with resultType string). 
:type query: object @@ -4341,6 +4453,7 @@ class AzureDataExplorerSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'no_truncation': {'key': 'noTruncation', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, @@ -4355,12 +4468,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, no_truncation: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(AzureDataExplorerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureDataExplorerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureDataExplorerSource' # type: str self.query = query self.no_truncation = no_truncation @@ -4779,6 +4893,9 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). 
:type recursive: object @@ -4824,6 +4941,7 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -4842,6 +4960,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, @@ -4855,7 +4974,7 @@ def __init__( modified_datetime_end: Optional[object] = None, **kwargs ): - super(AzureDataLakeStoreReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureDataLakeStoreReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureDataLakeStoreReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -4895,6 +5014,9 @@ class AzureDataLakeStoreSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object :param enable_adls_single_file_parallel: Single File Parallel. 
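Every read-settings, write-settings, source, and sink model in this file gains the same optional disable_metrics_collection keyword and forwards it to its base class through super().__init__, as the hunks above show, so the flag is settable on any concrete subclass. A minimal usage sketch, assuming the vendored models package re-exports these classes (the import path and the serialize() checks are illustrative assumptions, not part of the patch):

    from azext_datafactory.vendored_sdks.datafactory import models  # assumed import path

    # Accepted on any concrete subclass because each __init__ forwards the
    # keyword to the StoreReadSettings/CopySource/CopySink base.
    read_settings = models.AzureDataLakeStoreReadSettings(
        recursive=True,
        disable_metrics_collection=True,  # new optional keyword; defaults to None
    )

    # msrest serializes attributes under the wire names declared in
    # _attribute_map, so the payload carries the camelCase key added here.
    payload = read_settings.serialize()
    assert payload['disableMetricsCollection'] is True
    assert payload['type'] == 'AzureDataLakeStoreReadSettings'

Because the field is declared as object rather than bool, a Data Factory expression object such as {'type': 'Expression', 'value': '@pipeline().parameters.noMetrics'} can be supplied in place of a literal boolean.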
@@ -4913,6 +5035,7 @@ class AzureDataLakeStoreSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'}, } @@ -4926,11 +5049,12 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, copy_behavior: Optional[object] = None, enable_adls_single_file_parallel: Optional[object] = None, **kwargs ): - super(AzureDataLakeStoreSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureDataLakeStoreSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureDataLakeStoreSink' # type: str self.copy_behavior = copy_behavior self.enable_adls_single_file_parallel = enable_adls_single_file_parallel @@ -4955,6 +5079,9 @@ class AzureDataLakeStoreSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). 
:type recursive: object @@ -4970,6 +5097,7 @@ class AzureDataLakeStoreSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, } @@ -4980,10 +5108,11 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, **kwargs ): - super(AzureDataLakeStoreSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureDataLakeStoreSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureDataLakeStoreSource' # type: str self.recursive = recursive @@ -5001,6 +5130,9 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object :param expiry_date_time: Specifies the expiry time of the written files. The time is applied to @@ -5017,6 +5149,7 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'expiry_date_time': {'key': 'expiryDateTime', 'type': 'object'}, } @@ -5026,11 +5159,12 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, copy_behavior: Optional[object] = None, expiry_date_time: Optional[object] = None, **kwargs ): - super(AzureDataLakeStoreWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + super(AzureDataLakeStoreWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs) self.type = 'AzureDataLakeStoreWriteSettings' # type: str self.expiry_date_time = expiry_date_time @@ -5193,6 +5327,9 @@ class AzureFileStorageReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. 
Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -5233,6 +5370,7 @@ class AzureFileStorageReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -5250,6 +5388,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, @@ -5262,7 +5401,7 @@ def __init__( modified_datetime_end: Optional[object] = None, **kwargs ): - super(AzureFileStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureFileStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureFileStorageReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -5289,6 +5428,9 @@ class AzureFileStorageWriteSettings(StoreWriteSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. 
:type copy_behavior: object """ @@ -5301,6 +5443,7 @@ class AzureFileStorageWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } @@ -5309,10 +5452,11 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, copy_behavior: Optional[object] = None, **kwargs ): - super(AzureFileStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + super(AzureFileStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs) self.type = 'AzureFileStorageWriteSettings' # type: str @@ -5680,6 +5824,9 @@ class AzureMariaDbSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -5701,6 +5848,7 @@ class AzureMariaDbSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -5713,12 +5861,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(AzureMariaDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(AzureMariaDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AzureMariaDBSource' # type: str self.query = query @@ -6375,6 +6524,9 @@ class AzureMySqlSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). 
:type pre_copy_script: object @@ -6392,6 +6544,7 @@ class AzureMySqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -6404,10 +6557,11 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, pre_copy_script: Optional[object] = None, **kwargs ): - super(AzureMySqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureMySqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureMySqlSink' # type: str self.pre_copy_script = pre_copy_script @@ -6431,6 +6585,9 @@ class AzureMySqlSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -6451,6 +6608,7 @@ class AzureMySqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -6463,12 +6621,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(AzureMySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(AzureMySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AzureMySqlSource' # type: str self.query = query @@ -6638,6 +6797,9 @@ class AzurePostgreSqlSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). 
:type pre_copy_script: object @@ -6655,6 +6817,7 @@ class AzurePostgreSqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -6667,10 +6830,11 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, pre_copy_script: Optional[object] = None, **kwargs ): - super(AzurePostgreSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzurePostgreSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzurePostgreSqlSink' # type: str self.pre_copy_script = pre_copy_script @@ -6694,6 +6858,9 @@ class AzurePostgreSqlSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -6715,6 +6882,7 @@ class AzurePostgreSqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -6727,12 +6895,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(AzurePostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(AzurePostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AzurePostgreSqlSource' # type: str self.query = query @@ -6843,6 +7012,9 @@ class AzureQueueSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :type disable_metrics_collection: object """ _validation = { @@ -6857,6 +7029,7 @@ class AzureQueueSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, } def __init__( @@ -6868,9 +7041,10 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, **kwargs ): - super(AzureQueueSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureQueueSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureQueueSink' # type: str @@ -6969,6 +7143,9 @@ class AzureSearchIndexSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: Specify the write behavior when upserting documents into Azure Search Index. Possible values include: "Merge", "Upload". 
:type write_behavior: str or @@ -6987,6 +7164,7 @@ class AzureSearchIndexSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, } @@ -6999,10 +7177,11 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, write_behavior: Optional[Union[str, "AzureSearchIndexWriteBehaviorType"]] = None, **kwargs ): - super(AzureSearchIndexSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureSearchIndexSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureSearchIndexSink' # type: str self.write_behavior = write_behavior @@ -7545,6 +7724,9 @@ class AzureSqlSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). 
:type sql_writer_stored_procedure_name: object @@ -7577,6 +7759,7 @@ class AzureSqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, @@ -7594,6 +7777,7 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, sql_writer_stored_procedure_name: Optional[object] = None, sql_writer_table_type: Optional[object] = None, pre_copy_script: Optional[object] = None, @@ -7602,7 +7786,7 @@ def __init__( table_option: Optional[object] = None, **kwargs ): - super(AzureSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureSqlSink' # type: str self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type @@ -7631,6 +7815,9 @@ class AzureSqlSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -7666,6 +7853,7 @@ class AzureSqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, @@ -7683,6 +7871,7 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, sql_reader_query: Optional[object] = None, @@ -7693,7 +7882,7 @@ def __init__( partition_settings: Optional["SqlPartitionSettings"] = None, **kwargs ): - super(AzureSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(AzureSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AzureSqlSource' # type: str self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name @@ -7955,6 +8144,9 @@ class AzureTableSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param azure_table_default_partition_key_value: Azure Table default partition key value. Type: string (or Expression with resultType string). 
:type azure_table_default_partition_key_value: object @@ -7981,6 +8173,7 @@ class AzureTableSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'azure_table_default_partition_key_value': {'key': 'azureTableDefaultPartitionKeyValue', 'type': 'object'}, 'azure_table_partition_key_name': {'key': 'azureTablePartitionKeyName', 'type': 'object'}, 'azure_table_row_key_name': {'key': 'azureTableRowKeyName', 'type': 'object'}, @@ -7996,13 +8189,14 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, azure_table_default_partition_key_value: Optional[object] = None, azure_table_partition_key_name: Optional[object] = None, azure_table_row_key_name: Optional[object] = None, azure_table_insert_type: Optional[object] = None, **kwargs ): - super(AzureTableSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureTableSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'AzureTableSink' # type: str self.azure_table_default_partition_key_value = azure_table_default_partition_key_value self.azure_table_partition_key_name = azure_table_partition_key_name @@ -8029,6 +8223,9 @@ class AzureTableSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -8053,6 +8250,7 @@ class AzureTableSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'}, @@ -8066,13 +8264,14 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, azure_table_source_query: Optional[object] = None, azure_table_source_ignore_table_not_found: Optional[object] = None, **kwargs ): - super(AzureTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(AzureTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AzureTableSource' # type: str self.azure_table_source_query = azure_table_source_query self.azure_table_source_ignore_table_not_found = azure_table_source_ignore_table_not_found @@ -8327,6 +8526,9 @@ class BinarySink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Binary store settings. 
:type store_settings: ~data_factory_management_client.models.StoreWriteSettings """ @@ -8343,6 +8545,7 @@ class BinarySink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, } @@ -8355,10 +8558,11 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, store_settings: Optional["StoreWriteSettings"] = None, **kwargs ): - super(BinarySink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(BinarySink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'BinarySink' # type: str self.store_settings = store_settings @@ -8382,6 +8586,9 @@ class BinarySource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Binary store settings. :type store_settings: ~data_factory_management_client.models.StoreReadSettings :param format_settings: Binary format settings. 
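As the BinarySink hunk above shows, the flag is declared independently on the copy source/sink and on the nested read/write store settings, so the two levels can opt out separately; which level the service ultimately honors is a service-side concern, the SDK simply round-trips both. A hedged composition sketch under the same import-path assumption as before:

    from azext_datafactory.vendored_sdks.datafactory import models  # assumed import path

    sink = models.BinarySink(
        disable_metrics_collection=True,  # sink-level flag
        store_settings=models.AzureBlobFsWriteSettings(
            block_size_in_mb=8,               # literal value, or an Expression object
            disable_metrics_collection=True,  # store-settings-level flag
        ),
    )

    # Both levels appear in the serialized payload under their own
    # 'disableMetricsCollection' keys.
    body = sink.serialize()
    assert body['disableMetricsCollection'] is True
    assert body['storeSettings']['disableMetricsCollection'] is True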
@@ -8398,6 +8605,7 @@ class BinarySource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'BinaryReadSettings'}, } @@ -8409,11 +8617,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, store_settings: Optional["StoreReadSettings"] = None, format_settings: Optional["BinaryReadSettings"] = None, **kwargs ): - super(BinarySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(BinarySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'BinarySource' # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -8641,6 +8850,9 @@ class BlobSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param blob_writer_overwrite_files: Blob writer overwrite files. Type: boolean (or Expression with resultType boolean). :type blob_writer_overwrite_files: object @@ -8652,6 +8864,9 @@ class BlobSink(CopySink): :type blob_writer_add_header: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object + :param metadata: Specify the custom metadata to be added to sink data. Type: array of objects + (or Expression with resultType array of objects). 
+ :type metadata: list[~data_factory_management_client.models.MetadataItem] """ _validation = { @@ -8666,10 +8881,12 @@ class BlobSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'}, 'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'}, 'blob_writer_add_header': {'key': 'blobWriterAddHeader', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'metadata': {'key': 'metadata', 'type': '[MetadataItem]'}, } def __init__( @@ -8681,18 +8898,21 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, blob_writer_overwrite_files: Optional[object] = None, blob_writer_date_time_format: Optional[object] = None, blob_writer_add_header: Optional[object] = None, copy_behavior: Optional[object] = None, + metadata: Optional[List["MetadataItem"]] = None, **kwargs ): - super(BlobSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(BlobSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'BlobSink' # type: str self.blob_writer_overwrite_files = blob_writer_overwrite_files self.blob_writer_date_time_format = blob_writer_date_time_format self.blob_writer_add_header = blob_writer_add_header self.copy_behavior = copy_behavior + self.metadata = metadata class BlobSource(CopySource): @@ -8714,6 +8934,9 @@ class BlobSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType boolean). 
:type treat_empty_as_null: object @@ -8735,6 +8958,7 @@ class BlobSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'}, 'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, @@ -8747,12 +8971,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, treat_empty_as_null: Optional[object] = None, skip_header_line_count: Optional[object] = None, recursive: Optional[object] = None, **kwargs ): - super(BlobSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(BlobSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'BlobSource' # type: str self.treat_empty_as_null = treat_empty_as_null self.skip_header_line_count = skip_header_line_count @@ -8931,6 +9156,9 @@ class CassandraSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -8960,6 +9188,7 @@ class CassandraSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -8973,13 +9202,14 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, consistency_level: Optional[Union[str, "CassandraSourceReadConsistencyLevels"]] = None, **kwargs ): - super(CassandraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(CassandraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'CassandraSource' # type: str self.query = query self.consistency_level = consistency_level @@ -9325,8 +9555,8 @@ class CommonDataServiceForAppsLinkedService(LinkedService): :param deployment_type: Required. The deployment type of the Common Data Service for Apps instance. 'Online' for Common Data Service for Apps Online and 'OnPremisesWithIfd' for Common Data Service for Apps on-premises with Ifd. Type: string (or Expression with resultType - string). Possible values include: "Online", "OnPremisesWithIfd". - :type deployment_type: str or ~data_factory_management_client.models.DynamicsDeploymentType + string). + :type deployment_type: object :param host_name: The host name of the on-premises Common Data Service for Apps server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). @@ -9347,10 +9577,8 @@ class CommonDataServiceForAppsLinkedService(LinkedService): :param authentication_type: Required. The authentication type to connect to Common Data Service for Apps server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario. 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or - Expression with resultType string). Possible values include: "Office365", "Ifd", - "AADServicePrincipal". - :type authentication_type: str or - ~data_factory_management_client.models.DynamicsAuthenticationType + Expression with resultType string). + :type authentication_type: object :param username: User name to access the Common Data Service for Apps instance. Type: string (or Expression with resultType string). 
:type username: object @@ -9361,10 +9589,8 @@ class CommonDataServiceForAppsLinkedService(LinkedService): :type service_principal_id: object :param service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). Possible values include: - "ServicePrincipalKey", "ServicePrincipalCert". - :type service_principal_credential_type: str or - ~data_factory_management_client.models.DynamicsServicePrincipalCredentialType + for certificate. Type: string (or Expression with resultType string). + :type service_principal_credential_type: object :param service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If @@ -9390,16 +9616,16 @@ class CommonDataServiceForAppsLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'}, 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, 'port': {'key': 'typeProperties.port', 'type': 'object'}, 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'object'}, 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -9407,8 +9633,8 @@ class CommonDataServiceForAppsLinkedService(LinkedService): def __init__( self, *, - deployment_type: Union[str, "DynamicsDeploymentType"], - authentication_type: Union[str, "DynamicsAuthenticationType"], + deployment_type: object, + authentication_type: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, @@ -9421,7 +9647,7 @@ def __init__( username: Optional[object] = None, password: Optional["SecretBase"] = None, service_principal_id: Optional[object] = None, - service_principal_credential_type: Optional[Union[str, "DynamicsServicePrincipalCredentialType"]] = None, + service_principal_credential_type: Optional[object] = None, service_principal_credential: Optional["SecretBase"] = None, encrypted_credential: Optional[object] = None, **kwargs @@ -9467,6 +9693,9 @@ class CommonDataServiceForAppsSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink 
data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: Required. The write behavior for the operation. Possible values include: "Upsert". :type write_behavior: str or ~data_factory_management_client.models.DynamicsSinkWriteBehavior @@ -9492,6 +9721,7 @@ class CommonDataServiceForAppsSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, @@ -9507,11 +9737,12 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, ignore_null_values: Optional[object] = None, alternate_key_name: Optional[object] = None, **kwargs ): - super(CommonDataServiceForAppsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(CommonDataServiceForAppsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'CommonDataServiceForAppsSink' # type: str self.write_behavior = write_behavior self.ignore_null_values = ignore_null_values @@ -9537,6 +9768,9 @@ class CommonDataServiceForAppsSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: FetchXML is a proprietary query language that is used in Microsoft Common Data Service for Apps (online & on-premises). Type: string (or Expression with resultType string). 
:type query: object @@ -9555,6 +9789,7 @@ class CommonDataServiceForAppsSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -9566,11 +9801,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(CommonDataServiceForAppsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(CommonDataServiceForAppsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'CommonDataServiceForAppsSource' # type: str self.query = query self.additional_columns = additional_columns @@ -9836,6 +10072,9 @@ class ConcurSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -9857,6 +10096,7 @@ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -9869,12 +10109,13 @@ def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(ConcurSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(ConcurSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'ConcurSource' # type: str self.query = query @@ -10458,6 +10699,9 @@ class CosmosDbMongoDbApiSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: Specifies whether the document with the same key should be overwritten (upsert) rather than throwing an exception (insert). The default value is "insert". Type: string (or Expression with resultType string).
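Reviewer note: the three-part edit above (a docstring entry, an _attribute_map entry, and a new keyword forwarded to the base class through super().__init__) repeats verbatim for every source and sink in this file. A minimal sketch of how the new field is expected to surface, assuming the vendored namespace this extension uses (azext_datafactory.vendored_sdks.datafactory) re-exports the generated models as usual; the pipeline parameter name is illustrative:

    from azext_datafactory.vendored_sdks.datafactory import models

    # Object-typed fields accept either a literal value or an ADF expression dict.
    sink = models.CosmosDbMongoDbApiSink(
        write_behavior="upsert",
        disable_metrics_collection={
            "value": "@pipeline().parameters.disableMetrics",  # hypothetical parameter
            "type": "Expression",
        },
    )
    assert sink.type == 'CosmosDbMongoDbApiSink'  # discriminator set in __init__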
@@ -10476,6 +10720,7 @@ class CosmosDbMongoDbApiSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } @@ -10488,10 +10733,11 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, write_behavior: Optional[object] = None, **kwargs ): - super(CosmosDbMongoDbApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(CosmosDbMongoDbApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'CosmosDbMongoDbApiSink' # type: str self.write_behavior = write_behavior @@ -10515,6 +10761,9 @@ class CosmosDbMongoDbApiSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param filter: Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). 
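Since each addition maps a snake_case attribute to a camelCase wire key via _attribute_map, a quick round-trip through msrest's Model.serialize() (which these generated models inherit) should show the value passing through untouched; a sketch under the same namespace assumption as above, with an illustrative filter document:

    from azext_datafactory.vendored_sdks.datafactory import models

    source = models.CosmosDbMongoDbApiSource(
        filter='{"status": "active"}',
        batch_size=100,
        disable_metrics_collection=True,
    )
    body = source.serialize()
    # Expected shape, with keys renamed per _attribute_map:
    # {'type': 'CosmosDbMongoDbApiSource', 'filter': '{"status": "active"}',
    #  'batchSize': 100, 'disableMetricsCollection': True}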
@@ -10544,6 +10793,7 @@ class CosmosDbMongoDbApiSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'filter': {'key': 'filter', 'type': 'object'}, 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, @@ -10558,6 +10808,7 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, filter: Optional[object] = None, cursor_methods: Optional["MongoDbCursorMethodsProperties"] = None, batch_size: Optional[object] = None, @@ -10565,7 +10816,7 @@ def __init__( additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(CosmosDbMongoDbApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(CosmosDbMongoDbApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'CosmosDbMongoDbApiSource' # type: str self.filter = filter self.cursor_methods = cursor_methods @@ -10669,6 +10920,9 @@ class CosmosDbSqlApiSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: Describes how to write data to Azure Cosmos DB. Type: string (or Expression with resultType string). Allowed values: insert and upsert. 
:type write_behavior: object @@ -10686,6 +10940,7 @@ class CosmosDbSqlApiSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } @@ -10698,10 +10953,11 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, write_behavior: Optional[object] = None, **kwargs ): - super(CosmosDbSqlApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(CosmosDbSqlApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'CosmosDbSqlApiSink' # type: str self.write_behavior = write_behavior @@ -10725,6 +10981,9 @@ class CosmosDbSqlApiSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: SQL API query. Type: string (or Expression with resultType string). :type query: object :param page_size: Page size of the result. 
Type: integer (or Expression with resultType @@ -10751,6 +11010,7 @@ class CosmosDbSqlApiSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'page_size': {'key': 'pageSize', 'type': 'object'}, 'preferred_regions': {'key': 'preferredRegions', 'type': 'object'}, @@ -10765,6 +11025,7 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query: Optional[object] = None, page_size: Optional[object] = None, preferred_regions: Optional[object] = None, @@ -10772,7 +11033,7 @@ def __init__( additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(CosmosDbSqlApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(CosmosDbSqlApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'CosmosDbSqlApiSource' # type: str self.query = query self.page_size = page_size @@ -10865,6 +11126,9 @@ class CouchbaseSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -10886,6 +11150,7 @@ class CouchbaseSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -10898,12 +11163,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(CouchbaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(CouchbaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'CouchbaseSource' # type: str self.query = query @@ -12594,8 +12860,8 @@ class DatasetDeflateCompression(DatasetCompression): :type additional_properties: dict[str, object] :param type: Required. Type of dataset compression.Constant filled by server. :type type: str - :param level: The Deflate compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel + :param level: The Deflate compression level. + :type level: object """ _validation = { @@ -12605,14 +12871,14 @@ class DatasetDeflateCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, - level: Optional[Union[str, "DatasetCompressionLevel"]] = None, + level: Optional[object] = None, **kwargs ): super(DatasetDeflateCompression, self).__init__(additional_properties=additional_properties, **kwargs) @@ -12651,8 +12917,8 @@ class DatasetGZipCompression(DatasetCompression): :type additional_properties: dict[str, object] :param type: Required. Type of dataset compression.Constant filled by server. :type type: str - :param level: The GZip compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel + :param level: The GZip compression level. 
+ :type level: object """ _validation = { @@ -12662,14 +12928,14 @@ class DatasetGZipCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, - level: Optional[Union[str, "DatasetCompressionLevel"]] = None, + level: Optional[object] = None, **kwargs ): super(DatasetGZipCompression, self).__init__(additional_properties=additional_properties, **kwargs) @@ -12867,8 +13133,8 @@ class DatasetTarGZipCompression(DatasetCompression): :type additional_properties: dict[str, object] :param type: Required. Type of dataset compression.Constant filled by server. :type type: str - :param level: The TarGZip compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel + :param level: The TarGZip compression level. + :type level: object """ _validation = { @@ -12878,14 +13144,14 @@ class DatasetTarGZipCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, - level: Optional[Union[str, "DatasetCompressionLevel"]] = None, + level: Optional[object] = None, **kwargs ): super(DatasetTarGZipCompression, self).__init__(additional_properties=additional_properties, **kwargs) @@ -12903,8 +13169,8 @@ class DatasetZipDeflateCompression(DatasetCompression): :type additional_properties: dict[str, object] :param type: Required. Type of dataset compression.Constant filled by server. :type type: str - :param level: The ZipDeflate compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel + :param level: The ZipDeflate compression level. + :type level: object """ _validation = { @@ -12914,14 +13180,14 @@ class DatasetZipDeflateCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, - level: Optional[Union[str, "DatasetCompressionLevel"]] = None, + level: Optional[object] = None, **kwargs ): super(DatasetZipDeflateCompression, self).__init__(additional_properties=additional_properties, **kwargs) @@ -13051,6 +13317,9 @@ class Db2Source(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -13071,6 +13340,7 @@ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -13083,12 +13353,13 @@ def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(Db2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(Db2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'Db2Source' # type: str self.query = query @@ -13326,12 +13597,11 @@ class DelimitedTextDataset(Dataset): https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). :type encoding_name: object - :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo", "bzip2", - "deflate", "zipDeflate", "lz4", "tar", "tarGZip". - :type compression_codec: str or ~data_factory_management_client.models.CompressionCodec - :param compression_level: The data compression method used for DelimitedText. Possible values - include: "Optimal", "Fastest". - :type compression_level: str or ~data_factory_management_client.models.DatasetCompressionLevel + :param compression_codec: The data compression codec. Type: string (or Expression with + resultType string). + :type compression_codec: object + :param compression_level: The data compression level used for DelimitedText. + :type compression_level: object :param quote_char: The quote character. Type: string (or Expression with resultType string). :type quote_char: object :param escape_char: The escape character. Type: string (or Expression with resultType string).
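This hunk is the substance of the enum-to-object change: compressionCodec and the compression levels lose their closed string enums so they can carry ADF expressions. A hedged sketch (the linked-service reference name and dataset parameters are illustrative; LinkedServiceReference is assumed to keep its usual reference_name constructor in this generation):

    from azext_datafactory.vendored_sdks.datafactory import models

    ds = models.DelimitedTextDataset(
        linked_service_name=models.LinkedServiceReference(reference_name="my_blob_ls"),
        # Previously restricted to the CompressionCodec enum; now parameterizable.
        compression_codec={"value": "@dataset().codec", "type": "Expression"},
        compression_level="Optimal",  # plain strings remain valid for object-typed fields
    )

    # The DatasetCompression subclasses get the same loosening on 'level'.
    gzip = models.DatasetGZipCompression(
        level={"value": "@dataset().gzipLevel", "type": "Expression"}
    )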
@@ -13363,8 +13633,8 @@ class DelimitedTextDataset(Dataset): 'column_delimiter': {'key': 'typeProperties.columnDelimiter', 'type': 'object'}, 'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'}, 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, - 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'}, - 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'str'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, + 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'object'}, 'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'}, 'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'}, 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, @@ -13386,8 +13656,8 @@ def __init__( column_delimiter: Optional[object] = None, row_delimiter: Optional[object] = None, encoding_name: Optional[object] = None, - compression_codec: Optional[Union[str, "CompressionCodec"]] = None, - compression_level: Optional[Union[str, "DatasetCompressionLevel"]] = None, + compression_codec: Optional[object] = None, + compression_level: Optional[object] = None, quote_char: Optional[object] = None, escape_char: Optional[object] = None, first_row_as_header: Optional[object] = None, @@ -13475,6 +13745,9 @@ class DelimitedTextSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: DelimitedText store settings. :type store_settings: ~data_factory_management_client.models.StoreWriteSettings :param format_settings: DelimitedText format settings. 
@@ -13493,6 +13766,7 @@ class DelimitedTextSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSettings'}, } @@ -13506,11 +13780,12 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, store_settings: Optional["StoreWriteSettings"] = None, format_settings: Optional["DelimitedTextWriteSettings"] = None, **kwargs ): - super(DelimitedTextSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(DelimitedTextSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'DelimitedTextSink' # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -13535,6 +13810,9 @@ class DelimitedTextSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: DelimitedText store settings. :type store_settings: ~data_factory_management_client.models.StoreReadSettings :param format_settings: DelimitedText format settings. 
@@ -13554,6 +13832,7 @@ class DelimitedTextSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSettings'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, @@ -13566,12 +13845,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, store_settings: Optional["StoreReadSettings"] = None, format_settings: Optional["DelimitedTextReadSettings"] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(DelimitedTextSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(DelimitedTextSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'DelimitedTextSource' # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -13804,6 +14084,9 @@ class DocumentDbCollectionSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param nesting_separator: Nested properties separator. Default is . (dot). Type: string (or Expression with resultType string). 
:type nesting_separator: object @@ -13824,6 +14107,7 @@ class DocumentDbCollectionSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } @@ -13837,11 +14121,12 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, nesting_separator: Optional[object] = None, write_behavior: Optional[object] = None, **kwargs ): - super(DocumentDbCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(DocumentDbCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'DocumentDbCollectionSink' # type: str self.nesting_separator = nesting_separator self.write_behavior = write_behavior @@ -13866,6 +14151,9 @@ class DocumentDbCollectionSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: Documents query. Type: string (or Expression with resultType string). :type query: object :param nesting_separator: Nested properties separator. 
Type: string (or Expression with @@ -13889,6 +14177,7 @@ class DocumentDbCollectionSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, @@ -13902,13 +14191,14 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query: Optional[object] = None, nesting_separator: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(DocumentDbCollectionSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(DocumentDbCollectionSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'DocumentDbCollectionSource' # type: str self.query = query self.nesting_separator = nesting_separator @@ -14000,6 +14290,9 @@ class DrillSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -14021,6 +14314,7 @@ class DrillSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -14033,12 +14327,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(DrillSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(DrillSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'DrillSource' # type: str self.query = query @@ -14361,6 +14656,9 @@ class DynamicsAxSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -14387,6 +14685,7 @@ class DynamicsAxSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -14400,13 +14699,14 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, http_request_timeout: Optional[object] = None, **kwargs ): - super(DynamicsAxSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(DynamicsAxSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'DynamicsAXSource' # type: str self.query = query self.http_request_timeout = http_request_timeout @@ -14501,9 +14801,8 @@ class DynamicsCrmLinkedService(LinkedService): :type annotations: list[object] :param deployment_type: Required. The deployment type of the Dynamics CRM instance. 'Online' for Dynamics CRM Online and 'OnPremisesWithIfd' for Dynamics CRM on-premises with Ifd. Type: - string (or Expression with resultType string). Possible values include: "Online", - "OnPremisesWithIfd". - :type deployment_type: str or ~data_factory_management_client.models.DynamicsDeploymentType + string (or Expression with resultType string). + :type deployment_type: object :param host_name: The host name of the on-premises Dynamics CRM server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). @@ -14522,10 +14821,8 @@ class DynamicsCrmLinkedService(LinkedService): :param authentication_type: Required. The authentication type to connect to Dynamics CRM server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or - Expression with resultType string). Possible values include: "Office365", "Ifd", - "AADServicePrincipal". - :type authentication_type: str or - ~data_factory_management_client.models.DynamicsAuthenticationType + Expression with resultType string). + :type authentication_type: object :param username: User name to access the Dynamics CRM instance. Type: string (or Expression with resultType string). :type username: object @@ -14536,10 +14833,8 @@ class DynamicsCrmLinkedService(LinkedService): :type service_principal_id: object :param service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 
'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). Possible values include: - "ServicePrincipalKey", "ServicePrincipalCert". - :type service_principal_credential_type: str or - ~data_factory_management_client.models.DynamicsServicePrincipalCredentialType + for certificate. Type: string (or Expression with resultType string). + :type service_principal_credential_type: object :param service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If @@ -14565,16 +14860,16 @@ class DynamicsCrmLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'}, 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, 'port': {'key': 'typeProperties.port', 'type': 'object'}, 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'object'}, 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -14582,8 +14877,8 @@ class DynamicsCrmLinkedService(LinkedService): def __init__( self, *, - deployment_type: Union[str, "DynamicsDeploymentType"], - authentication_type: Union[str, "DynamicsAuthenticationType"], + deployment_type: object, + authentication_type: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, @@ -14596,7 +14891,7 @@ def __init__( username: Optional[object] = None, password: Optional["SecretBase"] = None, service_principal_id: Optional[object] = None, - service_principal_credential_type: Optional[Union[str, "DynamicsServicePrincipalCredentialType"]] = None, + service_principal_credential_type: Optional[object] = None, service_principal_credential: Optional["SecretBase"] = None, encrypted_credential: Optional[object] = None, **kwargs @@ -14642,6 +14937,9 @@ class DynamicsCrmSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :type disable_metrics_collection: object :param write_behavior: Required. The write behavior for the operation. Possible values include: "Upsert". :type write_behavior: str or ~data_factory_management_client.models.DynamicsSinkWriteBehavior @@ -14667,6 +14965,7 @@ class DynamicsCrmSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, @@ -14682,11 +14981,12 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, ignore_null_values: Optional[object] = None, alternate_key_name: Optional[object] = None, **kwargs ): - super(DynamicsCrmSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(DynamicsCrmSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'DynamicsCrmSink' # type: str self.write_behavior = write_behavior self.ignore_null_values = ignore_null_values @@ -14712,6 +15012,9 @@ class DynamicsCrmSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics CRM (online & on-premises). Type: string (or Expression with resultType string). 
:type query: object @@ -14730,6 +15033,7 @@ class DynamicsCrmSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -14741,11 +15045,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(DynamicsCrmSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(DynamicsCrmSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'DynamicsCrmSource' # type: str self.query = query self.additional_columns = additional_columns @@ -14840,8 +15145,8 @@ class DynamicsLinkedService(LinkedService): :type annotations: list[object] :param deployment_type: Required. The deployment type of the Dynamics instance. 'Online' for Dynamics Online and 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or - Expression with resultType string). Possible values include: "Online", "OnPremisesWithIfd". - :type deployment_type: str or ~data_factory_management_client.models.DynamicsDeploymentType + Expression with resultType string). + :type deployment_type: object :param host_name: The host name of the on-premises Dynamics server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). :type host_name: object @@ -14859,9 +15164,8 @@ class DynamicsLinkedService(LinkedService): :param authentication_type: Required. The authentication type to connect to Dynamics server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or Expression with - resultType string). Possible values include: "Office365", "Ifd", "AADServicePrincipal". - :type authentication_type: str or - ~data_factory_management_client.models.DynamicsAuthenticationType + resultType string). + :type authentication_type: object :param username: User name to access the Dynamics instance. Type: string (or Expression with resultType string). :type username: object @@ -14872,10 +15176,8 @@ class DynamicsLinkedService(LinkedService): :type service_principal_id: object :param service_principal_credential_type: The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). Possible values include: - "ServicePrincipalKey", "ServicePrincipalCert". - :type service_principal_credential_type: str or - ~data_factory_management_client.models.DynamicsServicePrincipalCredentialType + for certificate. 
Type: string (or Expression with resultType string). + :type service_principal_credential_type: str :param service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If @@ -14901,12 +15203,12 @@ class DynamicsLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, + 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'}, 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, 'port': {'key': 'typeProperties.port', 'type': 'object'}, 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, - 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, @@ -14918,8 +15220,8 @@ class DynamicsLinkedService(LinkedService): def __init__( self, *, - deployment_type: Union[str, "DynamicsDeploymentType"], - authentication_type: Union[str, "DynamicsAuthenticationType"], + deployment_type: object, + authentication_type: object, additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, @@ -14932,7 +15234,7 @@ def __init__( username: Optional[object] = None, password: Optional["SecretBase"] = None, service_principal_id: Optional[object] = None, - service_principal_credential_type: Optional[Union[str, "DynamicsServicePrincipalCredentialType"]] = None, + service_principal_credential_type: Optional[str] = None, service_principal_credential: Optional["SecretBase"] = None, encrypted_credential: Optional[object] = None, **kwargs @@ -14978,6 +15280,9 @@ class DynamicsSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: Required. The write behavior for the operation. Possible values include: "Upsert". 
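Note the asymmetry: DynamicsLinkedService narrows servicePrincipalCredentialType to a plain str (only the enum constraint is dropped), whereas the DynamicsCrm variant above widens it to object. A sketch of server-to-server authentication under this signature; the id and secret are placeholders:

    ls = models.DynamicsLinkedService(
        deployment_type="Online",
        authentication_type="AADServicePrincipal",
        service_principal_id="00000000-0000-0000-0000-000000000000",  # placeholder app id
        service_principal_credential_type="ServicePrincipalKey",      # plain str after this patch
        service_principal_credential=models.SecureString(value="<placeholder-secret>"),
    )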
:type write_behavior: str or ~data_factory_management_client.models.DynamicsSinkWriteBehavior @@ -15003,6 +15308,7 @@ class DynamicsSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, 'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'}, @@ -15018,11 +15324,12 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, ignore_null_values: Optional[object] = None, alternate_key_name: Optional[object] = None, **kwargs ): - super(DynamicsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(DynamicsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'DynamicsSink' # type: str self.write_behavior = write_behavior self.ignore_null_values = ignore_null_values @@ -15048,6 +15355,9 @@ class DynamicsSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics (online & on-premises). Type: string (or Expression with resultType string). 
:type query: object @@ -15066,6 +15376,7 @@ class DynamicsSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -15077,11 +15388,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(DynamicsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(DynamicsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'DynamicsSource' # type: str self.query = query self.additional_columns = additional_columns @@ -15265,6 +15577,9 @@ class EloquaSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
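The queryTimeout pattern quoted throughout these docstrings is a .NET-style timespan: an optional day prefix, then hh:mm:ss with minutes and seconds capped at 60. A quick self-check of the documented regex, verbatim:

    import re

    TIMEOUT = r"((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))"
    assert re.fullmatch(TIMEOUT, "02:00:00")    # two hours
    assert re.fullmatch(TIMEOUT, "1.12:30:00")  # one day, twelve and a half hours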
:type query_timeout: object @@ -15286,6 +15601,7 @@ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -15298,12 +15614,13 @@ def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(EloquaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(EloquaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'EloquaSource' # type: str self.query = query @@ -15358,7 +15675,7 @@ class EntityReference(msrest.serialization.Model): """The entity reference. :param type: The type of this referenced entity. Possible values include: - "IntegrationRuntimeReference", "LinkedServiceReference". + "IntegrationRuntimeReference", "LinkedServiceReference", "CredentialReference". :type type: str or ~data_factory_management_client.models.IntegrationRuntimeEntityReferenceType :param reference_name: The name of this referenced entity. :type reference_name: str @@ -15448,9 +15765,12 @@ class ExcelDataset(Dataset): :type folder: ~data_factory_management_client.models.DatasetFolder :param location: The location of the excel storage. :type location: ~data_factory_management_client.models.DatasetLocation - :param sheet_name: The sheet of excel file. Type: string (or Expression with resultType + :param sheet_name: The sheet name of the excel file. Type: string (or Expression with resultType string). :type sheet_name: object + :param sheet_index: The sheet index of the excel file; the default value is 0. Type: integer (or + Expression with resultType integer). + :type sheet_index: object :param range: The partial data of one sheet. Type: string (or Expression with resultType string).
:type range: object @@ -15481,6 +15801,7 @@ class ExcelDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, 'sheet_name': {'key': 'typeProperties.sheetName', 'type': 'object'}, + 'sheet_index': {'key': 'typeProperties.sheetIndex', 'type': 'object'}, 'range': {'key': 'typeProperties.range', 'type': 'object'}, 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, @@ -15500,6 +15821,7 @@ def __init__( folder: Optional["DatasetFolder"] = None, location: Optional["DatasetLocation"] = None, sheet_name: Optional[object] = None, + sheet_index: Optional[object] = None, range: Optional[object] = None, first_row_as_header: Optional[object] = None, compression: Optional["DatasetCompression"] = None, @@ -15510,6 +15832,7 @@ def __init__( self.type = 'Excel' # type: str self.location = location self.sheet_name = sheet_name + self.sheet_index = sheet_index self.range = range self.first_row_as_header = first_row_as_header self.compression = compression @@ -15535,6 +15858,9 @@ class ExcelSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Excel store settings. :type store_settings: ~data_factory_management_client.models.StoreReadSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: @@ -15552,6 +15878,7 @@ class ExcelSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -15563,11 +15890,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, store_settings: Optional["StoreReadSettings"] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(ExcelSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(ExcelSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'ExcelSource' # type: str self.store_settings = store_settings self.additional_columns = additional_columns @@ -16614,6 +16942,9 @@ class FileServerReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. 
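sheetIndex gives a zero-based alternative to sheetName for selecting a worksheet. A hedged construction sketch; the linked-service and blob names are placeholders, and the reference/location signatures are taken from their generated counterparts elsewhere in this file:

    ds = models.ExcelDataset(
        linked_service_name=models.LinkedServiceReference(
            type="LinkedServiceReference", reference_name="AzureBlobStorage1"
        ),
        location=models.AzureBlobStorageLocation(container="data", file_name="report.xlsx"),
        sheet_index=0,  # new: zero-based worksheet index
        first_row_as_header=True,
    )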
Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -16654,6 +16985,7 @@ class FileServerReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -16671,6 +17003,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, @@ -16683,7 +17016,7 @@ def __init__( file_filter: Optional[object] = None, **kwargs ): - super(FileServerReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(FileServerReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'FileServerReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -16710,6 +17043,9 @@ class FileServerWriteSettings(StoreWriteSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. 
:type copy_behavior: object """ @@ -16722,6 +17058,7 @@ class FileServerWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } @@ -16730,10 +17067,11 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, copy_behavior: Optional[object] = None, **kwargs ): - super(FileServerWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + super(FileServerWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs) self.type = 'FileServerWriteSettings' # type: str @@ -16865,6 +17203,9 @@ class FileSystemSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object """ @@ -16881,6 +17222,7 @@ class FileSystemSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, } @@ -16893,10 +17235,11 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, copy_behavior: Optional[object] = None, **kwargs ): - super(FileSystemSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(FileSystemSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'FileSystemSink' # type: str self.copy_behavior = copy_behavior @@ -16920,6 +17263,9 @@ class FileSystemSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. 
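Write settings pick up the flag as well, alongside the existing copyBehavior passthrough. A sketch; "PreserveHierarchy" is a standard ADF copy behavior but is not enumerated in this patch:

    ws = models.FileServerWriteSettings(
        copy_behavior="PreserveHierarchy",  # assumed literal, not from this diff
        disable_metrics_collection=True,
    )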
Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object @@ -16938,6 +17284,7 @@ class FileSystemSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -16949,11 +17296,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(FileSystemSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(FileSystemSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'FileSystemSource' # type: str self.recursive = recursive self.additional_columns = additional_columns @@ -17104,6 +17452,9 @@ class FtpReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). 
:type recursive: object @@ -17137,6 +17488,7 @@ class FtpReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -17152,6 +17504,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, @@ -17162,7 +17515,7 @@ def __init__( use_binary_transfer: Optional[bool] = None, **kwargs ): - super(FtpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(FtpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'FtpReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -17737,6 +18090,9 @@ class GoogleAdWordsSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
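Store read settings follow suit; on FtpReadSettings the flag sits alongside the wildcard and transfer options already in the model. Sketch:

    ftp = models.FtpReadSettings(
        recursive=True,
        wildcard_file_name="*.csv",
        use_binary_transfer=True,  # note: a plain bool in this model, not object
        disable_metrics_collection=True,
    )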
:type query_timeout: object @@ -17758,6 +18114,7 @@ class GoogleAdWordsSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -17770,12 +18127,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(GoogleAdWordsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(GoogleAdWordsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'GoogleAdWordsSource' # type: str self.query = query @@ -18004,6 +18362,9 @@ class GoogleBigQuerySource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -18025,6 +18386,7 @@ class GoogleBigQuerySource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -18037,12 +18399,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(GoogleBigQuerySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(GoogleBigQuerySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'GoogleBigQuerySource' # type: str self.query = query @@ -18187,6 +18550,9 @@ class GoogleCloudStorageReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). 
:type recursive: object @@ -18227,6 +18593,7 @@ class GoogleCloudStorageReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -18244,6 +18611,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, @@ -18256,7 +18624,7 @@ def __init__( modified_datetime_end: Optional[object] = None, **kwargs ): - super(GoogleCloudStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(GoogleCloudStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'GoogleCloudStorageReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -18354,6 +18722,9 @@ class GreenplumSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -18375,6 +18746,7 @@ class GreenplumSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -18387,12 +18759,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(GreenplumSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(GreenplumSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'GreenplumSource' # type: str self.query = query @@ -18679,6 +19052,9 @@ class HBaseSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -18700,6 +19076,7 @@ class HBaseSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -18712,12 +19089,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(HBaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(HBaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'HBaseSource' # type: str self.query = query @@ -18854,6 +19232,9 @@ class HdfsReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). 
:type recursive: object @@ -18893,6 +19274,7 @@ class HdfsReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -18910,6 +19292,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, @@ -18922,7 +19305,7 @@ def __init__( delete_files_after_completion: Optional[object] = None, **kwargs ): - super(HdfsReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(HdfsReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'HdfsReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -18955,6 +19338,9 @@ class HdfsSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). 
:type recursive: object @@ -18972,6 +19358,7 @@ class HdfsSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, } @@ -18983,11 +19370,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, distcp_settings: Optional["DistcpSettings"] = None, **kwargs ): - super(HdfsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(HdfsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'HdfsSource' # type: str self.recursive = recursive self.distcp_settings = distcp_settings @@ -20109,6 +20497,9 @@ class HiveSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -20130,6 +20521,7 @@ class HiveSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -20142,12 +20534,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(HiveSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(HiveSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'HiveSource' # type: str self.query = query @@ -20372,6 +20765,9 @@ class HttpReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param request_method: The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). 
:type request_method: object @@ -20399,6 +20795,7 @@ class HttpReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'request_method': {'key': 'requestMethod', 'type': 'object'}, 'request_body': {'key': 'requestBody', 'type': 'object'}, 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, @@ -20412,6 +20809,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, request_method: Optional[object] = None, request_body: Optional[object] = None, additional_headers: Optional[object] = None, @@ -20420,7 +20818,7 @@ def __init__( partition_root_path: Optional[object] = None, **kwargs ): - super(HttpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(HttpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'HttpReadSettings' # type: str self.request_method = request_method self.request_body = request_body @@ -20496,6 +20894,9 @@ class HttpSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param http_request_timeout: Specifies the timeout for a HTTP client to get HTTP response from HTTP server. The default value is equivalent to System.Net.HttpWebRequest.Timeout. 
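HttpReadSettings threads the flag in next to its request-shaping options. Sketch; the method, body, and header values are illustrative:

    http_read = models.HttpReadSettings(
        request_method="POST",
        request_body='{"query": "sales"}',
        additional_headers="Content-Type: application/json",
        disable_metrics_collection=True,
    )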
Type: string (or Expression with resultType string), pattern: @@ -20513,6 +20914,7 @@ class HttpSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } @@ -20523,10 +20925,11 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, http_request_timeout: Optional[object] = None, **kwargs ): - super(HttpSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(HttpSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'HttpSource' # type: str self.http_request_timeout = http_request_timeout @@ -20714,6 +21117,9 @@ class HubspotSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -20735,6 +21141,7 @@ class HubspotSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -20747,12 +21154,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(HubspotSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(HubspotSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'HubspotSource' # type: str self.query = query @@ -21042,6 +21450,9 @@ class ImpalaSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -21063,6 +21474,7 @@ class ImpalaSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -21075,12 +21487,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(ImpalaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(ImpalaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'ImpalaSource' # type: str self.query = query @@ -21196,6 +21609,9 @@ class InformixSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). 
:type pre_copy_script: object @@ -21213,6 +21629,7 @@ class InformixSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -21225,10 +21642,11 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, pre_copy_script: Optional[object] = None, **kwargs ): - super(InformixSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(InformixSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'InformixSink' # type: str self.pre_copy_script = pre_copy_script @@ -21252,6 +21670,9 @@ class InformixSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -21272,6 +21693,7 @@ class InformixSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -21284,12 +21706,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(InformixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(InformixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'InformixSource' # type: str self.query = query @@ -22031,6 +22454,8 @@ class IntegrationRuntimeSsisProperties(msrest.serialization.Model): list[~data_factory_management_client.models.CustomSetupBase] :param package_stores: Package stores for the SSIS Integration Runtime. :type package_stores: list[~data_factory_management_client.models.PackageStore] + :param managed_credential: The user-assigned managed identity reference. + :type managed_credential: ~data_factory_management_client.models.EntityReference """ _attribute_map = { @@ -22042,6 +22467,7 @@ class IntegrationRuntimeSsisProperties(msrest.serialization.Model): 'edition': {'key': 'edition', 'type': 'str'}, 'express_custom_setup_properties': {'key': 'expressCustomSetupProperties', 'type': '[CustomSetupBase]'}, 'package_stores': {'key': 'packageStores', 'type': '[PackageStore]'}, + 'managed_credential': {'key': 'managedCredential', 'type': 'EntityReference'}, } def __init__( @@ -22055,6 +22481,7 @@ def __init__( edition: Optional[Union[str, "IntegrationRuntimeEdition"]] = None, express_custom_setup_properties: Optional[List["CustomSetupBase"]] = None, package_stores: Optional[List["PackageStore"]] = None, + managed_credential: Optional["EntityReference"] = None, **kwargs ): super(IntegrationRuntimeSsisProperties, self).__init__(**kwargs) @@ -22066,6 +22493,7 @@ def __init__( self.edition = edition self.express_custom_setup_properties = express_custom_setup_properties self.package_stores = package_stores + self.managed_credential = managed_credential class IntegrationRuntimeStatus(msrest.serialization.Model): @@ -22411,6 +22839,9 @@ class JiraSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. 
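managedCredential ties an SSIS integration runtime to a user-assigned managed identity through EntityReference, whose type enum gains "CredentialReference" earlier in this patch. Sketch; the credential name is a placeholder:

    ssis = models.IntegrationRuntimeSsisProperties(
        managed_credential=models.EntityReference(
            type="CredentialReference",                 # newly allowed value
            reference_name="myUserAssignedCredential",  # placeholder credential name
        ),
    )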
Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -22432,6 +22863,7 @@ class JiraSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -22444,12 +22876,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(JiraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(JiraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'JiraSource' # type: str self.query = query @@ -22551,9 +22984,8 @@ class JsonFormat(DatasetStorageFormat): :param deserializer: Deserializer. Type: string (or Expression with resultType string). :type deserializer: object :param file_pattern: File pattern of JSON. To be more specific, the way of separating a - collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive. Possible - values include: "setOfObjects", "arrayOfObjects". - :type file_pattern: str or ~data_factory_management_client.models.JsonFormatFilePattern + collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive. + :type file_pattern: object :param nesting_separator: The character used to separate nesting levels. Default value is '.' (dot). Type: string (or Expression with resultType string). 
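The managed_credential added to IntegrationRuntimeSsisProperties above is an EntityReference; a minimal sketch under the same import-path assumption, where both the reference type string and the credential name are illustrative placeholders rather than values confirmed by this patch:

from azext_datafactory.vendored_sdks.datafactory import models

# Point the SSIS integration runtime at a user-assigned managed identity
# credential. The type string and name below are assumptions.
ssis_properties = models.IntegrationRuntimeSsisProperties(
    managed_credential=models.EntityReference(
        type="CredentialReference",
        reference_name="myUserAssignedIdentityCredential",
    )
)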
:type nesting_separator: object @@ -22583,7 +23015,7 @@ class JsonFormat(DatasetStorageFormat): 'type': {'key': 'type', 'type': 'str'}, 'serializer': {'key': 'serializer', 'type': 'object'}, 'deserializer': {'key': 'deserializer', 'type': 'object'}, - 'file_pattern': {'key': 'filePattern', 'type': 'str'}, + 'file_pattern': {'key': 'filePattern', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, 'encoding_name': {'key': 'encodingName', 'type': 'object'}, 'json_node_reference': {'key': 'jsonNodeReference', 'type': 'object'}, @@ -22596,7 +23028,7 @@ def __init__( additional_properties: Optional[Dict[str, object]] = None, serializer: Optional[object] = None, deserializer: Optional[object] = None, - file_pattern: Optional[Union[str, "JsonFormatFilePattern"]] = None, + file_pattern: Optional[object] = None, nesting_separator: Optional[object] = None, encoding_name: Optional[object] = None, json_node_reference: Optional[object] = None, @@ -22673,6 +23105,9 @@ class JsonSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Json store settings. :type store_settings: ~data_factory_management_client.models.StoreWriteSettings :param format_settings: Json format settings. @@ -22691,6 +23126,7 @@ class JsonSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'JsonWriteSettings'}, } @@ -22704,11 +23140,12 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, store_settings: Optional["StoreWriteSettings"] = None, format_settings: Optional["JsonWriteSettings"] = None, **kwargs ): - super(JsonSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(JsonSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'JsonSink' # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -22733,6 +23170,9 @@ class JsonSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. 
Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Json store settings. :type store_settings: ~data_factory_management_client.models.StoreReadSettings :param format_settings: Json format settings. @@ -22752,6 +23192,7 @@ class JsonSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'JsonReadSettings'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, @@ -22764,12 +23205,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, store_settings: Optional["StoreReadSettings"] = None, format_settings: Optional["JsonReadSettings"] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(JsonSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(JsonSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'JsonSource' # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -22787,9 +23229,8 @@ class JsonWriteSettings(FormatWriteSettings): :param type: Required. The write setting type.Constant filled by server. :type type: str :param file_pattern: File pattern of JSON. This setting controls the way a collection of JSON - objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. Possible - values include: "setOfObjects", "arrayOfObjects". - :type file_pattern: str or ~data_factory_management_client.models.JsonWriteFilePattern + objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. + :type file_pattern: object """ _validation = { @@ -22799,14 +23240,14 @@ class JsonWriteSettings(FormatWriteSettings): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'file_pattern': {'key': 'filePattern', 'type': 'str'}, + 'file_pattern': {'key': 'filePattern', 'type': 'object'}, } def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, - file_pattern: Optional[Union[str, "JsonWriteFilePattern"]] = None, + file_pattern: Optional[object] = None, **kwargs ): super(JsonWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) @@ -23504,6 +23945,9 @@ class MagentoSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
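The JsonFormat and JsonWriteSettings changes above relax file_pattern from an enum to object, so the previously documented literals keep working while expressions become possible; a minimal sketch (import path assumed, parameter name hypothetical):

from azext_datafactory.vendored_sdks.datafactory import models

# The old enum literals are still valid as plain strings.
settings = models.JsonWriteSettings(file_pattern="arrayOfObjects")

# An expression can now choose the pattern at run time
# (hypothetical parameter name).
settings_dynamic = models.JsonWriteSettings(
    file_pattern={
        "type": "Expression",
        "value": "@pipeline().parameters.jsonPattern",
    }
)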
+ :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object @@ -23525,6 +23969,7 @@ class MagentoSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -23537,12 +23982,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(MagentoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(MagentoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'MagentoSource' # type: str self.query = query @@ -24245,6 +24691,9 @@ class MariaDbSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -24266,6 +24715,7 @@ class MariaDbSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -24278,12 +24728,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(MariaDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(MariaDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'MariaDBSource' # type: str self.query = query @@ -24533,6 +24984,9 @@ class MarketoSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -24554,6 +25008,7 @@ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -24566,16 +25021,43 @@ def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(MarketoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(MarketoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'MarketoSource' # type: str self.query = query +class MetadataItem(msrest.serialization.Model): + """Specifies the name and value of a custom metadata item. + + :param name: Metadata item key name. Type: string (or Expression with resultType string). + :type name: object + :param value: Metadata item value. Type: string (or Expression with resultType string). + :type value: object + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'object'}, + 'value': {'key': 'value', 'type': 'object'}, + } + + def __init__( + self, + *, + name: Optional[object] = None, + value: Optional[object] = None, + **kwargs + ): + super(MetadataItem, self).__init__(**kwargs) + self.name = name + self.value = value + + class MicrosoftAccessLinkedService(LinkedService): """Microsoft Access linked service. @@ -24687,6 +25169,9 @@ class MicrosoftAccessSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string).
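Both fields of the new MetadataItem are object-typed for the usual reason; a minimal sketch (import path assumed, names and values illustrative):

from azext_datafactory.vendored_sdks.datafactory import models

# A static metadata item.
owner = models.MetadataItem(name="owner", value="data-team")

# A value resolved from the pipeline run at copy time.
run_tag = models.MetadataItem(
    name="runId",
    value={"type": "Expression", "value": "@pipeline().RunId"},
)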
:type pre_copy_script: object @@ -24704,6 +25189,7 @@ class MicrosoftAccessSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -24716,10 +25202,11 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, pre_copy_script: Optional[object] = None, **kwargs ): - super(MicrosoftAccessSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(MicrosoftAccessSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'MicrosoftAccessSink' # type: str self.pre_copy_script = pre_copy_script @@ -24743,6 +25230,9 @@ class MicrosoftAccessSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object :param additional_columns: Specifies the additional columns to be added to source data. 
Type: @@ -24760,6 +25250,7 @@ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -24771,11 +25262,12 @@ def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(MicrosoftAccessSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(MicrosoftAccessSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'MicrosoftAccessSource' # type: str self.query = query self.additional_columns = additional_columns @@ -24982,6 +25474,74 @@ def __init__( self.database = database +class MongoDbAtlasSink(CopySink): + """A copy activity MongoDB Atlas sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object + :param write_behavior: Specifies whether a document with the same key should be overwritten + (upsert) rather than raise an exception (insert). The default value is "insert". Type: string + (or Expression with resultType string).
+ :type write_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, + write_behavior: Optional[object] = None, + **kwargs + ): + super(MongoDbAtlasSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) + self.type = 'MongoDbAtlasSink' # type: str + self.write_behavior = write_behavior + + class MongoDbAtlasSource(CopySource): """A copy activity source for a MongoDB Atlas database. @@ -25001,6 +25561,9 @@ class MongoDbAtlasSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param filter: Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). 
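A minimal sketch of the new MongoDbAtlasSink, using the two documented write_behavior literals (import path assumed, batch size illustrative):

from azext_datafactory.vendored_sdks.datafactory import models

# Upsert documents that share a key instead of failing on duplicates;
# omit write_behavior (or pass "insert") for the default behavior.
sink = models.MongoDbAtlasSink(
    write_behavior="upsert",
    write_batch_size=1000,
)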
@@ -25030,6 +25593,7 @@ class MongoDbAtlasSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'filter': {'key': 'filter', 'type': 'object'}, 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, @@ -25044,6 +25608,7 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, filter: Optional[object] = None, cursor_methods: Optional["MongoDbCursorMethodsProperties"] = None, batch_size: Optional[object] = None, @@ -25051,7 +25616,7 @@ def __init__( additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(MongoDbAtlasSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(MongoDbAtlasSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'MongoDbAtlasSource' # type: str self.filter = filter self.cursor_methods = cursor_methods @@ -25308,6 +25873,9 @@ class MongoDbSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: Database query. Should be a SQL-92 query expression. Type: string (or Expression with resultType string). 
:type query: object @@ -25326,6 +25894,7 @@ 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -25337,11 +25906,12 @@ def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(MongoDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(MongoDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'MongoDbSource' # type: str self.query = query self.additional_columns = additional_columns @@ -25478,6 +26048,74 @@ def __init__( self.database = database +class MongoDbV2Sink(CopySink): + """A copy activity MongoDB sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object + :param write_behavior: Specifies whether a document with the same key should be overwritten + (upsert) rather than raise an exception (insert). The default value is "insert". Type: string + (or Expression with resultType string).
+ :type write_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, + write_behavior: Optional[object] = None, + **kwargs + ): + super(MongoDbV2Sink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) + self.type = 'MongoDbV2Sink' # type: str + self.write_behavior = write_behavior + + class MongoDbV2Source(CopySource): """A copy activity source for a MongoDB database. @@ -25497,6 +26135,9 @@ class MongoDbV2Source(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param filter: Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). 
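MongoDbV2Source takes the same filter and cursor options as MongoDbAtlasSource above; a minimal sketch (import path assumed, filter and batch size illustrative):

from azext_datafactory.vendored_sdks.datafactory import models

# Select only matching documents; pass "{}" or omit the filter to read
# the whole collection.
source = models.MongoDbV2Source(
    filter='{"status": "active"}',
    batch_size=500,
)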
@@ -25526,6 +26167,7 @@ class MongoDbV2Source(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'filter': {'key': 'filter', 'type': 'object'}, 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, @@ -25540,6 +26182,7 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, filter: Optional[object] = None, cursor_methods: Optional["MongoDbCursorMethodsProperties"] = None, batch_size: Optional[object] = None, @@ -25547,7 +26190,7 @@ def __init__( additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(MongoDbV2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(MongoDbV2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'MongoDbV2Source' # type: str self.filter = filter self.cursor_methods = cursor_methods @@ -25640,6 +26283,9 @@ class MySqlSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -25660,6 +26306,7 @@ class MySqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -25672,12 +26319,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(MySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(MySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'MySqlSource' # type: str self.query = query @@ -25870,6 +26518,9 @@ class NetezzaSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -25896,6 +26547,7 @@ class NetezzaSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -25910,6 +26562,7 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, @@ -25917,7 +26570,7 @@ def __init__( partition_settings: Optional["NetezzaPartitionSettings"] = None, **kwargs ): - super(NetezzaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(NetezzaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'NetezzaSource' # type: str self.query = query self.partition_option = partition_option @@ -26231,6 +26884,9 @@ class ODataSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: OData query. For example, "$top=1". Type: string (or Expression with resultType string). 
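A minimal sketch of the ODataSource query parameter, reusing the "$top=1" example from the docstring (import path assumed):

from azext_datafactory.vendored_sdks.datafactory import models

# Copy only the first entity from the OData feed.
source = models.ODataSource(query="$top=1")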
:type query: object @@ -26254,6 +26910,7 @@ class ODataSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, @@ -26266,12 +26923,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query: Optional[object] = None, http_request_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(ODataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(ODataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'ODataSource' # type: str self.query = query self.http_request_timeout = http_request_timeout @@ -26388,6 +27046,9 @@ class OdbcSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: A query to execute before starting the copy. Type: string (or Expression with resultType string). 
:type pre_copy_script: object @@ -26405,6 +27066,7 @@ class OdbcSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -26417,10 +27079,11 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, pre_copy_script: Optional[object] = None, **kwargs ): - super(OdbcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(OdbcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'OdbcSink' # type: str self.pre_copy_script = pre_copy_script @@ -26444,6 +27107,9 @@ class OdbcSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -26464,6 +27130,7 @@ class OdbcSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -26476,12 +27143,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(OdbcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(OdbcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'OdbcSource' # type: str self.query = query @@ -26730,6 +27398,9 @@ class Office365Source(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param allowed_groups: The groups containing all the users. Type: array of strings (or Expression with resultType array of strings). 
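Since allowed_groups is object-typed, a literal array should work as well as an expression; a minimal sketch (import path assumed, group names hypothetical):

from azext_datafactory.vendored_sdks.datafactory import models

# Restrict extraction to users in two (hypothetical) groups.
source = models.Office365Source(
    allowed_groups=["finance-team", "sales-team"],
)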
:type allowed_groups: object @@ -26761,6 +27432,7 @@ class Office365Source(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'allowed_groups': {'key': 'allowedGroups', 'type': 'object'}, 'user_scope_filter_uri': {'key': 'userScopeFilterUri', 'type': 'object'}, 'date_filter_column': {'key': 'dateFilterColumn', 'type': 'object'}, @@ -26776,6 +27448,7 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, allowed_groups: Optional[object] = None, user_scope_filter_uri: Optional[object] = None, date_filter_column: Optional[object] = None, @@ -26784,7 +27457,7 @@ def __init__( output_columns: Optional[object] = None, **kwargs ): - super(Office365Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(Office365Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'Office365Source' # type: str self.allowed_groups = allowed_groups self.user_scope_filter_uri = user_scope_filter_uri @@ -27215,6 +27888,9 @@ class OracleCloudStorageReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). 
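A minimal sketch of the new OracleCloudStorageReadSettings wildcard options (import path assumed, paths hypothetical):

from azext_datafactory.vendored_sdks.datafactory import models

# Recursively pick up CSV files under a folder prefix.
read_settings = models.OracleCloudStorageReadSettings(
    recursive=True,
    wildcard_folder_path="raw/2021/*",
    wildcard_file_name="*.csv",
)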
:type recursive: object @@ -27255,6 +27931,7 @@ class OracleCloudStorageReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -27272,6 +27949,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, @@ -27284,7 +27962,7 @@ def __init__( modified_datetime_end: Optional[object] = None, **kwargs ): - super(OracleCloudStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(OracleCloudStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'OracleCloudStorageReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -27585,6 +28263,9 @@ class OracleServiceCloudSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -27606,6 +28287,7 @@ class OracleServiceCloudSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -27618,12 +28300,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(OracleServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(OracleServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'OracleServiceCloudSource' # type: str self.query = query @@ -27653,6 +28336,9 @@ class OracleSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). 
:type pre_copy_script: object @@ -27670,6 +28356,7 @@ class OracleSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, } @@ -27682,10 +28369,11 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, pre_copy_script: Optional[object] = None, **kwargs ): - super(OracleSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(OracleSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'OracleSink' # type: str self.pre_copy_script = pre_copy_script @@ -27709,6 +28397,9 @@ class OracleSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param oracle_reader_query: Oracle reader query. Type: string (or Expression with resultType string). 
:type oracle_reader_query: object @@ -27735,6 +28426,7 @@ class OracleSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'object'}, @@ -27749,6 +28441,7 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, oracle_reader_query: Optional[object] = None, query_timeout: Optional[object] = None, partition_option: Optional[object] = None, @@ -27756,7 +28449,7 @@ def __init__( additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(OracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(OracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'OracleSource' # type: str self.oracle_reader_query = oracle_reader_query self.query_timeout = query_timeout @@ -27875,8 +28568,9 @@ class OrcDataset(Dataset): :type folder: ~data_factory_management_client.models.DatasetFolder :param location: The location of the ORC data storage. :type location: ~data_factory_management_client.models.DatasetLocation - :param orc_compression_codec: Possible values include: "none", "zlib", "snappy", "lzo". - :type orc_compression_codec: str or ~data_factory_management_client.models.OrcCompressionCodec + :param orc_compression_codec: The data orcCompressionCodec. Type: string (or Expression with + resultType string). + :type orc_compression_codec: object """ _validation = { @@ -27895,7 +28589,7 @@ class OrcDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'orc_compression_codec': {'key': 'typeProperties.orcCompressionCodec', 'type': 'str'}, + 'orc_compression_codec': {'key': 'typeProperties.orcCompressionCodec', 'type': 'object'}, } def __init__( @@ -27910,7 +28604,7 @@ def __init__( annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, location: Optional["DatasetLocation"] = None, - orc_compression_codec: Optional[Union[str, "OrcCompressionCodec"]] = None, + orc_compression_codec: Optional[object] = None, **kwargs ): super(OrcDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -27983,6 +28677,9 @@ class OrcSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). 
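This is the change the PR title announces: orcCompressionCodec goes from a fixed enum to object, so OrcDataset keeps accepting the old codec names while also taking expressions. A minimal sketch (import path and linked-service name assumed):

from azext_datafactory.vendored_sdks.datafactory import models

blob_store = models.LinkedServiceReference(
    type="LinkedServiceReference",        # discriminator constant
    reference_name="MyAzureBlobStorage",  # hypothetical linked service
)

# Codec as a plain string, exactly as before the type change...
dataset = models.OrcDataset(
    linked_service_name=blob_store,
    orc_compression_codec="snappy",
)

# ...or resolved per run through an expression (hypothetical parameter).
dataset_dynamic = models.OrcDataset(
    linked_service_name=blob_store,
    orc_compression_codec={"type": "Expression", "value": "@dataset().codec"},
)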
:type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: ORC store settings. :type store_settings: ~data_factory_management_client.models.StoreWriteSettings :param format_settings: ORC format settings. @@ -28001,6 +28698,7 @@ class OrcSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'OrcWriteSettings'}, } @@ -28014,11 +28712,12 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, store_settings: Optional["StoreWriteSettings"] = None, format_settings: Optional["OrcWriteSettings"] = None, **kwargs ): - super(OrcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(OrcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'OrcSink' # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -28043,6 +28742,9 @@ class OrcSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: ORC store settings. :type store_settings: ~data_factory_management_client.models.StoreReadSettings :param additional_columns: Specifies the additional columns to be added to source data. 
Type: @@ -28060,6 +28762,7 @@ class OrcSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -28071,11 +28774,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, store_settings: Optional["StoreReadSettings"] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(OrcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(OrcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'OrcSource' # type: str self.store_settings = store_settings self.additional_columns = additional_columns @@ -28220,9 +28924,9 @@ class ParquetDataset(Dataset): :type folder: ~data_factory_management_client.models.DatasetFolder :param location: The location of the parquet storage. :type location: ~data_factory_management_client.models.DatasetLocation - :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo", "bzip2", - "deflate", "zipDeflate", "lz4", "tar", "tarGZip". - :type compression_codec: str or ~data_factory_management_client.models.CompressionCodec + :param compression_codec: The data compressionCodec. Type: string (or Expression with + resultType string). + :type compression_codec: object """ _validation = { @@ -28241,7 +28945,7 @@ class ParquetDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, } def __init__( @@ -28256,7 +28960,7 @@ def __init__( annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, location: Optional["DatasetLocation"] = None, - compression_codec: Optional[Union[str, "CompressionCodec"]] = None, + compression_codec: Optional[object] = None, **kwargs ): super(ParquetDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -28329,6 +29033,9 @@ class ParquetSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). 
+ :type disable_metrics_collection: object :param store_settings: Parquet store settings. :type store_settings: ~data_factory_management_client.models.StoreWriteSettings :param format_settings: Parquet format settings. @@ -28347,6 +29054,7 @@ class ParquetSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'ParquetWriteSettings'}, } @@ -28360,11 +29068,12 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, store_settings: Optional["StoreWriteSettings"] = None, format_settings: Optional["ParquetWriteSettings"] = None, **kwargs ): - super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'ParquetSink' # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -28389,6 +29098,9 @@ class ParquetSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Parquet store settings. :type store_settings: ~data_factory_management_client.models.StoreReadSettings :param additional_columns: Specifies the additional columns to be added to source data. 
Type: @@ -28406,6 +29118,7 @@ class ParquetSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -28417,11 +29130,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, store_settings: Optional["StoreReadSettings"] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(ParquetSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(ParquetSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'ParquetSource' # type: str self.store_settings = store_settings self.additional_columns = additional_columns @@ -28648,6 +29362,9 @@ class PaypalSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -28669,6 +29386,7 @@ class PaypalSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -28681,12 +29399,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(PaypalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(PaypalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'PaypalSource' # type: str self.query = query @@ -28915,6 +29634,9 @@ class PhoenixSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -28936,6 +29658,7 @@ class PhoenixSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -28948,12 +29671,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(PhoenixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(PhoenixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'PhoenixSource' # type: str self.query = query @@ -29476,6 +30200,9 @@ class PostgreSqlSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -29496,6 +30223,7 @@ class PostgreSqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -29508,12 +30236,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(PostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(PostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'PostgreSqlSource' # type: str self.query = query @@ -29832,6 +30561,9 @@ class PrestoSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
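The `query_timeout` docstrings above describe a timespan pattern; in practice that means a `d.hh:mm:ss` string. A sketch for one of the tabular sources (query text illustrative):

    from data_factory_management_client.models import PostgreSqlSource

    src = PostgreSqlSource(
        query="SELECT * FROM public.orders",
        query_timeout="02:00:00",        # 'd.hh:mm:ss' timespan per the documented pattern
        disable_metrics_collection=True,
    )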
:type query_timeout: object @@ -29853,6 +30585,7 @@ class PrestoSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -29865,12 +30598,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(PrestoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(PrestoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'PrestoSource' # type: str self.query = query @@ -30357,6 +31091,9 @@ class QuickBooksSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -30378,6 +31115,7 @@ class QuickBooksSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -30390,12 +31128,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(QuickBooksSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(QuickBooksSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'QuickBooksSource' # type: str self.query = query @@ -30578,6 +31317,9 @@ class RelationalSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object :param additional_columns: Specifies the additional columns to be added to source data. 
Type: @@ -30595,6 +31337,7 @@ class RelationalSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -30606,11 +31349,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(RelationalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(RelationalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'RelationalSource' # type: str self.query = query self.additional_columns = additional_columns @@ -30975,6 +31719,9 @@ class ResponsysSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -30996,6 +31743,7 @@ class ResponsysSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -31008,12 +31756,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(ResponsysSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(ResponsysSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'ResponsysSource' # type: str self.query = query @@ -31258,6 +32007,9 @@ class RestSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param request_method: The HTTP method used to call the RESTful API. The default is POST. Type: string (or Expression with resultType string). 
:type request_method: object @@ -31288,6 +32040,7 @@ class RestSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'request_method': {'key': 'requestMethod', 'type': 'object'}, 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, @@ -31304,6 +32057,7 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, request_method: Optional[object] = None, additional_headers: Optional[object] = None, http_request_timeout: Optional[object] = None, @@ -31311,7 +32065,7 @@ def __init__( http_compression_type: Optional[object] = None, **kwargs ): - super(RestSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(RestSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'RestSink' # type: str self.request_method = request_method self.additional_headers = additional_headers @@ -31339,6 +32093,9 @@ class RestSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param request_method: The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). 
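RestSink keeps all of its tuning knobs `object`-typed, so literals and expressions mix freely; a hedged sketch (the header expression and compression value are assumptions, not taken from this patch):

    from data_factory_management_client.models import RestSink

    sink = RestSink(
        request_method="POST",               # literal; an Expression dict also works
        http_request_timeout="00:01:40",     # timespan literal
        http_compression_type="gzip",        # assumed supported value
        additional_headers={
            "type": "Expression",
            "value": "@concat('x-run-id: ', pipeline().RunId)",  # hypothetical header
        },
        disable_metrics_collection=True,
    )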
:type request_method: object @@ -31373,6 +32130,7 @@ class RestSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'request_method': {'key': 'requestMethod', 'type': 'object'}, 'request_body': {'key': 'requestBody', 'type': 'object'}, 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, @@ -31389,6 +32147,7 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, request_method: Optional[object] = None, request_body: Optional[object] = None, additional_headers: Optional[object] = None, @@ -31398,7 +32157,7 @@ def __init__( additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(RestSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(RestSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'RestSource' # type: str self.request_method = request_method self.request_body = request_body @@ -31837,6 +32596,9 @@ class SalesforceMarketingCloudSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -31858,6 +32620,7 @@ class SalesforceMarketingCloudSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -31870,12 +32633,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(SalesforceMarketingCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SalesforceMarketingCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SalesforceMarketingCloudSource' # type: str self.query = query @@ -32133,6 +32897,9 @@ class SalesforceServiceCloudSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: The write behavior for the operation. Default is Insert. Possible values include: "Insert", "Upsert". 
:type write_behavior: str or ~data_factory_management_client.models.SalesforceSinkWriteBehavior @@ -32160,6 +32927,7 @@ class SalesforceServiceCloudSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, @@ -32174,12 +32942,13 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, write_behavior: Optional[Union[str, "SalesforceSinkWriteBehavior"]] = None, external_id_field_name: Optional[object] = None, ignore_null_values: Optional[object] = None, **kwargs ): - super(SalesforceServiceCloudSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SalesforceServiceCloudSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SalesforceServiceCloudSink' # type: str self.write_behavior = write_behavior self.external_id_field_name = external_id_field_name @@ -32205,6 +32974,9 @@ class SalesforceServiceCloudSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object :param read_behavior: The read behavior for the operation. Default is Query. 
Possible values @@ -32225,6 +32997,7 @@ class SalesforceServiceCloudSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, @@ -32237,12 +33010,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query: Optional[object] = None, read_behavior: Optional[Union[str, "SalesforceSourceReadBehavior"]] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(SalesforceServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SalesforceServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SalesforceServiceCloudSource' # type: str self.query = query self.read_behavior = read_behavior @@ -32274,6 +33048,9 @@ class SalesforceSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: The write behavior for the operation. Default is Insert. Possible values include: "Insert", "Upsert". 
:type write_behavior: str or ~data_factory_management_client.models.SalesforceSinkWriteBehavior @@ -32301,6 +33078,7 @@ class SalesforceSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'}, 'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'}, @@ -32315,12 +33093,13 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, write_behavior: Optional[Union[str, "SalesforceSinkWriteBehavior"]] = None, external_id_field_name: Optional[object] = None, ignore_null_values: Optional[object] = None, **kwargs ): - super(SalesforceSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SalesforceSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SalesforceSink' # type: str self.write_behavior = write_behavior self.external_id_field_name = external_id_field_name @@ -32346,6 +33125,9 @@ class SalesforceSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
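Note the asymmetry in SalesforceSink: `write_behavior` stays a string backed by an enum while the new flag is `object`-typed like the rest. A brief sketch (the external-id field name is hypothetical):

    from data_factory_management_client.models import SalesforceSink

    sink = SalesforceSink(
        write_behavior="Upsert",                  # enum-backed, not an Expression
        external_id_field_name="External_Id__c",  # hypothetical external-id field
        ignore_null_values=True,
        disable_metrics_collection=True,
    )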
:type query_timeout: object @@ -32369,6 +33151,7 @@ class SalesforceSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -32382,13 +33165,14 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, read_behavior: Optional[Union[str, "SalesforceSourceReadBehavior"]] = None, **kwargs ): - super(SalesforceSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SalesforceSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SalesforceSource' # type: str self.query = query self.read_behavior = read_behavior @@ -32562,6 +33346,9 @@ class SapBwSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -32582,6 +33369,7 @@ class SapBwSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -32594,12 +33382,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(SapBwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SapBwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SapBwSource' # type: str self.query = query @@ -32772,6 +33561,9 @@ class SapCloudForCustomerSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param write_behavior: The write behavior for the operation. Default is 'Insert'. Possible values include: "Insert", "Update". 
:type write_behavior: str or @@ -32795,6 +33587,7 @@ class SapCloudForCustomerSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } @@ -32808,11 +33601,12 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, write_behavior: Optional[Union[str, "SapCloudForCustomerSinkWriteBehavior"]] = None, http_request_timeout: Optional[object] = None, **kwargs ): - super(SapCloudForCustomerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SapCloudForCustomerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SapCloudForCustomerSink' # type: str self.write_behavior = write_behavior self.http_request_timeout = http_request_timeout @@ -32837,6 +33631,9 @@ class SapCloudForCustomerSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -32863,6 +33660,7 @@ class SapCloudForCustomerSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -32876,13 +33674,14 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, http_request_timeout: Optional[object] = None, **kwargs ): - super(SapCloudForCustomerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SapCloudForCustomerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SapCloudForCustomerSource' # type: str self.query = query self.http_request_timeout = http_request_timeout @@ -33050,6 +33849,9 @@ class SapEccSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -33076,6 +33878,7 @@ class SapEccSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -33089,13 +33892,14 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, http_request_timeout: Optional[object] = None, **kwargs ): - super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SapEccSource' # type: str self.query = query self.http_request_timeout = http_request_timeout @@ -33226,6 +34030,9 @@ class SapHanaSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -33255,6 +34062,7 @@ class SapHanaSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -33270,6 +34078,7 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, @@ -33278,7 +34087,7 @@ def __init__( partition_settings: Optional["SapHanaPartitionSettings"] = None, **kwargs ): - super(SapHanaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SapHanaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SapHanaSource' # type: str self.query = query self.packet_size = packet_size @@ -33495,6 +34304,9 @@ class SapOpenHubSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -33527,6 +34339,7 @@ class SapOpenHubSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'exclude_last_request': {'key': 'excludeLastRequest', 'type': 'object'}, @@ -33542,6 +34355,7 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, exclude_last_request: Optional[object] = None, @@ -33550,7 +34364,7 @@ def __init__( sap_data_column_delimiter: Optional[object] = None, **kwargs ): - super(SapOpenHubSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SapOpenHubSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SapOpenHubSource' # type: str self.exclude_last_request = exclude_last_request self.base_request_id = base_request_id @@ -33917,6 +34731,9 @@ class SapTableSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -33964,6 +34781,7 @@ class SapTableSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'row_count': {'key': 'rowCount', 'type': 'object'}, @@ -33984,6 +34802,7 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, row_count: Optional[object] = None, @@ -33997,7 +34816,7 @@ def __init__( partition_settings: Optional["SapTablePartitionSettings"] = None, **kwargs ): - super(SapTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SapTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SapTableSource' # type: str self.row_count = row_count self.row_skips = row_skips @@ -34128,9 +34947,8 @@ class ScriptAction(msrest.serialization.Model): :type name: str :param uri: Required. The URI for the script action. :type uri: str - :param roles: Required. The node types on which the script action should be executed. Possible - values include: "Headnode", "Workernode", "Zookeeper". - :type roles: str or ~data_factory_management_client.models.HdiNodeTypes + :param roles: Required. The node types on which the script action should be executed. + :type roles: str :param parameters: The parameters for the script action. :type parameters: str """ @@ -34153,7 +34971,7 @@ def __init__( *, name: str, uri: str, - roles: Union[str, "HdiNodeTypes"], + roles: str, parameters: Optional[str] = None, **kwargs ): @@ -34732,6 +35550,9 @@ class ServiceNowSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -34753,6 +35574,7 @@ class ServiceNowSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -34765,12 +35587,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(ServiceNowSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(ServiceNowSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'ServiceNowSource' # type: str self.query = query @@ -34887,6 +35710,9 @@ class SftpReadSettings(StoreReadSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). 
:type recursive: object @@ -34924,6 +35750,7 @@ class SftpReadSettings(StoreReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, @@ -34940,6 +35767,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, recursive: Optional[object] = None, wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, @@ -34951,7 +35779,7 @@ def __init__( modified_datetime_end: Optional[object] = None, **kwargs ): - super(SftpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SftpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SftpReadSettings' # type: str self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path @@ -35095,6 +35923,9 @@ class SftpWriteSettings(StoreWriteSettings): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object :param operation_timeout: Specifies the timeout for writing each chunk to SFTP server. 
Default @@ -35114,6 +35945,7 @@ class SftpWriteSettings(StoreWriteSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'operation_timeout': {'key': 'operationTimeout', 'type': 'object'}, 'use_temp_file_rename': {'key': 'useTempFileRename', 'type': 'object'}, @@ -35124,12 +35956,13 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, copy_behavior: Optional[object] = None, operation_timeout: Optional[object] = None, use_temp_file_rename: Optional[object] = None, **kwargs ): - super(SftpWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + super(SftpWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs) self.type = 'SftpWriteSettings' # type: str self.operation_timeout = operation_timeout self.use_temp_file_rename = use_temp_file_rename @@ -35308,6 +36141,9 @@ class SharePointOnlineListSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: The OData query to filter the data in SharePoint Online list. For example, "$top=1". Type: string (or Expression with resultType string). 
:type query: object @@ -35327,6 +36163,7 @@ class SharePointOnlineListSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } @@ -35338,11 +36175,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query: Optional[object] = None, http_request_timeout: Optional[object] = None, **kwargs ): - super(SharePointOnlineListSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SharePointOnlineListSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SharePointOnlineListSource' # type: str self.query = query self.http_request_timeout = http_request_timeout @@ -35520,6 +36358,9 @@ class ShopifySource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -35541,6 +36382,7 @@ class ShopifySource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -35553,12 +36395,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(ShopifySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(ShopifySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'ShopifySource' # type: str self.query = query @@ -35851,6 +36694,9 @@ class SnowflakeSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). 
:type pre_copy_script: object @@ -35870,6 +36716,7 @@ class SnowflakeSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'import_settings': {'key': 'importSettings', 'type': 'SnowflakeImportCopyCommand'}, } @@ -35883,11 +36730,12 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, pre_copy_script: Optional[object] = None, import_settings: Optional["SnowflakeImportCopyCommand"] = None, **kwargs ): - super(SnowflakeSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SnowflakeSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SnowflakeSink' # type: str self.pre_copy_script = pre_copy_script self.import_settings = import_settings @@ -35912,6 +36760,9 @@ class SnowflakeSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query: Snowflake Sql query. Type: string (or Expression with resultType string). :type query: object :param export_settings: Snowflake export settings. 
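The same pass-through repeats across every CopySource, CopySink, StoreReadSettings, and StoreWriteSettings subclass touched by this patch: an optional disable_metrics_collection keyword, serialized as disableMetricsCollection via a new _attribute_map entry and forwarded to the base class in super().__init__. A minimal sketch of the resulting usage, taking SnowflakeSource from the surrounding hunks; the import path and all values are illustrative assumptions, only the constructor parameters come from this diff:

from azext_datafactory.vendored_sdks.datafactory import models  # assumed import path

# Opt a copy source out of data store metrics collection. Both parameters
# accept plain values or ADF Expression objects, hence the 'object' typing.
source = models.SnowflakeSource(
    query="SELECT 1",
    disable_metrics_collection=True,
)

# msrest serialization reads the new _attribute_map entry, so the wire payload
# now carries 'disableMetricsCollection' alongside the existing retry settings.
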
@@ -35928,6 +36779,7 @@ class SnowflakeSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'export_settings': {'key': 'exportSettings', 'type': 'SnowflakeExportCopyCommand'}, } @@ -35939,11 +36791,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query: Optional[object] = None, export_settings: Optional["SnowflakeExportCopyCommand"] = None, **kwargs ): - super(SnowflakeSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SnowflakeSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SnowflakeSource' # type: str self.query = query self.export_settings = export_settings @@ -36184,6 +37037,9 @@ class SparkSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -36205,6 +37061,7 @@ class SparkSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -36217,12 +37074,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(SparkSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SparkSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SparkSource' # type: str self.query = query @@ -36294,6 +37152,9 @@ class SqlDwSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType string). 
:type pre_copy_script: object @@ -36325,6 +37186,7 @@ class SqlDwSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, 'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'}, 'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'}, @@ -36342,6 +37204,7 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, pre_copy_script: Optional[object] = None, allow_poly_base: Optional[object] = None, poly_base_settings: Optional["PolybaseSettings"] = None, @@ -36350,7 +37213,7 @@ def __init__( table_option: Optional[object] = None, **kwargs ): - super(SqlDwSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SqlDwSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SqlDWSink' # type: str self.pre_copy_script = pre_copy_script self.allow_poly_base = allow_poly_base @@ -36379,6 +37242,9 @@ class SqlDwSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -36413,6 +37279,7 @@ class SqlDwSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, @@ -36429,6 +37296,7 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, sql_reader_query: Optional[object] = None, @@ -36438,7 +37306,7 @@ def __init__( partition_settings: Optional["SqlPartitionSettings"] = None, **kwargs ): - super(SqlDwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SqlDwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SqlDWSource' # type: str self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name @@ -36472,6 +37340,9 @@ class SqlMiSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). 
:type sql_writer_stored_procedure_name: object @@ -36504,6 +37375,7 @@ class SqlMiSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, @@ -36521,6 +37393,7 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, sql_writer_stored_procedure_name: Optional[object] = None, sql_writer_table_type: Optional[object] = None, pre_copy_script: Optional[object] = None, @@ -36529,7 +37402,7 @@ def __init__( table_option: Optional[object] = None, **kwargs ): - super(SqlMiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SqlMiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SqlMISink' # type: str self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type @@ -36558,6 +37431,9 @@ class SqlMiSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -36593,6 +37469,7 @@ class SqlMiSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, @@ -36610,6 +37487,7 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, sql_reader_query: Optional[object] = None, @@ -36620,7 +37498,7 @@ def __init__( partition_settings: Optional["SqlPartitionSettings"] = None, **kwargs ): - super(SqlMiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SqlMiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SqlMISource' # type: str self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name @@ -36772,6 +37650,9 @@ class SqlServerSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). 
:type sql_writer_stored_procedure_name: object @@ -36804,6 +37685,7 @@ class SqlServerSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, @@ -36821,6 +37703,7 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, sql_writer_stored_procedure_name: Optional[object] = None, sql_writer_table_type: Optional[object] = None, pre_copy_script: Optional[object] = None, @@ -36829,7 +37712,7 @@ def __init__( table_option: Optional[object] = None, **kwargs ): - super(SqlServerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SqlServerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SqlServerSink' # type: str self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type @@ -36858,6 +37741,9 @@ class SqlServerSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -36893,6 +37779,7 @@ class SqlServerSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, @@ -36910,6 +37797,7 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, sql_reader_query: Optional[object] = None, @@ -36920,7 +37808,7 @@ def __init__( partition_settings: Optional["SqlPartitionSettings"] = None, **kwargs ): - super(SqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SqlServerSource' # type: str self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name @@ -37106,6 +37994,9 @@ class SqlSink(CopySink): :param max_concurrent_connections: The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or Expression with resultType string). 
:type sql_writer_stored_procedure_name: object @@ -37138,6 +38029,7 @@ class SqlSink(CopySink): 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'}, 'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'}, 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, @@ -37155,6 +38047,7 @@ def __init__( sink_retry_count: Optional[object] = None, sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, sql_writer_stored_procedure_name: Optional[object] = None, sql_writer_table_type: Optional[object] = None, pre_copy_script: Optional[object] = None, @@ -37163,7 +38056,7 @@ def __init__( table_option: Optional[object] = None, **kwargs ): - super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'SqlSink' # type: str self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type @@ -37192,6 +38085,9 @@ class SqlSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -37229,6 +38125,7 @@ class SqlSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, @@ -37246,6 +38143,7 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, sql_reader_query: Optional[object] = None, @@ -37256,7 +38154,7 @@ def __init__( partition_settings: Optional["SqlPartitionSettings"] = None, **kwargs ): - super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SqlSource' # type: str self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name @@ -37453,6 +38351,9 @@ class SquareSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -37474,6 +38375,7 @@ class SquareSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -37486,12 +38388,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(SquareSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SquareSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SquareSource' # type: str self.query = query @@ -38555,6 +39458,9 @@ class SybaseSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -38575,6 +39481,7 @@ class SybaseSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -38587,12 +39494,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(SybaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SybaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SybaseSource' # type: str self.query = query @@ -38955,6 +39863,9 @@ class TeradataSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -38981,6 +39892,7 @@ class TeradataSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -38995,6 +39907,7 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, @@ -39002,7 +39915,7 @@ def __init__( partition_settings: Optional["TeradataPartitionSettings"] = None, **kwargs ): - super(TeradataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(TeradataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'TeradataSource' # type: str self.query = query self.partition_option = partition_option @@ -40159,6 +41072,9 @@ class VerticaSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -40180,6 +41096,7 @@ class VerticaSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -40192,12 +41109,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(VerticaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(VerticaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'VerticaSource' # type: str self.query = query @@ -40833,6 +41751,9 @@ class WebSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param additional_columns: Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). 
:type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] @@ -40848,6 +41769,7 @@ class WebSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -40858,10 +41780,11 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(WebSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(WebSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'WebSource' # type: str self.additional_columns = additional_columns @@ -41125,6 +42048,9 @@ class XeroSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -41146,6 +42072,7 @@ class XeroSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -41158,12 +42085,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(XeroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(XeroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'XeroSource' # type: str self.query = query @@ -41336,6 +42264,9 @@ class XmlSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param store_settings: Xml store settings. :type store_settings: ~data_factory_management_client.models.StoreReadSettings :param format_settings: Xml format settings. 
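Apart from the metrics flag, the one signature change in this file is ScriptAction.roles, narrowed from Union[str, "HdiNodeTypes"] to plain str; since the removed docstring listed string values ("Headnode", "Workernode", "Zookeeper"), callers that already passed literals are unaffected. A hypothetical call under that assumption, with placeholder name and URI:

# 'roles' now takes the node-type name directly instead of an HdiNodeTypes
# enum member; the previously documented literals remain valid strings.
action = models.ScriptAction(
    name="install-deps",                       # placeholder
    uri="https://example.invalid/install.sh",  # placeholder
    roles="Workernode",
)
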
@@ -41355,6 +42286,7 @@ class XmlSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'XmlReadSettings'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, @@ -41367,12 +42299,13 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, store_settings: Optional["StoreReadSettings"] = None, format_settings: Optional["XmlReadSettings"] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(XmlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(XmlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs) self.type = 'XmlSource' # type: str self.store_settings = store_settings self.format_settings = format_settings @@ -41592,6 +42525,9 @@ class ZohoSource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param disable_metrics_collection: If true, disable data store metrics collection. Default is + false. Type: boolean (or Expression with resultType boolean). + :type disable_metrics_collection: object :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object @@ -41613,6 +42549,7 @@ class ZohoSource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, @@ -41625,11 +42562,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + disable_metrics_collection: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(ZohoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(ZohoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'ZohoSource' # type: str self.query = query diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_run_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_run_operations.py deleted file mode 100644 index 192e09232ad..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_run_operations.py +++ /dev/null @@ -1,132 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -import datetime -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class ActivityRunOperations(object): - """ActivityRunOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. 
- :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def query_by_pipeline_run( - self, - resource_group_name, # type: str - factory_name, # type: str - run_id, # type: str - last_updated_after, # type: datetime.datetime - last_updated_before, # type: datetime.datetime - continuation_token_parameter=None, # type: Optional[str] - filters=None, # type: Optional[List["models.RunQueryFilter"]] - order_by=None, # type: Optional[List["models.RunQueryOrderBy"]] - **kwargs # type: Any - ): - # type: (...) -> "models.ActivityRunsQueryResponse" - """Query activity runs based on input filter conditions. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param run_id: The pipeline run identifier. - :type run_id: str - :param last_updated_after: The time at or after which the run event was updated in 'ISO 8601' - format. - :type last_updated_after: ~datetime.datetime - :param last_updated_before: The time at or before which the run event was updated in 'ISO 8601' - format. - :type last_updated_before: ~datetime.datetime - :param continuation_token_parameter: The continuation token for getting the next page of - results. Null for first page. - :type continuation_token_parameter: str - :param filters: List of filters. - :type filters: list[~data_factory_management_client.models.RunQueryFilter] - :param order_by: List of OrderBy option. 
- :type order_by: list[~data_factory_management_client.models.RunQueryOrderBy] - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ActivityRunsQueryResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ActivityRunsQueryResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ActivityRunsQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.query_by_pipeline_run.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'runId': self._serialize.url("run_id", run_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ActivityRunsQueryResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - query_by_pipeline_run.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/queryActivityruns'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_operations.py deleted file mode 100644 index e0bd3be1783..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_operations.py +++ /dev/null @@ -1,317 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
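[Editor's note] A hedged usage sketch for the query_by_pipeline_run operation being removed above. The client construction is elided, the activity_run attribute name is inferred from the ActivityRunOperations class name, and all resource names are hypothetical:

    import datetime
    from azext_datafactory.vendored_sdks.datafactory import models

    result = client.activity_run.query_by_pipeline_run(
        resource_group_name="my-rg",                       # hypothetical
        factory_name="exampleFactory",                     # hypothetical
        run_id="example-pipeline-run-id",                  # hypothetical
        last_updated_after=datetime.datetime(2021, 6, 1),
        last_updated_before=datetime.datetime(2021, 6, 2),
        filters=[models.RunQueryFilter(                    # optional narrowing
            operand="ActivityName", operator="Equals", values=["copyData"])],
    )
    for activity_run in result.value:  # ActivityRunsQueryResponse.value assumed to hold the page
        print(activity_run)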
See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class DataFlowOperations(object): - """DataFlowOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def create_or_update( - self, - resource_group_name, # type: str - factory_name, # type: str - data_flow_name, # type: str - properties, # type: "models.DataFlow" - if_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.DataFlowResource" - """Creates or updates a data flow. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param data_flow_name: The data flow name. - :type data_flow_name: str - :param properties: Data flow properties. - :type properties: ~data_factory_management_client.models.DataFlow - :param if_match: ETag of the data flow entity. Should only be specified for update, for which - it should match existing entity or can be * for unconditional update. 
- :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: DataFlowResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.DataFlowResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - data_flow = models.DataFlowResource(properties=properties) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(data_flow, 'DataFlowResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('DataFlowResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'} # type: ignore - - def get( - self, - resource_group_name, # type: str - factory_name, # type: str - data_flow_name, # type: str - if_none_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.DataFlowResource" - """Gets a data flow. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param data_flow_name: The data flow name. - :type data_flow_name: str - :param if_none_match: ETag of the data flow entity. Should only be specified for get. If the - ETag matches the existing entity tag, or if * was provided, then no content will be returned. 
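[Editor's note] The if_match/if_none_match parameters throughout these operation groups implement standard ARM optimistic concurrency. A sketch of the read-modify-write loop they enable, with a hypothetical client and names; the description field on the DataFlow model is an assumption:

    resource = client.data_flow.get("my-rg", "exampleFactory", "exampleDataFlow")
    resource.properties.description = "updated"     # DataFlowResource.properties, per the diff
    client.data_flow.create_or_update(
        "my-rg", "exampleFactory", "exampleDataFlow",
        properties=resource.properties,
        if_match=resource.etag,  # the PUT typically fails with 412 if the entity changed meanwhile
    )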
- :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: DataFlowResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.DataFlowResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('DataFlowResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'} # type: ignore - - def delete( - self, - resource_group_name, # type: str - factory_name, # type: str - data_flow_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Deletes a data flow. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param data_flow_name: The data flow name. 
- :type data_flow_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'} # type: ignore - - def list_by_factory( - self, - resource_group_name, # type: str - factory_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["models.DataFlowListResponse"] - """Lists data flows. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
- :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either DataFlowListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.DataFlowListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize('DataFlowListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_dataset_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_dataset_operations.py deleted file mode 100644 index 2f866416c74..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_dataset_operations.py +++ /dev/null @@ -1,319 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
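[Editor's note] Every list_by_factory in this patch follows the same azure-core paging contract: prepare_request builds the first or next request, extract_data returns (next_link, items), and ItemPaged drives the loop. A consumption sketch with hypothetical names:

    # Transparent iteration across all pages:
    for data_flow in client.data_flow.list_by_factory("my-rg", "exampleFactory"):
        print(data_flow.name)

    # Or page-at-a-time, which azure.core.paging.ItemPaged also supports:
    for page in client.data_flow.list_by_factory("my-rg", "exampleFactory").by_page():
        print(len(list(page)))  # each page is itself an iterable of resources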
-# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class DatasetOperations(object): - """DatasetOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_factory( - self, - resource_group_name, # type: str - factory_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["models.DatasetListResponse"] - """Lists datasets. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
- :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either DatasetListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.DatasetListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize('DatasetListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets'} # type: ignore - - def create_or_update( - self, - resource_group_name, # type: str - factory_name, # type: str - dataset_name, # type: str - properties, # type: "models.Dataset" - if_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.DatasetResource" - """Creates or updates a dataset. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param dataset_name: The dataset name. - :type dataset_name: str - :param properties: Dataset properties. - :type properties: ~data_factory_management_client.models.Dataset - :param if_match: ETag of the dataset entity. Should only be specified for update, for which it - should match existing entity or can be * for unconditional update. 
- :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: DatasetResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.DatasetResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - dataset = models.DatasetResource(properties=properties) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(dataset, 'DatasetResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('DatasetResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'} # type: ignore - - def get( - self, - resource_group_name, # type: str - factory_name, # type: str - dataset_name, # type: str - if_none_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Optional["models.DatasetResource"] - """Gets a dataset. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param dataset_name: The dataset name. - :type dataset_name: str - :param if_none_match: ETag of the dataset entity. Should only be specified for get. If the ETag - matches the existing entity tag, or if * was provided, then no content will be returned. 
- :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: DatasetResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.DatasetResource or None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DatasetResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 304]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('DatasetResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'} # type: ignore - - def delete( - self, - resource_group_name, # type: str - factory_name, # type: str - dataset_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Deletes a dataset. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param dataset_name: The dataset name. 
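[Editor's note] Note the Optional return type on this get(): a 304 response (the If-None-Match ETag matched) is accepted and deserializes to None, so a caller holding a cached copy can make conditional fetches. A hedged sketch; the cache helpers are hypothetical:

    cached_etag, cached_dataset = load_from_cache()  # hypothetical cache helpers
    dataset = client.dataset.get(
        "my-rg", "exampleFactory", "exampleDataset",
        if_none_match=cached_etag,   # ETag remembered from an earlier get/create_or_update
    )
    if dataset is None:
        dataset = cached_dataset     # 304: the cached copy is still current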
- :type dataset_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factory_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factory_operations.py deleted file mode 100644 index 5b8622e97f9..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factory_operations.py +++ /dev/null @@ -1,671 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. 
import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class FactoryOperations(object): - """FactoryOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list( - self, - **kwargs # type: Any - ): - # type: (...) -> Iterable["models.FactoryListResponse"] - """Lists factories under the specified subscription. - - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either FactoryListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.FactoryListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.FactoryListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize('FactoryListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/factories'} # type: ignore - - def configure_factory_repo( - self, - 
location_id, # type: str - factory_resource_id=None, # type: Optional[str] - repo_configuration=None, # type: Optional["models.FactoryRepoConfiguration"] - **kwargs # type: Any - ): - # type: (...) -> "models.Factory" - """Updates a factory's repo information. - - :param location_id: The location identifier. - :type location_id: str - :param factory_resource_id: The factory resource id. - :type factory_resource_id: str - :param repo_configuration: Git repo information of the factory. - :type repo_configuration: ~data_factory_management_client.models.FactoryRepoConfiguration - :keyword callable cls: A custom type or function that will be passed the direct response - :return: Factory, or the result of cls(response) - :rtype: ~data_factory_management_client.models.Factory - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - factory_repo_update = models.FactoryRepoUpdate(factory_resource_id=factory_resource_id, repo_configuration=repo_configuration) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.configure_factory_repo.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'locationId': self._serialize.url("location_id", location_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(factory_repo_update, 'FactoryRepoUpdate') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('Factory', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - configure_factory_repo.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/locations/{locationId}/configureFactoryRepo'} # type: ignore - - def list_by_resource_group( - self, - resource_group_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["models.FactoryListResponse"] - """Lists factories. - - :param resource_group_name: The resource group name. 
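[Editor's note] configure_factory_repo is one of several operations in this file that re-assemble flattened parameters into a request model (here FactoryRepoUpdate) before POSTing. A caller-side sketch; the FactoryVSTSConfiguration subtype and its field names are assumptions from the public azure-mgmt-datafactory models, and all values are hypothetical:

    repo = models.FactoryVSTSConfiguration(
        account_name="myaccount",
        project_name="myproject",
        repository_name="myrepo",
        collaboration_branch="main",
        root_folder="/",
    )
    factory = client.factory.configure_factory_repo(
        location_id="eastus",
        factory_resource_id=(
            "/subscriptions/00000000-0000-0000-0000-000000000000"
            "/resourceGroups/my-rg/providers/Microsoft.DataFactory/factories/exampleFactory"
        ),
        repo_configuration=repo,
    )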
- :type resource_group_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either FactoryListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.FactoryListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.FactoryListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_resource_group.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize('FactoryListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged( - get_next, extract_data - ) - list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories'} # type: ignore - - def create_or_update( - self, - resource_group_name, # type: str - factory_name, # type: str - if_match=None, # type: Optional[str] - location=None, # type: Optional[str] - tags=None, # type: Optional[Dict[str, str]] - identity=None, # type: Optional["models.FactoryIdentity"] - repo_configuration=None, # type: Optional["models.FactoryRepoConfiguration"] - global_parameters=None, # type: Optional[Dict[str, "models.GlobalParameterSpecification"]] - **kwargs # type: Any - ): - # type: (...) -> "models.Factory" - """Creates or updates a factory. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param if_match: ETag of the factory entity. Should only be specified for update, for which it - should match existing entity or can be * for unconditional update. - :type if_match: str - :param location: The resource location. - :type location: str - :param tags: The resource tags. 
- :type tags: dict[str, str] - :param identity: Managed service identity of the factory. - :type identity: ~data_factory_management_client.models.FactoryIdentity - :param repo_configuration: Git repo information of the factory. - :type repo_configuration: ~data_factory_management_client.models.FactoryRepoConfiguration - :param global_parameters: List of parameters for factory. - :type global_parameters: dict[str, ~data_factory_management_client.models.GlobalParameterSpecification] - :keyword callable cls: A custom type or function that will be passed the direct response - :return: Factory, or the result of cls(response) - :rtype: ~data_factory_management_client.models.Factory - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - factory = models.Factory(location=location, tags=tags, identity=identity, repo_configuration=repo_configuration, global_parameters=global_parameters) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(factory, 'Factory') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('Factory', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore - - def update( - self, - resource_group_name, # type: str - factory_name, # type: str - tags=None, # type: Optional[Dict[str, str]] - identity=None, # type: Optional["models.FactoryIdentity"] - **kwargs # type: Any - ): - # type: (...) -> "models.Factory" - """Updates a factory. - - :param resource_group_name: The resource group name. 
- :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param tags: The resource tags. - :type tags: dict[str, str] - :param identity: Managed service identity of the factory. - :type identity: ~data_factory_management_client.models.FactoryIdentity - :keyword callable cls: A custom type or function that will be passed the direct response - :return: Factory, or the result of cls(response) - :rtype: ~data_factory_management_client.models.Factory - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - factory_update_parameters = models.FactoryUpdateParameters(tags=tags, identity=identity) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(factory_update_parameters, 'FactoryUpdateParameters') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('Factory', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore - - def get( - self, - resource_group_name, # type: str - factory_name, # type: str - if_none_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Optional["models.Factory"] - """Gets a factory. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param if_none_match: ETag of the factory entity. Should only be specified for get. If the ETag - matches the existing entity tag, or if * was provided, then no content will be returned. 
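[Editor's note] update() deliberately accepts only tags and identity: it wraps them in FactoryUpdateParameters and issues a PATCH, whereas create_or_update() above PUTs a full Factory model. A hedged contrast, with hypothetical names:

    # Full replacement (PUT) -- properties not passed typically revert to defaults:
    client.factory.create_or_update("my-rg", "exampleFactory", location="eastus")

    # In-place tweak (PATCH) -- only tags/identity are touched:
    client.factory.update("my-rg", "exampleFactory", tags={"env": "dev"})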
- :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: Factory, or the result of cls(response) - :rtype: ~data_factory_management_client.models.Factory or None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.Factory"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 304]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('Factory', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore - - def delete( - self, - resource_group_name, # type: str - factory_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Deletes a factory. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
- :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore - - def get_git_hub_access_token( - self, - resource_group_name, # type: str - factory_name, # type: str - git_hub_access_code, # type: str - git_hub_access_token_base_url, # type: str - git_hub_client_id=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.GitHubAccessTokenResponse" - """Get GitHub Access Token. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param git_hub_access_code: GitHub access code. - :type git_hub_access_code: str - :param git_hub_access_token_base_url: GitHub access token base URL. - :type git_hub_access_token_base_url: str - :param git_hub_client_id: GitHub application client ID. 
- :type git_hub_client_id: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: GitHubAccessTokenResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.GitHubAccessTokenResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.GitHubAccessTokenResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - git_hub_access_token_request = models.GitHubAccessTokenRequest(git_hub_access_code=git_hub_access_code, git_hub_client_id=git_hub_client_id, git_hub_access_token_base_url=git_hub_access_token_base_url) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.get_git_hub_access_token.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(git_hub_access_token_request, 'GitHubAccessTokenRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('GitHubAccessTokenResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get_git_hub_access_token.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getGitHubAccessToken'} # type: ignore - - def get_data_plane_access( - self, - resource_group_name, # type: str - factory_name, # type: str - permissions=None, # type: Optional[str] - access_resource_path=None, # type: Optional[str] - profile_name=None, # type: Optional[str] - start_time=None, # type: Optional[str] - expire_time=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.AccessPolicyResponse" - """Get Data Plane access. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param permissions: The string with permissions for Data Plane access. Currently only 'r' is - supported which grants read only access. 
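A minimal sketch of exchanging a GitHub OAuth code for an access token via the operation above (same `factory_op` assumption; the access code, base URL, and client ID values are placeholders):

    token_response = factory_op.get_git_hub_access_token(
        resource_group_name="myResourceGroup",
        factory_name="myFactory",
        git_hub_access_code="<code-from-oauth-redirect>",
        git_hub_access_token_base_url="https://github.com",
        git_hub_client_id="<github-app-client-id>",  # optional
    )
    # The token attribute name is assumed from the response model.
    print(token_response.git_hub_access_token)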
- :type permissions: str - :param access_resource_path: The resource path to get access relative to factory. Currently - only empty string is supported which corresponds to the factory resource. - :type access_resource_path: str - :param profile_name: The name of the profile. Currently only the default is supported. The - default value is DefaultProfile. - :type profile_name: str - :param start_time: Start time for the token. If not specified the current time will be used. - :type start_time: str - :param expire_time: Expiration time for the token. Maximum duration for the token is eight - hours and by default the token will expire in eight hours. - :type expire_time: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: AccessPolicyResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.AccessPolicyResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.AccessPolicyResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - policy = models.UserAccessPolicy(permissions=permissions, access_resource_path=access_resource_path, profile_name=profile_name, start_time=start_time, expire_time=expire_time) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.get_data_plane_access.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(policy, 'UserAccessPolicy') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('AccessPolicyResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get_data_plane_access.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getDataPlaneAccess'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_node_operations.py 
b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_node_operations.py deleted file mode 100644 index a7903633080..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_node_operations.py +++ /dev/null @@ -1,309 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class IntegrationRuntimeNodeOperations(object): - """IntegrationRuntimeNodeOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def get( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - node_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "models.SelfHostedIntegrationRuntimeNode" - """Gets a self-hosted integration runtime node. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param node_name: The integration runtime node name. 
- :type node_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: SelfHostedIntegrationRuntimeNode, or the result of cls(response) - :rtype: ~data_factory_management_client.models.SelfHostedIntegrationRuntimeNode - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.SelfHostedIntegrationRuntimeNode"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('SelfHostedIntegrationRuntimeNode', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} # type: ignore - - def delete( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - node_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Deletes a self-hosted integration runtime node. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param node_name: The integration runtime node name. 
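A minimal sketch of reading one node of a self-hosted integration runtime with the get operation above (assuming `ir_node_op` is the IntegrationRuntimeNodeOperations instance on the client; names are illustrative):

    node = ir_node_op.get(
        resource_group_name="myResourceGroup",
        factory_name="myFactory",
        integration_runtime_name="mySelfHostedIr",
        node_name="Node_1",
    )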
- :type node_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} # type: ignore - - def update( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - node_name, # type: str - concurrent_jobs_limit=None, # type: Optional[int] - **kwargs # type: Any - ): - # type: (...) -> "models.SelfHostedIntegrationRuntimeNode" - """Updates a self-hosted integration runtime node. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param node_name: The integration runtime node name. - :type node_name: str - :param concurrent_jobs_limit: The number of concurrent jobs permitted to run on the integration - runtime node. Values between 1 and maxConcurrentJobs(inclusive) are allowed. 
- :type concurrent_jobs_limit: int - :keyword callable cls: A custom type or function that will be passed the direct response - :return: SelfHostedIntegrationRuntimeNode, or the result of cls(response) - :rtype: ~data_factory_management_client.models.SelfHostedIntegrationRuntimeNode - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.SelfHostedIntegrationRuntimeNode"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - update_integration_runtime_node_request = models.UpdateIntegrationRuntimeNodeRequest(concurrent_jobs_limit=concurrent_jobs_limit) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(update_integration_runtime_node_request, 'UpdateIntegrationRuntimeNodeRequest') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('SelfHostedIntegrationRuntimeNode', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} # type: ignore - - def get_ip_address( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - node_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "models.IntegrationRuntimeNodeIpAddress" - """Get the IP address of self-hosted integration runtime node. - - :param resource_group_name: The resource group name. 
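A minimal sketch of capping concurrency on a node via the update operation above (same `ir_node_op` assumption; per the docstring, the limit must fall between 1 and the node's maxConcurrentJobs, inclusive):

    node = ir_node_op.update(
        resource_group_name="myResourceGroup",
        factory_name="myFactory",
        integration_runtime_name="mySelfHostedIr",
        node_name="Node_1",
        concurrent_jobs_limit=4,  # illustrative value
    )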
- :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param node_name: The integration runtime node name. - :type node_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeNodeIpAddress, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeNodeIpAddress - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeNodeIpAddress"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get_ip_address.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeNodeIpAddress', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get_ip_address.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}/ipAddress'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_operations.py deleted file mode 100644 index 1fb5fc6b30d..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_operations.py +++ /dev/null @@ -1,1198 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. 
-# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.mgmt.core.exceptions import ARMErrorFormat -from azure.mgmt.core.polling.arm_polling import ARMPolling - -from .. import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class IntegrationRuntimeOperations(object): - """IntegrationRuntimeOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_factory( - self, - resource_group_name, # type: str - factory_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["models.IntegrationRuntimeListResponse"] - """Lists integration runtimes. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
- :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either IntegrationRuntimeListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.IntegrationRuntimeListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize('IntegrationRuntimeListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes'} # type: ignore - - def create_or_update( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - properties, # type: "models.IntegrationRuntime" - if_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.IntegrationRuntimeResource" - """Creates or updates an integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param properties: Integration runtime properties. - :type properties: ~data_factory_management_client.models.IntegrationRuntime - :param if_match: ETag of the integration runtime entity. 
Should only be specified for update, - for which it should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - integration_runtime = models.IntegrationRuntimeResource(properties=properties) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(integration_runtime, 'IntegrationRuntimeResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore - - def get( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - if_none_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Optional["models.IntegrationRuntimeResource"] - """Gets an integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
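A minimal sketch tying the two operations above together: create (or unconditionally overwrite) an integration runtime, then page through every runtime in the factory (assuming `ir_op` is the IntegrationRuntimeOperations instance and that the vendored models expose a SelfHostedIntegrationRuntime type):

    ir = ir_op.create_or_update(
        resource_group_name="myResourceGroup",
        factory_name="myFactory",
        integration_runtime_name="mySelfHostedIr",
        properties=models.SelfHostedIntegrationRuntime(description="example runtime"),
        if_match="*",  # unconditional update, per the docstring above
    )
    # list_by_factory returns an ItemPaged; iterating it fetches pages lazily.
    for runtime in ir_op.list_by_factory(
        resource_group_name="myResourceGroup",
        factory_name="myFactory",
    ):
        print(runtime.name)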
- :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param if_none_match: ETag of the integration runtime entity. Should only be specified for get. - If the ETag matches the existing entity tag, or if * was provided, then no content will be - returned. - :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource or None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.IntegrationRuntimeResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 304]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore - - def update( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - auto_update=None, # type: Optional[Union[str, "models.IntegrationRuntimeAutoUpdate"]] - update_delay_offset=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.IntegrationRuntimeResource" - """Updates an integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. 
- :type integration_runtime_name: str - :param auto_update: Enables or disables the auto-update feature of the self-hosted integration - runtime. See https://go.microsoft.com/fwlink/?linkid=854189. - :type auto_update: str or ~data_factory_management_client.models.IntegrationRuntimeAutoUpdate - :param update_delay_offset: The time offset (in hours) in the day, e.g., PT03H is 3 hours. The - integration runtime auto update will happen on that time. - :type update_delay_offset: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - update_integration_runtime_request = models.UpdateIntegrationRuntimeRequest(auto_update=auto_update, update_delay_offset=update_delay_offset) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(update_integration_runtime_request, 'UpdateIntegrationRuntimeRequest') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore - - def delete( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - **kwargs # type: Any - ): - # type: 
(...) -> None - """Deletes an integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore - - def get_status( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "models.IntegrationRuntimeStatusResponse" - """Gets detailed status information for an integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. 
- :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeStatusResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeStatusResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get_status.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getStatus'} # type: ignore - - def get_connection_info( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "models.IntegrationRuntimeConnectionInfo" - """Gets the on-premises integration runtime connection information for encrypting the on-premises - data source credentials. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. 
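A minimal sketch of fetching detailed status through the get_status operation above (same `ir_op` assumption; the state attribute on the returned properties is assumed from the service's status model):

    status = ir_op.get_status(
        resource_group_name="myResourceGroup",
        factory_name="myFactory",
        integration_runtime_name="mySelfHostedIr",
    )
    print(status.name, status.properties.state)  # attribute names assumed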
- :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeConnectionInfo, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeConnectionInfo - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeConnectionInfo"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get_connection_info.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeConnectionInfo', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get_connection_info.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getConnectionInfo'} # type: ignore - - def regenerate_auth_key( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - key_name=None, # type: Optional[Union[str, "models.IntegrationRuntimeAuthKeyName"]] - **kwargs # type: Any - ): - # type: (...) -> "models.IntegrationRuntimeAuthKeys" - """Regenerates the authentication key for an integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param key_name: The name of the authentication key to regenerate. 
- :type key_name: str or ~data_factory_management_client.models.IntegrationRuntimeAuthKeyName - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeAuthKeys, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeAuthKeys - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeAuthKeys"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - regenerate_key_parameters = models.IntegrationRuntimeRegenerateKeyParameters(key_name=key_name) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.regenerate_auth_key.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(regenerate_key_parameters, 'IntegrationRuntimeRegenerateKeyParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeAuthKeys', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - regenerate_auth_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/regenerateAuthKey'} # type: ignore - - def list_auth_key( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "models.IntegrationRuntimeAuthKeys" - """Retrieves the authentication keys for an integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. 
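A minimal sketch of rotating one of the runtime's two authentication keys with the regenerate operation above, then reading the pair back via list_auth_key, declared just above (same `ir_op` assumption; "authKey2" is one of the two key names accepted by IntegrationRuntimeAuthKeyName):

    new_keys = ir_op.regenerate_auth_key(
        resource_group_name="myResourceGroup",
        factory_name="myFactory",
        integration_runtime_name="mySelfHostedIr",
        key_name="authKey2",
    )
    keys = ir_op.list_auth_key(
        resource_group_name="myResourceGroup",
        factory_name="myFactory",
        integration_runtime_name="mySelfHostedIr",
    )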
- :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeAuthKeys, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeAuthKeys - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeAuthKeys"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.list_auth_key.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeAuthKeys', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - list_auth_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/listAuthKeys'} # type: ignore - - def _start_initial( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> Optional["models.IntegrationRuntimeStatusResponse"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.IntegrationRuntimeStatusResponse"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self._start_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start'} # type: ignore - - def begin_start( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller["models.IntegrationRuntimeStatusResponse"] - """Starts a ManagedReserved type integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
- :return: An instance of LROPoller that returns either IntegrationRuntimeStatusResponse or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[~data_factory_management_client.models.IntegrationRuntimeStatusResponse] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = self._start_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - integration_runtime_name=integration_runtime_name, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start'} # type: ignore - - def _stop_initial( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - **kwargs # type: Any - ): - # type: (...) 
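begin_start wraps _start_initial in an LROPoller driven by ARMPolling, so the caller decides when to block. A sketch, reusing the hypothetical client from the earlier sketch:

poller = client.integration_runtime.begin_start(
    "myResourceGroup", "myFactory", "myManagedIr"
)
status = poller.result()  # blocks until the LRO finishes, honoring Retry-After
print(status.properties.state)  # IntegrationRuntimeStatusResponse payload

# The poller state can be saved and resumed later, which is what the
# cont_token branch above supports:
token = poller.continuation_token()
resumed = client.integration_runtime.begin_start(
    "myResourceGroup", "myFactory", "myManagedIr", continuation_token=token
)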
-> None - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self._stop_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop'} # type: ignore - - def begin_stop( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] - """Stops a ManagedReserved type integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
- :return: An instance of LROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = self._stop_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - integration_runtime_name=integration_runtime_name, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - if cls: - return cls(pipeline_response, None, {}) - - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop'} # type: ignore - - def sync_credentials( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Force the integration runtime to synchronize credentials across integration runtime nodes. This - will override the credentials across all worker nodes with those available on the - dispatcher node. If you already have the latest credential backup file, you should manually - import it (preferred) on any self-hosted integration runtime node rather than using this API directly. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name.
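As the begin_stop body above shows, the polling keyword accepts True (ARMPolling), False (NoPolling), or a custom PollingMethod. A sketch of the non-default modes, with the same hypothetical client:

# Fire-and-forget: returns right after the initial 200/202 without polling.
poller = client.integration_runtime.begin_stop(
    "myResourceGroup", "myFactory", "myManagedIr", polling=False
)

# Or poll at a fixed interval when the service sends no Retry-After header.
poller = client.integration_runtime.begin_stop(
    "myResourceGroup", "myFactory", "myManagedIr", polling_interval=10
)
poller.wait()           # block without needing the (None) result
assert poller.done()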
- :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.sync_credentials.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - sync_credentials.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/syncCredentials'} # type: ignore - - def get_monitoring_data( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "models.IntegrationRuntimeMonitoringData" - """Get the integration runtime monitoring data, which includes the monitor data for all the nodes - under this integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. 
- :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeMonitoringData, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeMonitoringData - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeMonitoringData"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get_monitoring_data.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeMonitoringData', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get_monitoring_data.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/monitoringData'} # type: ignore - - def upgrade( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Upgrade the self-hosted integration runtime to the latest version if one is available. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name.
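get_monitoring_data (completed above) returns one IntegrationRuntimeMonitoringData object covering every node of the runtime. A sketch; the per-node attribute names follow the published Data Factory models and are assumptions here:

data = client.integration_runtime.get_monitoring_data(
    "myResourceGroup", "myFactory", "mySelfHostedIr"
)
for node in data.nodes or []:
    # Per-node counters such as CPU utilization and concurrent job figures.
    print(node.node_name, node.cpu_utilization, node.concurrent_jobs_running)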
- :type integration_runtime_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.upgrade.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - upgrade.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/upgrade'} # type: ignore - - def remove_link( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - linked_factory_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Remove all linked integration runtimes under a specific data factory in a self-hosted integration - runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param linked_factory_name: The data factory name for linked integration runtime.
- :type linked_factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - linked_integration_runtime_request = models.LinkedIntegrationRuntimeRequest(linked_factory_name=linked_factory_name) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.remove_link.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(linked_integration_runtime_request, 'LinkedIntegrationRuntimeRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - remove_link.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/removeLinks'} # type: ignore - - def create_linked_integration_runtime( - self, - resource_group_name, # type: str - factory_name, # type: str - integration_runtime_name, # type: str - name=None, # type: Optional[str] - subscription_id=None, # type: Optional[str] - data_factory_name=None, # type: Optional[str] - data_factory_location=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.IntegrationRuntimeStatusResponse" - """Create a linked integration runtime entry in a shared integration runtime. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param integration_runtime_name: The integration runtime name. - :type integration_runtime_name: str - :param name: The name of the linked integration runtime. 
- :type name: str - :param subscription_id: The ID of the subscription that the linked integration runtime belongs - to. - :type subscription_id: str - :param data_factory_name: The name of the data factory that the linked integration runtime - belongs to. - :type data_factory_name: str - :param data_factory_location: The location of the data factory that the linked integration - runtime belongs to. - :type data_factory_location: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: IntegrationRuntimeStatusResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.IntegrationRuntimeStatusResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - create_linked_integration_runtime_request = models.CreateLinkedIntegrationRuntimeRequest(name=name, subscription_id=subscription_id, data_factory_name=data_factory_name, data_factory_location=data_factory_location) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_linked_integration_runtime.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(create_linked_integration_runtime_request, 'CreateLinkedIntegrationRuntimeRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_linked_integration_runtime.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/linkedIntegrationRuntime'} # type: ignore diff --git 
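Taken together, create_linked_integration_runtime and remove_link manage sharing of a self-hosted runtime: the first registers a consumer factory against the shared runtime, the second removes every link for that factory. A sketch with hypothetical factory and runtime names:

client.integration_runtime.create_linked_integration_runtime(
    resource_group_name="sharedRg",
    factory_name="sharedFactory",
    integration_runtime_name="sharedIr",
    name="linkedIr",
    subscription_id="<consumer-subscription-id>",
    data_factory_name="consumerFactory",
    data_factory_location="eastus",
)
# Later, detach the consumer factory again:
client.integration_runtime.remove_link(
    "sharedRg", "sharedFactory", "sharedIr",
    linked_factory_name="consumerFactory",
)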
a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_service_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_service_operations.py deleted file mode 100644 index 7124cb588eb..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_service_operations.py +++ /dev/null @@ -1,320 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class LinkedServiceOperations(object): - """LinkedServiceOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_factory( - self, - resource_group_name, # type: str - factory_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["models.LinkedServiceListResponse"] - """Lists linked services. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
- :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either LinkedServiceListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.LinkedServiceListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize('LinkedServiceListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices'} # type: ignore - - def create_or_update( - self, - resource_group_name, # type: str - factory_name, # type: str - linked_service_name, # type: str - properties, # type: "models.LinkedService" - if_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.LinkedServiceResource" - """Creates or updates a linked service. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param linked_service_name: The linked service name. - :type linked_service_name: str - :param properties: Properties of linked service. - :type properties: ~data_factory_management_client.models.LinkedService - :param if_match: ETag of the linkedService entity. 
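list_by_factory returns an ItemPaged that lazily calls get_next/extract_data once per page, so callers simply iterate. A sketch, assuming the group is exposed as client.linked_service:

for ls in client.linked_service.list_by_factory("myResourceGroup", "myFactory"):
    # Each item is a LinkedServiceResource; pages are fetched on demand.
    print(ls.name, ls.properties.type)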
Should only be specified for update, for - which it should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: LinkedServiceResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.LinkedServiceResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - linked_service = models.LinkedServiceResource(properties=properties) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(linked_service, 'LinkedServiceResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('LinkedServiceResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} # type: ignore - - def get( - self, - resource_group_name, # type: str - factory_name, # type: str - linked_service_name, # type: str - if_none_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Optional["models.LinkedServiceResource"] - """Gets a linked service. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param linked_service_name: The linked service name. 
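create_or_update wraps the given properties in a LinkedServiceResource and sends If-Match when supplied, which enables optimistic concurrency. A sketch; AzureBlobStorageLinkedService is one concrete LinkedService subtype from these models, and the connection string is a placeholder:

from data_factory_management_client.models import AzureBlobStorageLinkedService

props = AzureBlobStorageLinkedService(connection_string="<connection-string>")
created = client.linked_service.create_or_update(
    "myResourceGroup", "myFactory", "myStorageLink", properties=props
)
# Re-submit with the captured ETag; the service rejects the update if the
# entity changed in the meantime.
client.linked_service.create_or_update(
    "myResourceGroup", "myFactory", "myStorageLink",
    properties=props, if_match=created.etag,
)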
- :type linked_service_name: str - :param if_none_match: ETag of the linked service entity. Should only be specified for get. If - the ETag matches the existing entity tag, or if * was provided, then no content will be - returned. - :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: LinkedServiceResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.LinkedServiceResource or None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.LinkedServiceResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 304]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('LinkedServiceResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} # type: ignore - - def delete( - self, - resource_group_name, # type: str - factory_name, # type: str - linked_service_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Deletes a linked service. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param linked_service_name: The linked service name. 
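Because 304 is treated above as a success status with no body, get returns None when the supplied ETag still matches the entity. Sketch:

known_etag = "<etag-from-a-previous-response>"  # placeholder value
current = client.linked_service.get(
    "myResourceGroup", "myFactory", "myStorageLink", if_none_match=known_etag
)
if current is None:
    print("unchanged since last fetch (HTTP 304)")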
- :type linked_service_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoint_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoint_operations.py deleted file mode 100644 index 29be0bd0e6d..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoint_operations.py +++ /dev/null @@ -1,344 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. 
import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class ManagedPrivateEndpointOperations(object): - """ManagedPrivateEndpointOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_factory( - self, - resource_group_name, # type: str - factory_name, # type: str - managed_virtual_network_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["models.ManagedPrivateEndpointListResponse"] - """Lists managed private endpoints. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. - :type managed_virtual_network_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ManagedPrivateEndpointListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.ManagedPrivateEndpointListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] 
- request = self._client.get(url, query_parameters, header_parameters) - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize('ManagedPrivateEndpointListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints'} # type: ignore - - def create_or_update( - self, - resource_group_name, # type: str - factory_name, # type: str - managed_virtual_network_name, # type: str - managed_private_endpoint_name, # type: str - if_match=None, # type: Optional[str] - connection_state=None, # type: Optional["models.ConnectionStateProperties"] - fqdns=None, # type: Optional[List[str]] - group_id=None, # type: Optional[str] - private_link_resource_id=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.ManagedPrivateEndpointResource" - """Creates or updates a managed private endpoint. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. - :type managed_virtual_network_name: str - :param managed_private_endpoint_name: Managed private endpoint name. - :type managed_private_endpoint_name: str - :param if_match: ETag of the managed private endpoint entity. Should only be specified for - update, for which it should match existing entity or can be * for unconditional update. - :type if_match: str - :param connection_state: The managed private endpoint connection state. - :type connection_state: ~data_factory_management_client.models.ConnectionStateProperties - :param fqdns: Fully qualified domain names. - :type fqdns: list[str] - :param group_id: The groupId to which the managed private endpoint is created. - :type group_id: str - :param private_link_resource_id: The ARM resource ID of the resource to which the managed - private endpoint is created. 
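The ItemPaged returned here can also be consumed page by page, which surfaces the prepare_request/extract_data cycle shown above. Sketch; 'default' is the conventional managed virtual network name and is assumed here:

endpoints = client.managed_private_endpoint.list_by_factory(
    "myResourceGroup", "myFactory", "default"
)
for page in endpoints.by_page():   # one service round-trip per page
    for ep in page:
        print(ep.name)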
- :type private_link_resource_id: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ManagedPrivateEndpointResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ManagedPrivateEndpointResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - managed_private_endpoint = models.ManagedPrivateEndpointResource(connection_state=connection_state, fqdns=fqdns, group_id=group_id, private_link_resource_id=private_link_resource_id) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(managed_private_endpoint, 'ManagedPrivateEndpointResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ManagedPrivateEndpointResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore - - def get( - self, - resource_group_name, # type: str - factory_name, # type: str - 
managed_virtual_network_name, # type: str - managed_private_endpoint_name, # type: str - if_none_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.ManagedPrivateEndpointResource" - """Gets a managed private endpoint. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. - :type managed_virtual_network_name: str - :param managed_private_endpoint_name: Managed private endpoint name. - :type managed_private_endpoint_name: str - :param if_none_match: ETag of the managed private endpoint entity. Should only be specified for - get. If the ETag matches the existing entity tag, or if * was provided, then no content will be - returned. - :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ManagedPrivateEndpointResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ManagedPrivateEndpointResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ManagedPrivateEndpointResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore - - def delete( - self, - resource_group_name, # type: str - factory_name, # type: str - managed_virtual_network_name, # type: str - managed_private_endpoint_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Deletes a managed private endpoint. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. - :type managed_virtual_network_name: str - :param managed_private_endpoint_name: Managed private endpoint name. - :type managed_private_endpoint_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_network_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_network_operations.py 
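A create-then-delete sketch for managed private endpoints; group_id and private_link_resource_id identify the target sub-resource, and the placeholder values below are hypothetical:

client.managed_private_endpoint.create_or_update(
    "myResourceGroup", "myFactory", "default", "myBlobEndpoint",
    group_id="blob",
    private_link_resource_id="<target-storage-account-resource-id>",
)
# Remove the endpoint once it is no longer needed:
client.managed_private_endpoint.delete(
    "myResourceGroup", "myFactory", "default", "myBlobEndpoint"
)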
deleted file mode 100644 index fa043ca3e59..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_network_operations.py +++ /dev/null @@ -1,262 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class ManagedVirtualNetworkOperations(object): - """ManagedVirtualNetworkOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_factory( - self, - resource_group_name, # type: str - factory_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["models.ManagedVirtualNetworkListResponse"] - """Lists managed Virtual Networks. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. 
- :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ManagedVirtualNetworkListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.ManagedVirtualNetworkListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize('ManagedVirtualNetworkListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks'} # type: ignore - - def create_or_update( - self, - resource_group_name, # type: str - factory_name, # type: str - managed_virtual_network_name, # type: str - properties, # type: "models.ManagedVirtualNetwork" - if_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.ManagedVirtualNetworkResource" - """Creates or updates a managed Virtual Network. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. - :type managed_virtual_network_name: str - :param properties: Managed Virtual Network properties. 
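The pager above and the create_or_update being documented here composed as follows in practice. A sketch reusing the hypothetical client from the earlier example (the managed_virtual_network attribute and the models import path are likewise inferred):

# Assumed import path for the vendored models namespace.
from azext_datafactory.vendored_sdks.datafactory import models

# create_or_update() accepts the bare ManagedVirtualNetwork and wraps it in a
# ManagedVirtualNetworkResource before serializing (see the method body below).
vnet = client.managed_virtual_network.create_or_update(
    resource_group_name="my-rg",
    factory_name="myFactory",
    managed_virtual_network_name="default",
    properties=models.ManagedVirtualNetwork(),
)

# list_by_factory() returns an ItemPaged; next_link paging happens lazily
# inside the get_next()/extract_data() closures shown above.
for item in client.managed_virtual_network.list_by_factory("my-rg", "myFactory"):
    print(item.name)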
- :type properties: ~data_factory_management_client.models.ManagedVirtualNetwork - :param if_match: ETag of the managed Virtual Network entity. Should only be specified for - update, for which it should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ManagedVirtualNetworkResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ManagedVirtualNetworkResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - managed_virtual_network = models.ManagedVirtualNetworkResource(properties=properties) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(managed_virtual_network, 'ManagedVirtualNetworkResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ManagedVirtualNetworkResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}'} # type: ignore - - def get( - self, - resource_group_name, # type: str - factory_name, # type: str - managed_virtual_network_name, # type: str - if_none_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) 
-> "models.ManagedVirtualNetworkResource" - """Gets a managed Virtual Network. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param managed_virtual_network_name: Managed virtual network name. - :type managed_virtual_network_name: str - :param if_none_match: ETag of the managed Virtual Network entity. Should only be specified for - get. If the ETag matches the existing entity tag, or if * was provided, then no content will be - returned. - :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ManagedVirtualNetworkResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.ManagedVirtualNetworkResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ManagedVirtualNetworkResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operation_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operation_operations.py deleted file mode 100644 index c5cf3d43f6d..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operation_operations.py +++ /dev/null @@ -1,106 +0,0 @@ -# coding=utf-8 -# 
-------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class OperationOperations(object): - """OperationOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list( - self, - **kwargs # type: Any - ): - # type: (...) -> Iterable["models.OperationListResponse"] - """Lists the available Azure Data Factory API operations. 
- - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either OperationListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.OperationListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.OperationListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list.metadata['url'] # type: ignore - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize('OperationListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': '/providers/Microsoft.DataFactory/operations'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_operations.py deleted file mode 100644 index d82f423f2cb..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_operations.py +++ /dev/null @@ -1,414 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. 
import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class PipelineOperations(object): - """PipelineOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_factory( - self, - resource_group_name, # type: str - factory_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["models.PipelineListResponse"] - """Lists pipelines. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either PipelineListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.PipelineListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize('PipelineListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run(request, 
stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines'} # type: ignore - - def create_or_update( - self, - resource_group_name, # type: str - factory_name, # type: str - pipeline_name, # type: str - pipeline, # type: "models.PipelineResource" - if_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.PipelineResource" - """Creates or updates a pipeline. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param pipeline_name: The pipeline name. - :type pipeline_name: str - :param pipeline: Pipeline resource definition. - :type pipeline: ~data_factory_management_client.models.PipelineResource - :param if_match: ETag of the pipeline entity. Should only be specified for update, for which - it should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: PipelineResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.PipelineResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(pipeline, 'PipelineResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response 
= pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('PipelineResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} # type: ignore - - def get( - self, - resource_group_name, # type: str - factory_name, # type: str - pipeline_name, # type: str - if_none_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Optional["models.PipelineResource"] - """Gets a pipeline. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param pipeline_name: The pipeline name. - :type pipeline_name: str - :param if_none_match: ETag of the pipeline entity. Should only be specified for get. If the - ETag matches the existing entity tag, or if * was provided, then no content will be returned. - :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: PipelineResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.PipelineResource or None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.PipelineResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 304]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('PipelineResource', pipeline_response) - - if cls: - return cls(pipeline_response, 
deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} # type: ignore - - def delete( - self, - resource_group_name, # type: str - factory_name, # type: str - pipeline_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Deletes a pipeline. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param pipeline_name: The pipeline name. - :type pipeline_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} # type: ignore - - def create_run( - self, - resource_group_name, # type: str - factory_name, # type: str - pipeline_name, # type: str - reference_pipeline_run_id=None, # type: Optional[str] - is_recovery=None, # type: Optional[bool] - start_activity_name=None, # type: Optional[str] - start_from_failure=None, # type: Optional[bool] - parameters=None, # type: Optional[Dict[str, object]] - **kwargs # type: Any - ): - # type: (...) -> "models.CreateRunResponse" - """Creates a run of a pipeline. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param pipeline_name: The pipeline name. - :type pipeline_name: str - :param reference_pipeline_run_id: The pipeline run identifier. 
If run ID is specified the - parameters of the specified run will be used to create a new run. - :type reference_pipeline_run_id: str - :param is_recovery: Recovery mode flag. If recovery mode is set to true, the specified - referenced pipeline run and the new run will be grouped under the same groupId. - :type is_recovery: bool - :param start_activity_name: In recovery mode, the rerun will start from this activity. If not - specified, all activities will run. - :type start_activity_name: str - :param start_from_failure: In recovery mode, if set to true, the rerun will start from failed - activities. The property will be used only if startActivityName is not specified. - :type start_from_failure: bool - :param parameters: Parameters of the pipeline run. These parameters will be used only if the - runId is not specified. - :type parameters: dict[str, object] - :keyword callable cls: A custom type or function that will be passed the direct response - :return: CreateRunResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.CreateRunResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.CreateRunResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_run.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - if reference_pipeline_run_id is not None: - query_parameters['referencePipelineRunId'] = self._serialize.query("reference_pipeline_run_id", reference_pipeline_run_id, 'str') - if is_recovery is not None: - query_parameters['isRecovery'] = self._serialize.query("is_recovery", is_recovery, 'bool') - if start_activity_name is not None: - query_parameters['startActivityName'] = self._serialize.query("start_activity_name", start_activity_name, 'str') - if start_from_failure is not None: - query_parameters['startFromFailure'] = self._serialize.query("start_from_failure", start_from_failure, 'bool') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - if parameters is not None: - body_content = self._serialize.body(parameters, '{object}') - else: - body_content = None - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) 
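The optional query parameters serialized above let recovery reruns and parameter overrides share the one createRun endpoint. A sketch of both call shapes, reusing the hypothetical client (the pipeline attribute is inferred):

# Fresh run with parameter overrides; per the docstring above, `parameters`
# is honored only when no referencePipelineRunId is supplied.
run = client.pipeline.create_run(
    resource_group_name="my-rg",
    factory_name="myFactory",
    pipeline_name="myPipeline",
    parameters={"outputBlobName": "out.csv"},
)
print(run.run_id)  # CreateRunResponse carries the new run's identifier

# Recovery rerun: grouped under the referenced run's groupId and restarted
# from a named activity instead of from the beginning.
client.pipeline.create_run(
    resource_group_name="my-rg",
    factory_name="myFactory",
    pipeline_name="myPipeline",
    reference_pipeline_run_id=run.run_id,
    is_recovery=True,
    start_activity_name="CopyStep",
)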
- response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('CreateRunResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_run.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}/createRun'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_run_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_run_operations.py deleted file mode 100644 index 75634fde5ac..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_run_operations.py +++ /dev/null @@ -1,250 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -import datetime -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class PipelineRunOperations(object): - """PipelineRunOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def query_by_factory( - self, - resource_group_name, # type: str - factory_name, # type: str - last_updated_after, # type: datetime.datetime - last_updated_before, # type: datetime.datetime - continuation_token_parameter=None, # type: Optional[str] - filters=None, # type: Optional[List["models.RunQueryFilter"]] - order_by=None, # type: Optional[List["models.RunQueryOrderBy"]] - **kwargs # type: Any - ): - # type: (...) -> "models.PipelineRunsQueryResponse" - """Query pipeline runs in the factory based on input filter conditions. - - :param resource_group_name: The resource group name. 
- :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param last_updated_after: The time at or after which the run event was updated in 'ISO 8601' - format. - :type last_updated_after: ~datetime.datetime - :param last_updated_before: The time at or before which the run event was updated in 'ISO 8601' - format. - :type last_updated_before: ~datetime.datetime - :param continuation_token_parameter: The continuation token for getting the next page of - results. Null for first page. - :type continuation_token_parameter: str - :param filters: List of filters. - :type filters: list[~data_factory_management_client.models.RunQueryFilter] - :param order_by: List of OrderBy option. - :type order_by: list[~data_factory_management_client.models.RunQueryOrderBy] - :keyword callable cls: A custom type or function that will be passed the direct response - :return: PipelineRunsQueryResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.PipelineRunsQueryResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineRunsQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.query_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('PipelineRunsQueryResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryPipelineRuns'} # type: 
ignore - - def get( - self, - resource_group_name, # type: str - factory_name, # type: str - run_id, # type: str - **kwargs # type: Any - ): - # type: (...) -> "models.PipelineRun" - """Get a pipeline run by its run ID. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param run_id: The pipeline run identifier. - :type run_id: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: PipelineRun, or the result of cls(response) - :rtype: ~data_factory_management_client.models.PipelineRun - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineRun"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'runId': self._serialize.url("run_id", run_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('PipelineRun', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}'} # type: ignore - - def cancel( - self, - resource_group_name, # type: str - factory_name, # type: str - run_id, # type: str - is_recursive=None, # type: Optional[bool] - **kwargs # type: Any - ): - # type: (...) -> None - """Cancel a pipeline run by its run ID. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param run_id: The pipeline run identifier. - :type run_id: str - :param is_recursive: If true, cancel all the Child pipelines that are triggered by the current - pipeline. 
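Together, query_by_factory, get, and cancel support a find-inspect-abort loop. A sketch under the same inferred client and attribute names (pipeline_run), with datetimes following the ISO 8601 note above:

import datetime

# Query runs updated over the last day, then cancel any still in progress.
now = datetime.datetime.utcnow()
result = client.pipeline_run.query_by_factory(
    resource_group_name="my-rg",
    factory_name="myFactory",
    last_updated_after=now - datetime.timedelta(days=1),
    last_updated_before=now,
)
for run in result.value:
    details = client.pipeline_run.get("my-rg", "myFactory", run.run_id)
    if details.status == "InProgress":
        # is_recursive=True also cancels child runs triggered by this one.
        client.pipeline_run.cancel("my-rg", "myFactory", run.run_id, is_recursive=True)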
- :type is_recursive: bool - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.cancel.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'runId': self._serialize.url("run_id", run_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - if is_recursive is not None: - query_parameters['isRecursive'] = self._serialize.query("is_recursive", is_recursive, 'bool') - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - cancel.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/cancel'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_operations.py deleted file mode 100644 index 142f32f2c31..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_operations.py +++ /dev/null @@ -1,895 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.mgmt.core.exceptions import ARMErrorFormat -from azure.mgmt.core.polling.arm_polling import ARMPolling - -from .. 
import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class TriggerOperations(object): - """TriggerOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_factory( - self, - resource_group_name, # type: str - factory_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["models.TriggerListResponse"] - """Lists triggers. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either TriggerListResponse or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.TriggerListResponse] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - if not next_link: - # Construct URL - url = self.list_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize('TriggerListResponse', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run(request, 
stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged( - get_next, extract_data - ) - list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers'} # type: ignore - - def query_by_factory( - self, - resource_group_name, # type: str - factory_name, # type: str - continuation_token_parameter=None, # type: Optional[str] - parent_trigger_name=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.TriggerQueryResponse" - """Query triggers. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param continuation_token_parameter: The continuation token for getting the next page of - results. Null for first page. - :type continuation_token_parameter: str - :param parent_trigger_name: The name of the parent TumblingWindowTrigger to get the child rerun - triggers. - :type parent_trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerQueryResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.TriggerQueryResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - filter_parameters = models.TriggerFilterParameters(continuation_token=continuation_token_parameter, parent_trigger_name=parent_trigger_name) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.query_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(filter_parameters, 'TriggerFilterParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise 
HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('TriggerQueryResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/querytriggers'} # type: ignore - - def create_or_update( - self, - resource_group_name, # type: str - factory_name, # type: str - trigger_name, # type: str - properties, # type: "models.Trigger" - if_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> "models.TriggerResource" - """Creates or updates a trigger. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :param properties: Properties of the trigger. - :type properties: ~data_factory_management_client.models.Trigger - :param if_match: ETag of the trigger entity. Should only be specified for update, for which it - should match existing entity or can be * for unconditional update. - :type if_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.TriggerResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - trigger = models.TriggerResource(properties=properties) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.create_or_update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_match is not None: - header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(trigger, 'TriggerResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - 
map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('TriggerResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore - - def get( - self, - resource_group_name, # type: str - factory_name, # type: str - trigger_name, # type: str - if_none_match=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Optional["models.TriggerResource"] - """Gets a trigger. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :param if_none_match: ETag of the trigger entity. Should only be specified for get. If the ETag - matches the existing entity tag, or if * was provided, then no content will be returned. - :type if_none_match: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerResource, or the result of cls(response) - :rtype: ~data_factory_management_client.models.TriggerResource or None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - if if_none_match is not None: - header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 304]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('TriggerResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore - - def delete( - self, - resource_group_name, # type: str - factory_name, # type: str - trigger_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Deletes a trigger. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore - - def _subscribe_to_event_initial( - self, - resource_group_name, # type: str - factory_name, # type: str - trigger_name, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> Optional["models.TriggerSubscriptionOperationStatus"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self._subscribe_to_event_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - _subscribe_to_event_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore - - def begin_subscribe_to_event( - self, - resource_group_name, # type: str - factory_name, # type: str - trigger_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller["models.TriggerSubscriptionOperationStatus"] - """Subscribe event trigger to events. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
- :return: An instance of LROPoller that returns either TriggerSubscriptionOperationStatus or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[~data_factory_management_client.models.TriggerSubscriptionOperationStatus] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = self._subscribe_to_event_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_subscribe_to_event.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore - - def get_event_subscription_status( - self, - resource_group_name, # type: str - factory_name, # type: str - trigger_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "models.TriggerSubscriptionOperationStatus" - """Get a trigger's event subscription status. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. 
- :type trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerSubscriptionOperationStatus, or the result of cls(response) - :rtype: ~data_factory_management_client.models.TriggerSubscriptionOperationStatus - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.get_event_subscription_status.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get_event_subscription_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/getEventSubscriptionStatus'} # type: ignore - - def _unsubscribe_from_event_initial( - self, - resource_group_name, # type: str - factory_name, # type: str - trigger_name, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> Optional["models.TriggerSubscriptionOperationStatus"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self._unsubscribe_from_event_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - _unsubscribe_from_event_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore - - def begin_unsubscribe_from_event( - self, - resource_group_name, # type: str - factory_name, # type: str - trigger_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller["models.TriggerSubscriptionOperationStatus"] - """Unsubscribe event trigger from events. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
- :return: An instance of LROPoller that returns either TriggerSubscriptionOperationStatus or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[~data_factory_management_client.models.TriggerSubscriptionOperationStatus] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = self._unsubscribe_from_event_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_unsubscribe_from_event.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore - - def _start_initial( - self, - resource_group_name, # type: str - factory_name, # type: str - trigger_name, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> None - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self._start_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start'} # type: ignore - - def begin_start( - self, - resource_group_name, # type: str - factory_name, # type: str - trigger_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] - """Starts a trigger. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
- :return: An instance of LROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = self._start_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - if cls: - return cls(pipeline_response, None, {}) - - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start'} # type: ignore - - def _stop_initial( - self, - resource_group_name, # type: str - factory_name, # type: str - trigger_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self._stop_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop'} # type: ignore - - def begin_stop( - 
self, - resource_group_name, # type: str - factory_name, # type: str - trigger_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] - """Stops a trigger. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of LROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = self._stop_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - if cls: - return cls(pipeline_response, None, {}) - - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_run_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_run_operations.py deleted file mode 100644 index 3290d8196ab..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_run_operations.py +++ /dev/null @@ -1,248 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- -import datetime -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class TriggerRunOperations(object): - """TriggerRunOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~data_factory_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def rerun( - self, - resource_group_name, # type: str - factory_name, # type: str - trigger_name, # type: str - run_id, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Rerun single trigger instance by runId. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :param run_id: The pipeline run identifier. 
- :type run_id: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.rerun.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - 'runId': self._serialize.url("run_id", run_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - rerun.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/rerun'} # type: ignore - - def cancel( - self, - resource_group_name, # type: str - factory_name, # type: str - trigger_name, # type: str - run_id, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Cancel a single trigger instance by runId. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :param run_id: The pipeline run identifier. 
- :type run_id: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-06-01" - - # Construct URL - url = self.cancel.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - 'runId': self._serialize.url("run_id", run_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - cancel.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/cancel'} # type: ignore - - def query_by_factory( - self, - resource_group_name, # type: str - factory_name, # type: str - last_updated_after, # type: datetime.datetime - last_updated_before, # type: datetime.datetime - continuation_token_parameter=None, # type: Optional[str] - filters=None, # type: Optional[List["models.RunQueryFilter"]] - order_by=None, # type: Optional[List["models.RunQueryOrderBy"]] - **kwargs # type: Any - ): - # type: (...) -> "models.TriggerRunsQueryResponse" - """Query trigger runs. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param last_updated_after: The time at or after which the run event was updated in 'ISO 8601' - format. - :type last_updated_after: ~datetime.datetime - :param last_updated_before: The time at or before which the run event was updated in 'ISO 8601' - format. - :type last_updated_before: ~datetime.datetime - :param continuation_token_parameter: The continuation token for getting the next page of - results. Null for first page. - :type continuation_token_parameter: str - :param filters: List of filters. - :type filters: list[~data_factory_management_client.models.RunQueryFilter] - :param order_by: List of OrderBy option. 
- :type order_by: list[~data_factory_management_client.models.RunQueryOrderBy] - :keyword callable cls: A custom type or function that will be passed the direct response - :return: TriggerRunsQueryResponse, or the result of cls(response) - :rtype: ~data_factory_management_client.models.TriggerRunsQueryResponse - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerRunsQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} - error_map.update(kwargs.pop('error_map', {})) - - filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by) - api_version = "2018-06-01" - content_type = kwargs.pop("content_type", "application/json") - - # Construct URL - url = self.query_by_factory.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - deserialized = self._deserialize('TriggerRunsQueryResponse', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryTriggerRuns'} # type: ignore diff --git a/src/datafactory/gen.zip b/src/datafactory/gen.zip deleted file mode 100644 index 296cd2dfd073b40b800d50721ca0621a581fdee3..0000000000000000000000000000000000000000 GIT binary patch [39102-byte base85 binary payload omitted]
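The deletions above remove the vendored SDK's entire synchronous trigger surface. For orientation, a minimal sketch of how that surface was typically driven follows; the import path, the client wiring, and the operation-group attribute names (`trigger`, `trigger_run`) are illustrative assumptions, while the method names and signatures come from the deleted code itself.

```python
# Minimal sketch; not part of this patch. Assumptions are flagged inline.
import datetime

from azure.identity import DefaultAzureCredential
# Assumed import path for the vendored client shown in this patch.
from azext_datafactory.vendored_sdks.datafactory import DataFactoryManagementClient

client = DataFactoryManagementClient(
    credential=DefaultAzureCredential(),
    subscription_id="<subscription-id>",  # placeholder
)

# `client.trigger` / `client.trigger_run` attribute names are assumptions;
# the method signatures match the synchronous operations deleted above.
trigger = client.trigger.get(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    trigger_name="exampleTrigger",
)

# begin_start returns an LROPoller[None]; result() blocks until completion.
client.trigger.begin_start(
    "exampleResourceGroup", "exampleFactoryName", "exampleTrigger"
).result()

# Trigger runs are queried over a closed time window (both bounds required).
runs = client.trigger_run.query_by_factory(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    last_updated_after=datetime.datetime(2018, 6, 16, tzinfo=datetime.timezone.utc),
    last_updated_before=datetime.datetime(2018, 6, 17, tzinfo=datetime.timezone.utc),
)
for run in runs.value:
    print(run.trigger_name, run.status)
```

Paging works as the docstrings describe: the `continuation_token` returned on `TriggerRunsQueryResponse` is passed back as `continuation_token_parameter` to fetch the next page.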
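The `begin_subscribe_to_event`, `begin_unsubscribe_from_event`, `begin_start`, and `begin_stop` operations above are long-running, and their docstrings document the `polling`, `polling_interval`, and `continuation_token` keywords. A second sketch, under the same assumed client wiring as the previous one, of those poller controls:

```python
# polling=True (the default) selects ARMPolling; polling_interval is the
# fallback delay used when the service returns no Retry-After header.
poller = client.trigger.begin_subscribe_to_event(
    "exampleResourceGroup", "exampleFactoryName", "exampleEventTrigger",
    polling_interval=10,
)

# An LROPoller can be checkpointed and resumed from its continuation token.
token = poller.continuation_token()
resumed = client.trigger.begin_subscribe_to_event(
    "exampleResourceGroup", "exampleFactoryName", "exampleEventTrigger",
    continuation_token=token,
)
status = resumed.result()  # models.TriggerSubscriptionOperationStatus

# The subscription state can also be read directly, without a poller.
status = client.trigger.get_event_subscription_status(
    "exampleResourceGroup", "exampleFactoryName", "exampleEventTrigger"
)
print(status.trigger_name, status.status)
```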
diff --git a/src/datafactory/report.md b/src/datafactory/report.md --- a/src/datafactory/report.md +++ b/src/datafactory/report.md ### Commands in `az datafactory` group @@ -103,6 +106,24 @@ |[az datafactory pipeline-run cancel](#PipelineRunsCancel)|Cancel|[Parameters](#ParametersPipelineRunsCancel)|[Example](#ExamplesPipelineRunsCancel)| |[az datafactory pipeline-run query-by-factory](#PipelineRunsQueryByFactory)|QueryByFactory|[Parameters](#ParametersPipelineRunsQueryByFactory)|[Example](#ExamplesPipelineRunsQueryByFactory)| +### Commands in `az datafactory private-end-point-connection` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory private-end-point-connection list](#privateEndPointConnectionsListByFactory)|ListByFactory|[Parameters](#ParametersprivateEndPointConnectionsListByFactory)|[Example](#ExamplesprivateEndPointConnectionsListByFactory)| + +### Commands in `az datafactory private-endpoint-connection` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory private-endpoint-connection show](#PrivateEndpointConnectionGet)|Get|[Parameters](#ParametersPrivateEndpointConnectionGet)|[Example](#ExamplesPrivateEndpointConnectionGet)| +|[az datafactory private-endpoint-connection create](#PrivateEndpointConnectionCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersPrivateEndpointConnectionCreateOrUpdate#Create)|[Example](#ExamplesPrivateEndpointConnectionCreateOrUpdate#Create)| +|[az datafactory private-endpoint-connection update](#PrivateEndpointConnectionCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersPrivateEndpointConnectionCreateOrUpdate#Update)|Not Found| +|[az datafactory private-endpoint-connection delete](#PrivateEndpointConnectionDelete)|Delete|[Parameters](#ParametersPrivateEndpointConnectionDelete)|[Example](#ExamplesPrivateEndpointConnectionDelete)| + +### Commands in `az datafactory private-link-resource` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory private-link-resource show](#privateLinkResourcesGet)|Get|[Parameters](#ParametersprivateLinkResourcesGet)|[Example](#ExamplesprivateLinkResourcesGet)| + ### Commands in `az datafactory trigger` group |CLI Command|Operation Swagger name|Parameters|Examples| |---------|------------|--------|-----------| @@ -179,6 +200,13 @@ az datafactory create --location "East US" --name
"exampleFactoryName" --resourc |**--factory-vsts-configuration**|object|Factory's VSTS repo information.|factory_vsts_configuration|FactoryVSTSConfiguration| |**--factory-git-hub-configuration**|object|Factory's GitHub repo information.|factory_git_hub_configuration|FactoryGitHubConfiguration| |**--global-parameters**|dictionary|List of parameters for factory.|global_parameters|globalParameters| +|**--public-network-access**|choice|Whether or not public network access is allowed for the data factory.|public_network_access|publicNetworkAccess| +|**--key-name**|string|The name of the key in Azure Key Vault to use as Customer Managed Key.|key_name|keyName| +|**--vault-base-url**|string|The url of the Azure Key Vault used for CMK.|vault_base_url|vaultBaseUrl| +|**--key-version**|string|The version of the key used for CMK. If not provided, latest version will be used.|key_version|keyVersion| +|**--identity**|object|User assigned identity to use to authenticate to customer's key vault. If not provided Managed Service Identity will be used.|identity|identity| +|**--type**|choice|The identity type.|type|type| +|**--user-assigned-identities**|dictionary|List of user assigned identities for the factory.|user_assigned_identities|userAssignedIdentities| #### Command `az datafactory update` @@ -193,6 +221,8 @@ az datafactory update --name "exampleFactoryName" --tags exampleTag="exampleValu |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--tags**|dictionary|The resource tags.|tags|tags| +|**--type**|choice|The identity type.|type|type| +|**--user-assigned-identities**|dictionary|List of user assigned identities for the factory.|user_assigned_identities|userAssignedIdentities| #### Command `az datafactory delete` @@ -423,6 +453,7 @@ az datafactory integration-runtime linked-integration-runtime create --name "bfa |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| |**--if-match**|string|ETag of the integration runtime entity. 
Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| |**--description**|string|Integration runtime description.|managed_description|description| +|**--managed-virtual-network**|object|Managed Virtual Network reference.|managed_managed_virtual_network|managedVirtualNetwork| |**--compute-properties**|object|The compute resource for managed integration runtime.|managed_compute_properties|computeProperties| |**--ssis-properties**|object|SSIS properties for managed integration runtime.|managed_ssis_properties|ssisProperties| @@ -823,7 +854,7 @@ et\\"}],\\"outputs\\":[{\\"type\\":\\"DatasetReference\\",\\"parameters\\":{\\"M "typeProperties\\":{\\"dataIntegrationUnits\\":32,\\"sink\\":{\\"type\\":\\"BlobSink\\"},\\"source\\":{\\"type\\":\\"Bl\ obSource\\"}}}],\\"isSequential\\":true,\\"items\\":{\\"type\\":\\"Expression\\",\\"value\\":\\"@pipeline().parameters.\ OutputBlobNameList\\"}}}]" --parameters "{\\"OutputBlobNameList\\":{\\"type\\":\\"Array\\"}}" --duration "0.00:10:00" \ ---name "examplePipeline" --resource-group "exampleResourceGroup" +--pipeline-name "examplePipeline" --resource-group "exampleResourceGroup" ``` ##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| @@ -840,7 +871,7 @@ OutputBlobNameList\\"}}}]" --parameters "{\\"OutputBlobNameList\\":{\\"type\\":\ |**--annotations**|array|List of tags that can be used for describing the Pipeline.|annotations|annotations| |**--run-dimensions**|dictionary|Dimensions emitted by Pipeline.|run_dimensions|runDimensions| |**--duration**|any|TimeSpan value, after which an Azure Monitoring Metric is fired.|duration|duration| -|**--folder-name**|string|The name of the folder that this Pipeline is in.|folder_name|name| +|**--name**|string|The name of the folder that this Pipeline is in.|name|name| #### Command `az datafactory pipeline delete` @@ -924,6 +955,91 @@ operator="Equals" values="examplePipeline" --last-updated-after "2018-06-16T00:3 |**--filters**|array|List of filters.|filters|filters| |**--order-by**|array|List of OrderBy option.|order_by|orderBy| +### group `az datafactory private-end-point-connection` +#### Command `az datafactory private-end-point-connection list` + +##### Example +``` +az datafactory private-end-point-connection list --factory-name "exampleFactoryName" --resource-group \ +"exampleResourceGroup" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| +|**--factory-name**|string|The factory name.|factory_name|factoryName| + +### group `az datafactory private-endpoint-connection` +#### Command `az datafactory private-endpoint-connection show` + +##### Example +``` +az datafactory private-endpoint-connection show --factory-name "exampleFactoryName" --name "connection" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| +|**--factory-name**|string|The factory name.|factory_name|factoryName| +|**--private-endpoint-connection-name**|string|The private endpoint connection name.|private_endpoint_connection_name|privateEndpointConnectionName| +|**--if-none-match**|string|ETag of the private endpoint connection entity. Should only be specified for get. 
If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| + +#### Command `az datafactory private-endpoint-connection create` + +##### Example +``` +az datafactory private-endpoint-connection create --factory-name "exampleFactoryName" --name "connection" \ +--private-link-service-connection-state description="Approved by admin." actions-required="" status="Approved" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| +|**--factory-name**|string|The factory name.|factory_name|factoryName| +|**--private-endpoint-connection-name**|string|The private endpoint connection name.|private_endpoint_connection_name|privateEndpointConnectionName| +|**--if-match**|string|ETag of the private endpoint connection entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| +|**--private-link-service-connection-state**|object|The state of a private link connection|private_link_service_connection_state|privateLinkServiceConnectionState| + +#### Command `az datafactory private-endpoint-connection update` + +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| +|**--factory-name**|string|The factory name.|factory_name|factoryName| +|**--private-endpoint-connection-name**|string|The private endpoint connection name.|private_endpoint_connection_name|privateEndpointConnectionName| +|**--if-match**|string|ETag of the private endpoint connection entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| +|**--private-link-service-connection-state**|object|The state of a private link connection|private_link_service_connection_state|privateLinkServiceConnectionState| + +#### Command `az datafactory private-endpoint-connection delete` + +##### Example +``` +az datafactory private-endpoint-connection delete --factory-name "exampleFactoryName" --name "connection" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| +|**--factory-name**|string|The factory name.|factory_name|factoryName| +|**--private-endpoint-connection-name**|string|The private endpoint connection name.|private_endpoint_connection_name|privateEndpointConnectionName| + +### group `az datafactory private-link-resource` +#### Command `az datafactory private-link-resource show` + +##### Example +``` +az datafactory private-link-resource show --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| +|**--factory-name**|string|The factory name.|factory_name|factoryName| + ### group `az datafactory trigger` #### Command `az datafactory trigger list`