diff --git a/src/datafactory/HISTORY.rst b/src/datafactory/HISTORY.rst
index f4e5240e156..1c139576ba0 100644
--- a/src/datafactory/HISTORY.rst
+++ b/src/datafactory/HISTORY.rst
@@ -3,28 +3,6 @@
Release History
===============
-0.4.0
-+++++
-* GA the whole module
-
-0.3.0
-+++++
-* [BREAKING CHANGE] Renamed command subgroup `az datafactory factory` to `az datafactory`.
-* [BREAKING CHANGE] `az datafactory integration-runtime managed create`: `--type-properties-compute-properties` renamed to `--compute-properties`,
- `--type-properties-ssis-properties` renamed to `--ssis-properties`.
-* [BREAKING CHANGE] `az datafactory integration-runtime self-hosted create`: `--type-properties-linked-info` renamed to `--linked-info`.
-* [BREAKING CHANGE] `az datafactory integration-runtime update`: `--properties` renamed to `--linked-service`.
-* [BREAKING CHANGE] `az datafactory linked-service delete`: `--properties` renamed to `--dataset`.
-* [BREAKING CHANGE] `az datafactory trigger list`: `--properties` renamed to `--trigger`.
-
-0.2.1
-+++++
-* az datafactory factory create: Enable managed identity by default
-
-0.2.0
-++++++
-* add update command for linked services and triggers and datasets
-
0.1.0
++++++
* Initial release.
diff --git a/src/datafactory/azext_datafactory/azext_metadata.json b/src/datafactory/azext_datafactory/azext_metadata.json
index 3695b0d7077..cfc30c747c7 100644
--- a/src/datafactory/azext_datafactory/azext_metadata.json
+++ b/src/datafactory/azext_datafactory/azext_metadata.json
@@ -1,3 +1,4 @@
{
+ "azext.isExperimental": true,
"azext.minCliCoreVersion": "2.15.0"
}
\ No newline at end of file
diff --git a/src/datafactory/azext_datafactory/generated/_client_factory.py b/src/datafactory/azext_datafactory/generated/_client_factory.py
index 7db87b484da..837a01be1e4 100644
--- a/src/datafactory/azext_datafactory/generated/_client_factory.py
+++ b/src/datafactory/azext_datafactory/generated/_client_factory.py
@@ -54,3 +54,15 @@ def cf_trigger(cli_ctx, *_):
def cf_trigger_run(cli_ctx, *_):
return cf_datafactory_cl(cli_ctx).trigger_runs
+
+
+def cf_private_end_point_connection(cli_ctx, *_):
+ return cf_datafactory_cl(cli_ctx).private_end_point_connections
+
+
+def cf_private_endpoint_connection(cli_ctx, *_):
+ return cf_datafactory_cl(cli_ctx).private_endpoint_connection
+
+
+def cf_private_link_resource(cli_ctx, *_):
+ return cf_datafactory_cl(cli_ctx).private_link_resources
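The three new factories follow the file's existing pattern: resolve the shared management client, then hand back a single operations group for the command group to use. A minimal sketch of the consumption side (hedged: the body of `cf_datafactory_cl` shown here is assumed from the standard azure-cli pattern; its real definition sits earlier in this file, outside the hunk, and running this requires azure-cli-core plus the installed extension):

```python
from azure.cli.core.commands.client_factory import get_mgmt_service_client


def cf_datafactory_cl(cli_ctx, *_):
    # Build the vendored management client once; each cf_* helper above
    # then returns one operations group off this client.
    from azext_datafactory.vendored_sdks.datafactory import DataFactoryManagementClient
    return get_mgmt_service_client(cli_ctx, DataFactoryManagementClient)
```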
diff --git a/src/datafactory/azext_datafactory/generated/_help.py b/src/datafactory/azext_datafactory/generated/_help.py
index fd2ab1dcd0e..ceae15151d8 100644
--- a/src/datafactory/azext_datafactory/generated/_help.py
+++ b/src/datafactory/azext_datafactory/generated/_help.py
@@ -69,6 +69,14 @@
collaboration-branch: Required. Collaboration branch.
root-folder: Required. Root folder.
last-commit-id: Last commit id.
+ - name: --identity
+ short-summary: "User assigned identity to use to authenticate to customer's key vault. If not provided Managed \
+Service Identity will be used."
+ long-summary: |
+ Usage: --identity user-assigned-identity=XX
+
+ user-assigned-identity: The resource id of the user assigned identity to authenticate to customer's key \
+vault.
examples:
- name: Factories_CreateOrUpdate
text: |-
@@ -149,6 +157,14 @@
helps['datafactory get-git-hub-access-token'] = """
type: command
short-summary: "Get GitHub Access Token."
+ parameters:
+ - name: --git-hub-client-secret
+ short-summary: "GitHub bring your own app client secret information."
+ long-summary: |
+ Usage: --git-hub-client-secret byoa-secret-akv-url=XX byoa-secret-name=XX
+
+ byoa-secret-akv-url: Bring your own app client secret AKV URL.
+ byoa-secret-name: Bring your own app client secret name in AKV.
examples:
- name: Factories_GetGitHubAccessToken
text: |-
@@ -206,6 +222,13 @@
helps['datafactory integration-runtime managed create'] = """
type: command
short-summary: "Create an integration runtime."
+ parameters:
+ - name: --managed-virtual-network
+ short-summary: "Managed Virtual Network reference."
+ long-summary: |
+ Usage: --managed-virtual-network reference-name=XX
+
+ reference-name: Required. Reference ManagedVirtualNetwork name.
"""
helps['datafactory integration-runtime self-hosted'] = """
@@ -591,7 +614,7 @@
taset\\"}],\\"typeProperties\\":{\\"dataIntegrationUnits\\":32,\\"sink\\":{\\"type\\":\\"BlobSink\\"},\\"source\\":{\\"\
type\\":\\"BlobSource\\"}}}],\\"isSequential\\":true,\\"items\\":{\\"type\\":\\"Expression\\",\\"value\\":\\"@pipeline(\
).parameters.OutputBlobNameList\\"}}}]" --parameters "{\\"OutputBlobNameList\\":{\\"type\\":\\"Array\\"}}" --duration \
-"0.00:10:00" --name "examplePipeline" --resource-group "exampleResourceGroup"
+"0.00:10:00" --pipeline-name "examplePipeline" --resource-group "exampleResourceGroup"
"""
helps['datafactory pipeline delete'] = """
@@ -902,3 +925,92 @@
az datafactory trigger-run rerun --factory-name "exampleFactoryName" --resource-group \
"exampleResourceGroup" --run-id "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" --trigger-name "exampleTrigger"
"""
+
+helps['datafactory private-end-point-connection'] = """
+ type: group
+ short-summary: Manage private end point connections with datafactory
+"""
+
+helps['datafactory private-end-point-connection list'] = """
+ type: command
+ short-summary: "Lists Private endpoint connections."
+ examples:
+ - name: privateEndPointConnections_ListByFactory
+ text: |-
+ az datafactory private-end-point-connection list --factory-name "exampleFactoryName" --resource-group \
+"exampleResourceGroup"
+"""
+
+helps['datafactory private-endpoint-connection'] = """
+ type: group
+ short-summary: Manage private endpoint connections with datafactory
+"""
+
+helps['datafactory private-endpoint-connection show'] = """
+ type: command
+ short-summary: "Gets a private endpoint connection."
+ examples:
+ - name: Get a private endpoint connection for a datafactory.
+ text: |-
+ az datafactory private-endpoint-connection show --factory-name "exampleFactoryName" --name "connection" \
+--resource-group "exampleResourceGroup"
+"""
+
+helps['datafactory private-endpoint-connection create'] = """
+ type: command
+ short-summary: "Approves or rejects a private endpoint connection."
+ parameters:
+ - name: --private-link-service-connection-state
+ short-summary: "The state of a private link connection"
+ long-summary: |
+ Usage: --private-link-service-connection-state status=XX description=XX actions-required=XX
+
+ status: Status of a private link connection.
+ description: Description of a private link connection.
+ actions-required: Actions required for a private link connection.
+ examples:
+ - name: Approves or rejects a private endpoint connection for a factory.
+ text: |-
+ az datafactory private-endpoint-connection create --factory-name "exampleFactoryName" --name \
+"connection" --private-link-service-connection-state description="Approved by admin." actions-required="" \
+status="Approved" --resource-group "exampleResourceGroup"
+"""
+
+helps['datafactory private-endpoint-connection update'] = """
+ type: command
+ short-summary: "Approves or rejects a private endpoint connection."
+ parameters:
+ - name: --private-link-service-connection-state
+ short-summary: "The state of a private link connection"
+ long-summary: |
+ Usage: --private-link-service-connection-state status=XX description=XX actions-required=XX
+
+ status: Status of a private link connection.
+ description: Description of a private link connection.
+ actions-required: Actions required for a private link connection.
+"""
+
+helps['datafactory private-endpoint-connection delete'] = """
+ type: command
+ short-summary: "Deletes a private endpoint connection."
+ examples:
+ - name: Delete a private endpoint connection for a datafactory.
+ text: |-
+ az datafactory private-endpoint-connection delete --factory-name "exampleFactoryName" --name \
+"connection" --resource-group "exampleResourceGroup"
+"""
+
+helps['datafactory private-link-resource'] = """
+ type: group
+ short-summary: Manage private link resources with datafactory
+"""
+
+helps['datafactory private-link-resource show'] = """
+ type: command
+ short-summary: "Gets the private link resources."
+ examples:
+ - name: Get private link resources of a factory
+ text: |-
+ az datafactory private-link-resource show --factory-name "exampleFactoryName" --resource-group \
+"exampleResourceGroup"
+"""
diff --git a/src/datafactory/azext_datafactory/generated/_params.py b/src/datafactory/azext_datafactory/generated/_params.py
index 2162b81c231..cb4731f7eb3 100644
--- a/src/datafactory/azext_datafactory/generated/_params.py
+++ b/src/datafactory/azext_datafactory/generated/_params.py
@@ -24,9 +24,13 @@
from azext_datafactory.action import (
AddFactoryVstsConfiguration,
AddFactoryGitHubConfiguration,
+ AddIdentity,
+ AddGitHubClientSecret,
+ AddManagedVirtualNetwork,
AddFolder,
AddFilters,
- AddOrderBy
+ AddOrderBy,
+ AddPrivateLinkServiceConnectionState
)
@@ -57,12 +61,33 @@ def load_arguments(self, _):
'GitHub repo information.', arg_group='RepoConfiguration')
c.argument('global_parameters', type=validate_file_or_dict, help='List of parameters for factory. Expected '
'value: json-string/@json-file.')
+ c.argument('public_network_access', arg_type=get_enum_type(['Enabled', 'Disabled']), help='Whether or not '
+ 'public network access is allowed for the data factory.')
+ c.argument('key_name', type=str, help='The name of the key in Azure Key Vault to use as Customer Managed Key.',
+ arg_group='Encryption')
+ c.argument('vault_base_url', type=str, help='The url of the Azure Key Vault used for CMK.',
+ arg_group='Encryption')
+ c.argument('key_version', type=str, help='The version of the key used for CMK. If not provided, latest version '
+ 'will be used.', arg_group='Encryption')
+ c.argument('identity', action=AddIdentity, nargs='+', help='User assigned identity to use to authenticate to '
+ 'customer\'s key vault. If not provided, Managed Service Identity will be used.',
+ arg_group='Encryption')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned', 'UserAssigned',
+ 'SystemAssigned,UserAssigned']),
+ help='The identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='List of user assigned identities for '
+ 'the factory. Expected value: json-string/@json-file.', arg_group='Identity')
with self.argument_context('datafactory update') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('factory_name', options_list=['--name', '-n', '--factory-name'], type=str, help='The factory name.',
id_part='name')
c.argument('tags', tags_type)
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned', 'UserAssigned',
+ 'SystemAssigned,UserAssigned']),
+ help='The identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='List of user assigned identities for '
+ 'the factory. Expected value: json-string/@json-file.', arg_group='Identity')
with self.argument_context('datafactory delete') as c:
c.argument('resource_group_name', resource_group_name_type)
@@ -98,6 +123,8 @@ def load_arguments(self, _):
id_part='name')
c.argument('git_hub_access_code', type=str, help='GitHub access code.')
c.argument('git_hub_client_id', type=str, help='GitHub application client ID.')
+ c.argument('git_hub_client_secret', action=AddGitHubClientSecret, nargs='+', help='GitHub bring your own app '
+ 'client secret information.')
c.argument('git_hub_access_token_base_url', type=str, help='GitHub access token base URL.')
with self.argument_context('datafactory integration-runtime list') as c:
@@ -133,6 +160,8 @@ def load_arguments(self, _):
c.argument('if_match', type=str, help='ETag of the integration runtime entity. Should only be specified for '
'update, for which it should match existing entity or can be * for unconditional update.')
c.argument('description', type=str, help='Integration runtime description.')
+ c.argument('managed_virtual_network', action=AddManagedVirtualNetwork, nargs='+', help='Managed Virtual '
+ 'Network reference.')
c.argument('compute_properties', type=validate_file_or_dict, help='The compute resource for managed '
'integration runtime. Expected value: json-string/@json-file.', arg_group='Type Properties')
c.argument('ssis_properties', type=validate_file_or_dict, help='SSIS properties for managed integration '
@@ -386,8 +415,7 @@ def load_arguments(self, _):
with self.argument_context('datafactory pipeline update') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('factory_name', type=str, help='The factory name.', id_part='name')
- c.argument('pipeline_name', options_list=['--name', '-n', '--pipeline-name'], type=str, help='The pipeline '
- 'name.', id_part='child_name_1')
+ c.argument('pipeline_name', type=str, help='The pipeline name.', id_part='child_name_1')
c.argument('if_match', type=str, help='ETag of the pipeline entity. Should only be specified for update, for '
'which it should match existing entity or can be * for unconditional update.')
c.argument('description', type=str, help='The description of the pipeline.')
@@ -404,8 +432,7 @@ def load_arguments(self, _):
'json-string/@json-file.')
c.argument('duration', type=validate_file_or_dict, help='TimeSpan value, after which an Azure Monitoring '
'Metric is fired. Expected value: json-string/@json-file.', arg_group='Policy Elapsed Time Metric')
- c.argument('folder_name', type=str, help='The name of the folder that this Pipeline is in.',
- arg_group='Folder')
+ c.argument('name', type=str, help='The name of the folder that this Pipeline is in.', arg_group='Folder')
c.ignore('pipeline')
with self.argument_context('datafactory pipeline delete') as c:
@@ -578,3 +605,52 @@ def load_arguments(self, _):
c.argument('factory_name', type=str, help='The factory name.', id_part='name')
c.argument('trigger_name', type=str, help='The trigger name.', id_part='child_name_1')
c.argument('run_id', type=str, help='The pipeline run identifier.', id_part='child_name_2')
+
+ with self.argument_context('datafactory private-end-point-connection list') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('factory_name', type=str, help='The factory name.')
+
+ with self.argument_context('datafactory private-endpoint-connection show') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('factory_name', type=str, help='The factory name.', id_part='name')
+ c.argument('private_endpoint_connection_name', options_list=['--name', '-n',
+ '--private-endpoint-connection-name'], type=str, help='The private endpoint '
+ 'connection name.', id_part='child_name_1')
+ c.argument('if_none_match', type=str, help='ETag of the private endpoint connection entity. Should only be '
+ 'specified for get. If the ETag matches the existing entity tag, or if * was provided, then no '
+ 'content will be returned.')
+
+ with self.argument_context('datafactory private-endpoint-connection create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('factory_name', type=str, help='The factory name.')
+ c.argument('private_endpoint_connection_name', options_list=['--name', '-n',
+ '--private-endpoint-connection-name'], type=str, help='The private endpoint '
+ 'connection name.')
+ c.argument('if_match', type=str, help='ETag of the private endpoint connection entity. Should only be '
+ 'specified for update, for which it should match existing entity or can be * for unconditional '
+ 'update.')
+ c.argument('private_link_service_connection_state', action=AddPrivateLinkServiceConnectionState, nargs='+',
+ help='The state of a private link connection.')
+
+ with self.argument_context('datafactory private-endpoint-connection update') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('factory_name', type=str, help='The factory name.', id_part='name')
+ c.argument('private_endpoint_connection_name', options_list=['--name', '-n',
+ '--private-endpoint-connection-name'], type=str, help='The private endpoint '
+ 'connection name.', id_part='child_name_1')
+ c.argument('if_match', type=str, help='ETag of the private endpoint connection entity. Should only be '
+ 'specified for update, for which it should match existing entity or can be * for unconditional '
+ 'update.')
+ c.argument('private_link_service_connection_state', action=AddPrivateLinkServiceConnectionState, nargs='+',
+ help='The state of a private link connection.')
+
+ with self.argument_context('datafactory private-endpoint-connection delete') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('factory_name', type=str, help='The factory name.', id_part='name')
+ c.argument('private_endpoint_connection_name', options_list=['--name', '-n',
+ '--private-endpoint-connection-name'], type=str, help='The private endpoint '
+ 'connection name.', id_part='child_name_1')
+
+ with self.argument_context('datafactory private-link-resource show') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('factory_name', type=str, help='The factory name.', id_part='name')
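Several of the new arguments (`--user-assigned-identities`, `--compute-properties`, `--ssis-properties`) are typed with `validate_file_or_dict`, which backs the recurring 'Expected value: json-string/@json-file' help text. A hedged, standalone illustration of that converter (behavior as implemented in `azure.cli.core.util`; the identity resource ID below is a made-up placeholder):

```python
from azure.cli.core.util import validate_file_or_dict

# An inline JSON literal is parsed straight into a dict...
identities = validate_file_or_dict('{"myIdentityResourceId": {}}')

# ...while a value naming an existing file is loaded as JSON from disk,
# so the same argument accepts either form.
```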
diff --git a/src/datafactory/azext_datafactory/generated/action.py b/src/datafactory/azext_datafactory/generated/action.py
index f645d72981a..1b245767efd 100644
--- a/src/datafactory/azext_datafactory/generated/action.py
+++ b/src/datafactory/azext_datafactory/generated/action.py
@@ -90,6 +90,84 @@ def get_action(self, values, option_string): # pylint: disable=no-self-use
return d
+class AddIdentity(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.identity = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'user-assigned-identity':
+ d['user_assigned_identity'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter identity. All possible keys are: '
+ 'user-assigned-identity'.format(k))
+ return d
+
+
+class AddGitHubClientSecret(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.git_hub_client_secret = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'byoa-secret-akv-url':
+ d['byoa_secret_akv_url'] = v[0]
+ elif kl == 'byoa-secret-name':
+ d['byoa_secret_name'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter git_hub_client_secret. All possible keys '
+ 'are: byoa-secret-akv-url, byoa-secret-name'.format(k))
+ return d
+
+
+class AddManagedVirtualNetwork(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.managed_virtual_network = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ d['type'] = "ManagedVirtualNetworkReference"
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'reference-name':
+ d['reference_name'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter managed_virtual_network. All possible '
+ 'keys are: reference-name'.format(k))
+ return d
+
+
class AddFolder(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
action = self.get_action(values, option_string)
@@ -111,7 +189,7 @@ def get_action(self, values, option_string): # pylint: disable=no-self-use
d['name'] = v[0]
else:
raise CLIError('Unsupported Key {} is provided for parameter folder. All possible keys are: name'.
- format(k))
+ format(k))
return d
@@ -169,3 +247,32 @@ def get_action(self, values, option_string): # pylint: disable=no-self-use
raise CLIError('Unsupported Key {} is provided for parameter order_by. All possible keys are: '
'order-by, order'.format(k))
return d
+
+
+class AddPrivateLinkServiceConnectionState(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.private_link_service_connection_state = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'status':
+ d['status'] = v[0]
+ elif kl == 'description':
+ d['description'] = v[0]
+ elif kl == 'actions-required':
+ d['actions_required'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter private_link_service_connection_state. '
+ 'All possible keys are: status, description, actions-required'.format(k))
+ return d
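The four actions added in this file share one parsing scheme: `nargs='+'` hands the action a list of space-separated tokens, each token is split once on `=`, and the first value per recognized key lands in a snake_cased dict on the namespace. A self-contained demonstration of the pattern (a simplified re-implementation for illustration, not the extension's own class):

```python
import argparse
from collections import defaultdict


class AddConnectionState(argparse.Action):
    def __call__(self, parser, namespace, values, option_string=None):
        properties = defaultdict(list)
        for k, v in (x.split('=', 1) for x in values):
            properties[k].append(v)
        # Keep the first value per key and normalize the key to snake_case.
        namespace.state = {k.replace('-', '_'): v[0] for k, v in properties.items()}


parser = argparse.ArgumentParser()
parser.add_argument('--state', action=AddConnectionState, nargs='+')
ns = parser.parse_args(['--state', 'status=Approved', 'actions-required='])
print(ns.state)  # {'status': 'Approved', 'actions_required': ''}
```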
diff --git a/src/datafactory/azext_datafactory/generated/commands.py b/src/datafactory/azext_datafactory/generated/commands.py
index 83b7f9db34e..f5a297d1496 100644
--- a/src/datafactory/azext_datafactory/generated/commands.py
+++ b/src/datafactory/azext_datafactory/generated/commands.py
@@ -20,7 +20,7 @@ def load_command_table(self, _):
operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._factories_operations#FactoriesOperatio'
'ns.{}',
client_factory=cf_factory)
- with self.command_group('datafactory', datafactory_factory, client_factory=cf_factory) as g:
+ with self.command_group('datafactory', datafactory_factory, client_factory=cf_factory, is_experimental=True) as g:
g.custom_command('list', 'datafactory_list')
g.custom_show_command('show', 'datafactory_show')
g.custom_command('create', 'datafactory_create')
@@ -157,3 +157,33 @@ def load_command_table(self, _):
g.custom_command('cancel', 'datafactory_trigger_run_cancel')
g.custom_command('query-by-factory', 'datafactory_trigger_run_query_by_factory')
g.custom_command('rerun', 'datafactory_trigger_run_rerun')
+
+ from azext_datafactory.generated._client_factory import cf_private_end_point_connection
+ datafactory_private_end_point_connection = CliCommandType(
+ operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._private_end_point_connections_operatio'
+ 'ns#PrivateEndPointConnectionsOperations.{}',
+ client_factory=cf_private_end_point_connection)
+ with self.command_group('datafactory private-end-point-connection', datafactory_private_end_point_connection,
+ client_factory=cf_private_end_point_connection) as g:
+ g.custom_command('list', 'datafactory_private_end_point_connection_list')
+
+ from azext_datafactory.generated._client_factory import cf_private_endpoint_connection
+ datafactory_private_endpoint_connection = CliCommandType(
+ operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._private_endpoint_connection_operations'
+ '#PrivateEndpointConnectionOperations.{}',
+ client_factory=cf_private_endpoint_connection)
+ with self.command_group('datafactory private-endpoint-connection', datafactory_private_endpoint_connection,
+ client_factory=cf_private_endpoint_connection) as g:
+ g.custom_show_command('show', 'datafactory_private_endpoint_connection_show')
+ g.custom_command('create', 'datafactory_private_endpoint_connection_create')
+ g.custom_command('update', 'datafactory_private_endpoint_connection_update')
+ g.custom_command('delete', 'datafactory_private_endpoint_connection_delete', confirmation=True)
+
+ from azext_datafactory.generated._client_factory import cf_private_link_resource
+ datafactory_private_link_resource = CliCommandType(
+ operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._private_link_resources_operations#Priv'
+ 'ateLinkResourcesOperations.{}',
+ client_factory=cf_private_link_resource)
+ with self.command_group('datafactory private-link-resource', datafactory_private_link_resource,
+ client_factory=cf_private_link_resource) as g:
+ g.custom_show_command('show', 'datafactory_private_link_resource_show')
diff --git a/src/datafactory/azext_datafactory/generated/custom.py b/src/datafactory/azext_datafactory/generated/custom.py
index c269c1999ff..42209fa3e2d 100644
--- a/src/datafactory/azext_datafactory/generated/custom.py
+++ b/src/datafactory/azext_datafactory/generated/custom.py
@@ -39,7 +39,14 @@ def datafactory_create(client,
tags=None,
factory_vsts_configuration=None,
factory_git_hub_configuration=None,
- global_parameters=None):
+ global_parameters=None,
+ public_network_access=None,
+ key_name=None,
+ vault_base_url=None,
+ key_version=None,
+ identity=None,
+ type_=None,
+ user_assigned_identities=None):
all_repo_configuration = []
if factory_vsts_configuration is not None:
all_repo_configuration.append(factory_vsts_configuration)
@@ -54,8 +61,15 @@ def datafactory_create(client,
factory['tags'] = tags
factory['repo_configuration'] = repo_configuration
factory['global_parameters'] = global_parameters
+ factory['public_network_access'] = public_network_access
factory['encryption'] = {}
+ factory['encryption']['key_name'] = key_name
+ factory['encryption']['vault_base_url'] = vault_base_url
+ factory['encryption']['key_version'] = key_version
+ factory['encryption']['identity'] = identity
factory['identity'] = {}
+ factory['identity']['type'] = type_
+ factory['identity']['user_assigned_identities'] = user_assigned_identities
return client.create_or_update(resource_group_name=resource_group_name,
factory_name=factory_name,
if_match=if_match,
@@ -65,10 +79,14 @@ def datafactory_create(client,
def datafactory_update(client,
resource_group_name,
factory_name,
- tags=None):
+ tags=None,
+ type_=None,
+ user_assigned_identities=None):
factory_update_parameters = {}
factory_update_parameters['tags'] = tags
factory_update_parameters['identity'] = {}
+ factory_update_parameters['identity']['type'] = type_
+ factory_update_parameters['identity']['user_assigned_identities'] = user_assigned_identities
return client.update(resource_group_name=resource_group_name,
factory_name=factory_name,
factory_update_parameters=factory_update_parameters)
@@ -126,10 +144,12 @@ def datafactory_get_git_hub_access_token(client,
factory_name,
git_hub_access_code,
git_hub_access_token_base_url,
- git_hub_client_id=None):
+ git_hub_client_id=None,
+ git_hub_client_secret=None):
git_hub_access_token_request = {}
git_hub_access_token_request['git_hub_access_code'] = git_hub_access_code
git_hub_access_token_request['git_hub_client_id'] = git_hub_client_id
+ git_hub_access_token_request['git_hub_client_secret'] = git_hub_client_secret
git_hub_access_token_request['git_hub_access_token_base_url'] = git_hub_access_token_base_url
return client.get_git_hub_access_token(resource_group_name=resource_group_name,
factory_name=factory_name,
@@ -179,12 +199,14 @@ def datafactory_integration_runtime_managed_create(client,
integration_runtime_name,
if_match=None,
description=None,
+ managed_virtual_network=None,
compute_properties=None,
ssis_properties=None):
integration_runtime = {}
integration_runtime['properties'] = {}
integration_runtime['properties']['type'] = 'Managed'
integration_runtime['properties']['description'] = description
+ integration_runtime['properties']['managed_virtual_network'] = managed_virtual_network
integration_runtime['properties']['compute_properties'] = compute_properties
integration_runtime['properties']['ssis_properties'] = ssis_properties
return client.create_or_update(resource_group_name=resource_group_name,
@@ -566,7 +588,7 @@ def datafactory_pipeline_update(instance,
annotations=None,
run_dimensions=None,
duration=None,
- folder_name=None):
+ name=None):
if description is not None:
instance.description = description
if activities is not None:
@@ -583,8 +605,8 @@ def datafactory_pipeline_update(instance,
instance.run_dimensions = run_dimensions
if duration is not None:
instance.elapsed_time_metric.duration = duration
- if folder_name is not None:
- instance.folder.name = folder_name
+ if name is not None:
+ instance.folder.name = name
return instance
@@ -841,3 +863,69 @@ def datafactory_trigger_run_rerun(client,
factory_name=factory_name,
trigger_name=trigger_name,
run_id=run_id)
+
+
+def datafactory_private_end_point_connection_list(client,
+ resource_group_name,
+ factory_name):
+ return client.list_by_factory(resource_group_name=resource_group_name,
+ factory_name=factory_name)
+
+
+def datafactory_private_endpoint_connection_show(client,
+ resource_group_name,
+ factory_name,
+ private_endpoint_connection_name,
+ if_none_match=None):
+ return client.get(resource_group_name=resource_group_name,
+ factory_name=factory_name,
+ private_endpoint_connection_name=private_endpoint_connection_name,
+ if_none_match=if_none_match)
+
+
+def datafactory_private_endpoint_connection_create(client,
+ resource_group_name,
+ factory_name,
+ private_endpoint_connection_name,
+ if_match=None,
+ private_link_service_connection_state=None):
+ private_endpoint_wrapper = {}
+ private_endpoint_wrapper['properties'] = {}
+ private_endpoint_wrapper['properties']['private_link_service_connection_state'] = private_link_service_connection_state
+ return client.create_or_update(resource_group_name=resource_group_name,
+ factory_name=factory_name,
+ private_endpoint_connection_name=private_endpoint_connection_name,
+ if_match=if_match,
+ private_endpoint_wrapper=private_endpoint_wrapper)
+
+
+def datafactory_private_endpoint_connection_update(client,
+ resource_group_name,
+ factory_name,
+ private_endpoint_connection_name,
+ if_match=None,
+ private_link_service_connection_state=None):
+ private_endpoint_wrapper = {}
+ private_endpoint_wrapper['properties'] = {}
+ private_endpoint_wrapper['properties']['private_link_service_connection_state'] = private_link_service_connection_state
+ return client.create_or_update(resource_group_name=resource_group_name,
+ factory_name=factory_name,
+ private_endpoint_connection_name=private_endpoint_connection_name,
+ if_match=if_match,
+ private_endpoint_wrapper=private_endpoint_wrapper)
+
+
+def datafactory_private_endpoint_connection_delete(client,
+ resource_group_name,
+ factory_name,
+ private_endpoint_connection_name):
+ return client.delete(resource_group_name=resource_group_name,
+ factory_name=factory_name,
+ private_endpoint_connection_name=private_endpoint_connection_name)
+
+
+def datafactory_private_link_resource_show(client,
+ resource_group_name,
+ factory_name):
+ return client.get(resource_group_name=resource_group_name,
+ factory_name=factory_name)
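Note that `datafactory_private_endpoint_connection_update` assembles the same wrapper as `create` and likewise calls `client.create_or_update`, so an update is a full replacement rather than a patch. A hedged illustration of the body both helpers build, using the values from the "Approves or rejects" help example (shape inferred from the code above, not from service documentation):

```python
private_endpoint_wrapper = {
    'properties': {
        'private_link_service_connection_state': {
            'status': 'Approved',
            'description': 'Approved by admin.',
            'actions_required': '',
        }
    }
}
```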
diff --git a/src/datafactory/azext_datafactory/tests/latest/example_steps.py b/src/datafactory/azext_datafactory/tests/latest/example_steps.py
index 42222d4e576..791da887268 100644
--- a/src/datafactory/azext_datafactory/tests/latest/example_steps.py
+++ b/src/datafactory/azext_datafactory/tests/latest/example_steps.py
@@ -581,7 +581,7 @@ def step_pipeline_update(test, rg, checks=None):
'ipeline().parameters.OutputBlobNameList\\"}}}}}}]" '
'--parameters "{{\\"OutputBlobNameList\\":{{\\"type\\":\\"Array\\"}}}}" '
'--duration "0.00:10:00" '
- '--name "{myPipeline}" '
+ '--pipeline-name "{myPipeline}" '
'--resource-group "{rg}"',
checks=checks)
@@ -634,6 +634,66 @@ def step_pipeline_delete(test, rg, checks=None):
checks=checks)
+# EXAMPLE: /privateEndPointConnections/get/privateEndPointConnections_ListByFactory
+@try_manual
+def step_private_end_point_connection_list(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datafactory private-end-point-connection list '
+ '--factory-name "{myFactory}" '
+ '--resource-group "{rg}"',
+ checks=checks)
+
+
+# EXAMPLE: /PrivateEndpointConnection/put/Approves or rejects a private endpoint connection for a factory.
+@try_manual
+def step_private_endpoint_connection_create(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datafactory private-endpoint-connection create '
+ '--factory-name "{myFactory}" '
+ '--name "{myPrivateEndPointConnection}" '
+ '--private-link-service-connection-state description="Approved by admin." actions-required="" '
+ 'status="Approved" '
+ '--resource-group "{rg}"',
+ checks=checks)
+
+
+# EXAMPLE: /PrivateEndpointConnection/get/Get a private endpoint connection for a datafactory.
+@try_manual
+def step_private_endpoint_connection_show(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datafactory private-endpoint-connection show '
+ '--factory-name "{myFactory}" '
+ '--name "{myPrivateEndPointConnection}" '
+ '--resource-group "{rg}"',
+ checks=checks)
+
+
+# EXAMPLE: /PrivateEndpointConnection/delete/Delete a private endpoint connection for a datafactory.
+@try_manual
+def step_private_endpoint_connection_delete(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datafactory private-endpoint-connection delete -y '
+ '--factory-name "{myFactory}" '
+ '--name "{myPrivateEndPointConnection}" '
+ '--resource-group "{rg}"',
+ checks=checks)
+
+
+# EXAMPLE: /privateLinkResources/get/Get private link resources of a site
+@try_manual
+def step_private_link_resource_show(test, rg, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az datafactory private-link-resource show '
+ '--factory-name "{myFactory}" '
+ '--resource-group "{rg}"',
+ checks=checks)
+
+
# EXAMPLE: /Triggers/put/Triggers_Create
@try_manual
def step_trigger_create(test, rg, checks=None):
diff --git a/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py b/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py
index 517a35650f8..d6795dbdb8b 100644
--- a/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py
+++ b/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py
@@ -174,6 +174,7 @@ def __init__(self, *args, **kwargs):
'myDataset': self.create_random_name(prefix='exampleDataset'[:7], length=14),
'myPipeline': self.create_random_name(prefix='examplePipeline'[:7], length=15),
'myTrigger': self.create_random_name(prefix='exampleTrigger'[:7], length=14),
+ 'myPrivateEndPointConnection': 'connection',
})
@ResourceGroupPreparer(name_prefix='clitestdatafactory_exampleResourceGroup'[:7], key='rg', parameter_name='rg')
diff --git a/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario_coverage.md b/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario_coverage.md
deleted file mode 100644
index b7eabe4528a..00000000000
--- a/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario_coverage.md
+++ /dev/null
@@ -1,48 +0,0 @@
-|Scenario|Result|ErrorMessage|ErrorStack|ErrorNormalized|StartDt|EndDt|
-|step_create|successed||||2021-04-26 09:05:32.308913|2021-04-26 09:05:32.501033|
-|step_update|successed||||2021-04-26 09:05:22.750754|2021-04-26 09:05:22.880707|
-|step_linked_service_create|successed||||2021-04-26 09:05:22.880707|2021-04-26 09:05:23.009706|
-|step_linked_service_update|successed||||2021-04-26 09:05:23.010706|2021-04-26 09:05:23.174579|
-|step_dataset_create|successed||||2021-04-26 09:05:23.174579|2021-04-26 09:05:23.317043|
-|step_dataset_update|successed||||2021-04-26 09:05:23.318045|2021-04-26 09:05:23.451047|
-|step_pipeline_create|successed||||2021-04-26 09:05:23.452049|2021-04-26 09:05:23.575751|
-|step_trigger_create|successed||||2021-04-26 09:05:23.703756|2021-04-26 09:05:23.871057|
-|step_trigger_update|successed||||2021-04-26 09:05:23.871057|2021-04-26 09:05:24.019053|
-|step_integration_runtime_self_hosted_create|successed||||2021-04-26 09:05:24.019053|2021-04-26 09:05:24.155099|
-|step_integration_runtime_update|successed||||2021-04-26 09:05:24.155099|2021-04-26 09:05:24.285096|
-|step_integration_runtime_show|successed||||2021-04-26 09:05:29.524820|2021-04-26 09:05:29.675815|
-|step_linked_service_show|successed||||2021-04-26 09:05:24.582291|2021-04-26 09:05:24.718292|
-|step_pipeline_show|successed||||2021-04-26 09:05:24.719291|2021-04-26 09:05:24.872517|
-|step_dataset_show|successed||||2021-04-26 09:05:24.873517|2021-04-26 09:05:25.000030|
-|step_trigger_show|successed||||2021-04-26 09:05:33.782136|2021-04-26 09:05:33.927138|
-|step_integration_runtime_list|successed||||2021-04-26 09:05:25.115003|2021-04-26 09:05:25.253055|
-|step_linked_service_list|successed||||2021-04-26 09:05:25.254059|2021-04-26 09:05:25.409635|
-|step_pipeline_list|successed||||2021-04-26 09:05:25.409635|2021-04-26 09:05:25.533704|
-|step_trigger_list|successed||||2021-04-26 09:05:25.533704|2021-04-26 09:05:25.676865|
-|step_dataset_list|successed||||2021-04-26 09:05:25.676865|2021-04-26 09:05:25.810871|
-|step_show|successed||||2021-04-26 09:05:25.810871|2021-04-26 09:05:25.938042|
-|step_list2|successed||||2021-04-26 09:05:25.938042|2021-04-26 09:05:26.060042|
-|step_list|successed||||2021-04-26 09:05:26.060042|2021-04-26 09:05:26.183196|
-|step_integration_runtime_regenerate_auth_key|successed||||2021-04-26 09:05:26.184194|2021-04-26 09:05:26.313194|
-|step_integration_runtime_sync_credentials|successed||||2021-04-26 09:05:26.314192|2021-04-26 09:05:26.449307|
-|step_integration_runtime_get_monitoring_data|successed||||2021-04-26 09:05:26.449307|2021-04-26 09:05:26.636000|
-|step_integration_runtime_list_auth_key|successed||||2021-04-26 09:05:26.636000|2021-04-26 09:05:26.790002|
-|step_integration_runtime_remove_link|successed||||2021-04-26 09:05:26.791005|2021-04-26 09:05:26.934513|
-|step_integration_runtime_get_status|successed||||2021-04-26 09:05:26.935512|2021-04-26 09:05:27.069511|
-|step_trigger_get_event_subscription_status|successed||||2021-04-26 09:05:27.069511|2021-04-26 09:05:27.211487|
-|step_trigger_unsubscribe_from_event|successed||||2021-04-26 09:05:27.212492|2021-04-26 09:05:27.402802|
-|step_trigger_subscribe_to_event|successed||||2021-04-26 09:05:27.402802|2021-04-26 09:05:27.532807|
-|step_trigger_start|successed||||2021-04-26 09:05:33.632612|2021-04-26 09:05:33.782136|
-|step_trigger_stop|successed||||2021-04-26 09:05:34.611518|2021-04-26 09:05:34.768873|
-|step_get_data_plane_access|successed||||2021-04-26 09:05:27.837090|2021-04-26 09:05:27.977072|
-|step_configure_factory_repo|successed||||2021-04-26 09:05:28.099075|2021-04-26 09:05:28.288426|
-|step_integration_runtime_delete|successed||||2021-04-26 09:05:31.965947|2021-04-26 09:05:32.140944|
-|step_trigger_delete|successed||||2021-04-26 09:05:34.768873|2021-04-26 09:05:34.900878|
-|step_pipeline_delete|successed||||2021-04-26 09:05:34.900878|2021-04-26 09:05:35.030991|
-|step_dataset_delete|successed||||2021-04-26 09:05:28.737334|2021-04-26 09:05:28.861337|
-|step_linked_service_delete|successed||||2021-04-26 09:05:28.861337|2021-04-26 09:05:28.989612|
-|step_delete|successed||||2021-04-26 09:05:35.031990|2021-04-26 09:05:35.197507|
-|step_integration_runtime_start|successed||||2021-04-26 09:05:29.676815|2021-04-26 09:05:30.373119|
-|step_integration_runtime_stop|successed||||2021-04-26 09:05:30.374118|2021-04-26 09:05:31.964925|
-|step_activity_run_query_by_pipeline_run|successed||||2021-04-26 09:05:33.012581|2021-04-26 09:05:33.193579|
-Coverage: 46/46
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_data_factory_management_client.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_data_factory_management_client.py
index f272437a3e9..9dfe04b82d0 100644
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_data_factory_management_client.py
+++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_data_factory_management_client.py
@@ -107,7 +107,6 @@ def __init__(
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
- self._serialize.client_side_validation = False
self._deserialize = Deserializer(client_models)
self.operations = Operations(
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_configuration_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_configuration_async.py
deleted file mode 100644
index 411d6c4a66e..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_configuration_async.py
+++ /dev/null
@@ -1,67 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-
-from typing import Any, TYPE_CHECKING
-
-from azure.core.configuration import Configuration
-from azure.core.pipeline import policies
-from azure.mgmt.core.policies import ARMHttpLoggingPolicy
-
-if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
- from azure.core.credentials_async import AsyncTokenCredential
-
-VERSION = "unknown"
-
-class DataFactoryManagementClientConfiguration(Configuration):
- """Configuration for DataFactoryManagementClient.
-
- Note that all parameters used to create this instance are saved as instance
- attributes.
-
- :param credential: Credential needed for the client to connect to Azure.
- :type credential: ~azure.core.credentials_async.AsyncTokenCredential
- :param subscription_id: The subscription identifier.
- :type subscription_id: str
- """
-
- def __init__(
- self,
- credential: "AsyncTokenCredential",
- subscription_id: str,
- **kwargs: Any
- ) -> None:
- if credential is None:
- raise ValueError("Parameter 'credential' must not be None.")
- if subscription_id is None:
- raise ValueError("Parameter 'subscription_id' must not be None.")
- super(DataFactoryManagementClientConfiguration, self).__init__(**kwargs)
-
- self.credential = credential
- self.subscription_id = subscription_id
- self.api_version = "2018-06-01"
- self.credential_scopes = ['https://management.azure.com/.default']
- self.credential_scopes.extend(kwargs.pop('credential_scopes', []))
- kwargs.setdefault('sdk_moniker', 'datafactorymanagementclient/{}'.format(VERSION))
- self._configure(**kwargs)
-
- def _configure(
- self,
- **kwargs: Any
- ) -> None:
- self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
- self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
- self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
- self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
- self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
- self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
- self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
- self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
- self.authentication_policy = kwargs.get('authentication_policy')
- if self.credential and not self.authentication_policy:
- self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client.py
index 255a1839c21..01497b56d61 100644
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client.py
+++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client.py
@@ -104,7 +104,6 @@ def __init__(
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
- self._serialize.client_side_validation = False
self._deserialize = Deserializer(client_models)
self.operations = Operations(
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client_async.py
deleted file mode 100644
index b2b322686b8..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client_async.py
+++ /dev/null
@@ -1,143 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-
-from typing import Any, Optional, TYPE_CHECKING
-
-from azure.mgmt.core import AsyncARMPipelineClient
-from msrest import Deserializer, Serializer
-
-if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
- from azure.core.credentials_async import AsyncTokenCredential
-
-from ._configuration_async import DataFactoryManagementClientConfiguration
-from .operations_async import OperationOperations
-from .operations_async import FactoryOperations
-from .operations_async import ExposureControlOperations
-from .operations_async import IntegrationRuntimeOperations
-from .operations_async import IntegrationRuntimeObjectMetadataOperations
-from .operations_async import IntegrationRuntimeNodeOperations
-from .operations_async import LinkedServiceOperations
-from .operations_async import DatasetOperations
-from .operations_async import PipelineOperations
-from .operations_async import PipelineRunOperations
-from .operations_async import ActivityRunOperations
-from .operations_async import TriggerOperations
-from .operations_async import TriggerRunOperations
-from .operations_async import DataFlowOperations
-from .operations_async import DataFlowDebugSessionOperations
-from .operations_async import ManagedVirtualNetworkOperations
-from .operations_async import ManagedPrivateEndpointOperations
-from .. import models
-
-
-class DataFactoryManagementClient(object):
- """The Azure Data Factory V2 management API provides a RESTful set of web services that interact with Azure Data Factory V2 services.
-
- :ivar operation: OperationOperations operations
- :vartype operation: data_factory_management_client.aio.operations_async.OperationOperations
- :ivar factory: FactoryOperations operations
- :vartype factory: data_factory_management_client.aio.operations_async.FactoryOperations
- :ivar exposure_control: ExposureControlOperations operations
- :vartype exposure_control: data_factory_management_client.aio.operations_async.ExposureControlOperations
- :ivar integration_runtime: IntegrationRuntimeOperations operations
- :vartype integration_runtime: data_factory_management_client.aio.operations_async.IntegrationRuntimeOperations
- :ivar integration_runtime_object_metadata: IntegrationRuntimeObjectMetadataOperations operations
- :vartype integration_runtime_object_metadata: data_factory_management_client.aio.operations_async.IntegrationRuntimeObjectMetadataOperations
- :ivar integration_runtime_node: IntegrationRuntimeNodeOperations operations
- :vartype integration_runtime_node: data_factory_management_client.aio.operations_async.IntegrationRuntimeNodeOperations
- :ivar linked_service: LinkedServiceOperations operations
- :vartype linked_service: data_factory_management_client.aio.operations_async.LinkedServiceOperations
- :ivar dataset: DatasetOperations operations
- :vartype dataset: data_factory_management_client.aio.operations_async.DatasetOperations
- :ivar pipeline: PipelineOperations operations
- :vartype pipeline: data_factory_management_client.aio.operations_async.PipelineOperations
- :ivar pipeline_run: PipelineRunOperations operations
- :vartype pipeline_run: data_factory_management_client.aio.operations_async.PipelineRunOperations
- :ivar activity_run: ActivityRunOperations operations
- :vartype activity_run: data_factory_management_client.aio.operations_async.ActivityRunOperations
- :ivar trigger: TriggerOperations operations
- :vartype trigger: data_factory_management_client.aio.operations_async.TriggerOperations
- :ivar trigger_run: TriggerRunOperations operations
- :vartype trigger_run: data_factory_management_client.aio.operations_async.TriggerRunOperations
- :ivar data_flow: DataFlowOperations operations
- :vartype data_flow: data_factory_management_client.aio.operations_async.DataFlowOperations
- :ivar data_flow_debug_session: DataFlowDebugSessionOperations operations
- :vartype data_flow_debug_session: data_factory_management_client.aio.operations_async.DataFlowDebugSessionOperations
- :ivar managed_virtual_network: ManagedVirtualNetworkOperations operations
- :vartype managed_virtual_network: data_factory_management_client.aio.operations_async.ManagedVirtualNetworkOperations
- :ivar managed_private_endpoint: ManagedPrivateEndpointOperations operations
- :vartype managed_private_endpoint: data_factory_management_client.aio.operations_async.ManagedPrivateEndpointOperations
- :param credential: Credential needed for the client to connect to Azure.
- :type credential: ~azure.core.credentials_async.AsyncTokenCredential
- :param subscription_id: The subscription identifier.
- :type subscription_id: str
- :param str base_url: Service URL
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
- """
-
- def __init__(
- self,
- credential: "AsyncTokenCredential",
- subscription_id: str,
- base_url: Optional[str] = None,
- **kwargs: Any
- ) -> None:
- if not base_url:
- base_url = 'https://management.azure.com'
- self._config = DataFactoryManagementClientConfiguration(credential, subscription_id, **kwargs)
- self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
-
- client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
- self._serialize = Serializer(client_models)
- self._deserialize = Deserializer(client_models)
-
- self.operation = OperationOperations(
- self._client, self._config, self._serialize, self._deserialize)
- self.factory = FactoryOperations(
- self._client, self._config, self._serialize, self._deserialize)
- self.exposure_control = ExposureControlOperations(
- self._client, self._config, self._serialize, self._deserialize)
- self.integration_runtime = IntegrationRuntimeOperations(
- self._client, self._config, self._serialize, self._deserialize)
- self.integration_runtime_object_metadata = IntegrationRuntimeObjectMetadataOperations(
- self._client, self._config, self._serialize, self._deserialize)
- self.integration_runtime_node = IntegrationRuntimeNodeOperations(
- self._client, self._config, self._serialize, self._deserialize)
- self.linked_service = LinkedServiceOperations(
- self._client, self._config, self._serialize, self._deserialize)
- self.dataset = DatasetOperations(
- self._client, self._config, self._serialize, self._deserialize)
- self.pipeline = PipelineOperations(
- self._client, self._config, self._serialize, self._deserialize)
- self.pipeline_run = PipelineRunOperations(
- self._client, self._config, self._serialize, self._deserialize)
- self.activity_run = ActivityRunOperations(
- self._client, self._config, self._serialize, self._deserialize)
- self.trigger = TriggerOperations(
- self._client, self._config, self._serialize, self._deserialize)
- self.trigger_run = TriggerRunOperations(
- self._client, self._config, self._serialize, self._deserialize)
- self.data_flow = DataFlowOperations(
- self._client, self._config, self._serialize, self._deserialize)
- self.data_flow_debug_session = DataFlowDebugSessionOperations(
- self._client, self._config, self._serialize, self._deserialize)
- self.managed_virtual_network = ManagedVirtualNetworkOperations(
- self._client, self._config, self._serialize, self._deserialize)
- self.managed_private_endpoint = ManagedPrivateEndpointOperations(
- self._client, self._config, self._serialize, self._deserialize)
-
- async def close(self) -> None:
- await self._client.close()
-
- async def __aenter__(self) -> "DataFactoryManagementClient":
- await self._client.__aenter__()
- return self
-
- async def __aexit__(self, *exc_details) -> None:
- await self._client.__aexit__(*exc_details)
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/__init__.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/__init__.py
deleted file mode 100644
index 554e3ba9232..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/__init__.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-
-from ._operation_operations_async import OperationOperations
-from ._factory_operations_async import FactoryOperations
-from ._exposure_control_operations_async import ExposureControlOperations
-from ._integration_runtime_operations_async import IntegrationRuntimeOperations
-from ._integration_runtime_object_metadata_operations_async import IntegrationRuntimeObjectMetadataOperations
-from ._integration_runtime_node_operations_async import IntegrationRuntimeNodeOperations
-from ._linked_service_operations_async import LinkedServiceOperations
-from ._dataset_operations_async import DatasetOperations
-from ._pipeline_operations_async import PipelineOperations
-from ._pipeline_run_operations_async import PipelineRunOperations
-from ._activity_run_operations_async import ActivityRunOperations
-from ._trigger_operations_async import TriggerOperations
-from ._trigger_run_operations_async import TriggerRunOperations
-from ._data_flow_operations_async import DataFlowOperations
-from ._data_flow_debug_session_operations_async import DataFlowDebugSessionOperations
-from ._managed_virtual_network_operations_async import ManagedVirtualNetworkOperations
-from ._managed_private_endpoint_operations_async import ManagedPrivateEndpointOperations
-
-__all__ = [
- 'OperationOperations',
- 'FactoryOperations',
- 'ExposureControlOperations',
- 'IntegrationRuntimeOperations',
- 'IntegrationRuntimeObjectMetadataOperations',
- 'IntegrationRuntimeNodeOperations',
- 'LinkedServiceOperations',
- 'DatasetOperations',
- 'PipelineOperations',
- 'PipelineRunOperations',
- 'ActivityRunOperations',
- 'TriggerOperations',
- 'TriggerRunOperations',
- 'DataFlowOperations',
- 'DataFlowDebugSessionOperations',
- 'ManagedVirtualNetworkOperations',
- 'ManagedPrivateEndpointOperations',
-]
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_activity_run_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_activity_run_operations_async.py
deleted file mode 100644
index 0d2e56be08b..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_activity_run_operations_async.py
+++ /dev/null
@@ -1,127 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-import datetime
-from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar
-import warnings
-
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from ... import models
-
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-
-class ActivityRunOperations:
- """ActivityRunOperations async operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer) -> None:
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- async def query_by_pipeline_run(
- self,
- resource_group_name: str,
- factory_name: str,
- run_id: str,
- last_updated_after: datetime.datetime,
- last_updated_before: datetime.datetime,
- continuation_token_parameter: Optional[str] = None,
- filters: Optional[List["models.RunQueryFilter"]] = None,
- order_by: Optional[List["models.RunQueryOrderBy"]] = None,
- **kwargs
- ) -> "models.ActivityRunsQueryResponse":
- """Query activity runs based on input filter conditions.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param run_id: The pipeline run identifier.
- :type run_id: str
- :param last_updated_after: The time at or after which the run event was updated in 'ISO 8601'
- format.
- :type last_updated_after: ~datetime.datetime
- :param last_updated_before: The time at or before which the run event was updated in 'ISO 8601'
- format.
- :type last_updated_before: ~datetime.datetime
- :param continuation_token_parameter: The continuation token for getting the next page of
- results. Null for first page.
- :type continuation_token_parameter: str
- :param filters: List of filters.
- :type filters: list[~data_factory_management_client.models.RunQueryFilter]
- :param order_by: List of OrderBy options.
- :type order_by: list[~data_factory_management_client.models.RunQueryOrderBy]
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: ActivityRunsQueryResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.ActivityRunsQueryResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ActivityRunsQueryResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.query_by_pipeline_run.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'runId': self._serialize.url("run_id", run_id, 'str'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(filter_parameters, 'RunFilterParameters')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('ActivityRunsQueryResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- query_by_pipeline_run.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/queryActivityruns'} # type: ignore
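The deleted `query_by_pipeline_run` folds its optional filter arguments and the mandatory time window into a single `RunFilterParameters` body and POSTs it. A usage sketch under that signature; the `activity_runs` attribute on the client and the `value`/`continuation_token` fields on the response are assumptions based on the generated query-response models:

    import datetime

    async def activity_runs_last_day(client, rg: str, factory: str, run_id: str):
        now = datetime.datetime.utcnow()
        resp = await client.activity_runs.query_by_pipeline_run(
            resource_group_name=rg,
            factory_name=factory,
            run_id=run_id,
            # Both bounds are required; they bracket when run events were updated.
            last_updated_after=now - datetime.timedelta(days=1),
            last_updated_before=now,
        )
        # Pass continuation_token_parameter=resp.continuation_token for the next page.
        return resp.value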
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_debug_session_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_debug_session_operations_async.py
deleted file mode 100644
index f1bf8ee8f73..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_debug_session_operations_async.py
+++ /dev/null
@@ -1,551 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import Any, AsyncIterable, Callable, Dict, Generic, List, Optional, TypeVar, Union
-import warnings
-
-from azure.core.async_paging import AsyncItemPaged, AsyncList
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
-from azure.mgmt.core.exceptions import ARMErrorFormat
-from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
-
-from ... import models
-
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-
-class DataFlowDebugSessionOperations:
- """DataFlowDebugSessionOperations async operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer) -> None:
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- async def _create_initial(
- self,
- resource_group_name: str,
- factory_name: str,
- compute_type: Optional[str] = None,
- core_count: Optional[int] = None,
- time_to_live: Optional[int] = None,
- name: Optional[str] = None,
- properties: Optional["models.IntegrationRuntime"] = None,
- **kwargs
- ) -> Optional["models.CreateDataFlowDebugSessionResponse"]:
- cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.CreateDataFlowDebugSessionResponse"]]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- request = models.CreateDataFlowDebugSessionRequest(compute_type=compute_type, core_count=core_count, time_to_live=time_to_live, name=name, properties=properties)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self._create_initial.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(request, 'CreateDataFlowDebugSessionRequest')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 202]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- response_headers = {}
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('CreateDataFlowDebugSessionResponse', pipeline_response)
-
- if response.status_code == 202:
- response_headers['location']=self._deserialize('str', response.headers.get('location'))
-
- if cls:
- return cls(pipeline_response, deserialized, response_headers)
-
- return deserialized
- _create_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/createDataFlowDebugSession'} # type: ignore
-
- async def begin_create(
- self,
- resource_group_name: str,
- factory_name: str,
- compute_type: Optional[str] = None,
- core_count: Optional[int] = None,
- time_to_live: Optional[int] = None,
- name: Optional[str] = None,
- properties: Optional["models.IntegrationRuntime"] = None,
- **kwargs
- ) -> AsyncLROPoller["models.CreateDataFlowDebugSessionResponse"]:
- """Creates a data flow debug session.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param compute_type: Compute type of the cluster. The value will be overwritten by the same
- setting in integration runtime if provided.
- :type compute_type: str
- :param core_count: Core count of the cluster. The value will be overwritten by the same setting
- in integration runtime if provided.
- :type core_count: int
- :param time_to_live: Time to live setting of the cluster in minutes.
- :type time_to_live: int
- :param name: The resource name.
- :type name: str
- :param properties: Integration runtime properties.
- :type properties: ~data_factory_management_client.models.IntegrationRuntime
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: True for ARMPolling, False for no polling, or a
- polling object for personal polling strategy
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
- :return: An instance of AsyncLROPoller that returns either CreateDataFlowDebugSessionResponse or the result of cls(response)
- :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.CreateDataFlowDebugSessionResponse]
- :raises ~azure.core.exceptions.HttpResponseError:
- """
- polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
- cls = kwargs.pop('cls', None) # type: ClsType["models.CreateDataFlowDebugSessionResponse"]
- lro_delay = kwargs.pop(
- 'polling_interval',
- self._config.polling_interval
- )
- cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
- if cont_token is None:
- raw_result = await self._create_initial(
- resource_group_name=resource_group_name,
- factory_name=factory_name,
- compute_type=compute_type,
- core_count=core_count,
- time_to_live=time_to_live,
- name=name,
- properties=properties,
- cls=lambda x,y,z: x,
- **kwargs
- )
-
- kwargs.pop('error_map', None)
- kwargs.pop('content_type', None)
-
- def get_long_running_output(pipeline_response):
- deserialized = self._deserialize('CreateDataFlowDebugSessionResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
- return deserialized
-
- if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
- elif polling is False: polling_method = AsyncNoPolling()
- else: polling_method = polling
- if cont_token:
- return AsyncLROPoller.from_continuation_token(
- polling_method=polling_method,
- continuation_token=cont_token,
- client=self._client,
- deserialization_callback=get_long_running_output
- )
- else:
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/createDataFlowDebugSession'} # type: ignore
-
- def query_by_factory(
- self,
- resource_group_name: str,
- factory_name: str,
- **kwargs
- ) -> AsyncIterable["models.QueryDataFlowDebugSessionsResponse"]:
- """Query all active data flow debug sessions.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: An iterator like instance of either QueryDataFlowDebugSessionsResponse or the result of cls(response)
- :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.QueryDataFlowDebugSessionsResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.QueryDataFlowDebugSessionsResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.query_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.post(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- async def extract_data(pipeline_response):
- deserialized = self._deserialize('QueryDataFlowDebugSessionsResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, AsyncList(list_of_elem)
-
- async def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return AsyncItemPaged(
- get_next, extract_data
- )
- query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryDataFlowDebugSessions'} # type: ignore
-
- async def add_data_flow(
- self,
- resource_group_name: str,
- factory_name: str,
- session_id: Optional[str] = None,
- datasets: Optional[List["models.DatasetDebugResource"]] = None,
- linked_services: Optional[List["models.LinkedServiceDebugResource"]] = None,
- source_settings: Optional[List["models.DataFlowSourceSetting"]] = None,
- parameters: Optional[Dict[str, object]] = None,
- dataset_parameters: Optional[object] = None,
- folder_path: Optional[object] = None,
- reference_name: Optional[str] = None,
- name: Optional[str] = None,
- properties: Optional["models.DataFlow"] = None,
- **kwargs
- ) -> "models.AddDataFlowToDebugSessionResponse":
- """Add a data flow into debug session.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param session_id: The ID of data flow debug session.
- :type session_id: str
- :param datasets: List of datasets.
- :type datasets: list[~data_factory_management_client.models.DatasetDebugResource]
- :param linked_services: List of linked services.
- :type linked_services: list[~data_factory_management_client.models.LinkedServiceDebugResource]
- :param source_settings: Source settings for data flow debug.
- :type source_settings: list[~data_factory_management_client.models.DataFlowSourceSetting]
- :param parameters: Data flow parameters.
- :type parameters: dict[str, object]
- :param dataset_parameters: Parameters for dataset.
- :type dataset_parameters: object
- :param folder_path: Folder path for staging blob. Type: string (or Expression with resultType
- string).
- :type folder_path: object
- :param reference_name: Reference LinkedService name.
- :type reference_name: str
- :param name: The resource name.
- :type name: str
- :param properties: Data flow properties.
- :type properties: ~data_factory_management_client.models.DataFlow
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: AddDataFlowToDebugSessionResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.AddDataFlowToDebugSessionResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.AddDataFlowToDebugSessionResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- request = models.DataFlowDebugPackage(session_id=session_id, datasets=datasets, linked_services=linked_services, source_settings=source_settings, parameters_debug_settings_parameters=parameters, dataset_parameters=dataset_parameters, folder_path=folder_path, reference_name=reference_name, name=name, properties=properties)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.add_data_flow.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(request, 'DataFlowDebugPackage')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('AddDataFlowToDebugSessionResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- add_data_flow.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/addDataFlowToDebugSession'} # type: ignore
-
- async def delete(
- self,
- resource_group_name: str,
- factory_name: str,
- session_id: Optional[str] = None,
- **kwargs
- ) -> None:
- """Deletes a data flow debug session.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param session_id: The ID of data flow debug session.
- :type session_id: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- request = models.DeleteDataFlowDebugSessionRequest(session_id=session_id)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.delete.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(request, 'DeleteDataFlowDebugSessionRequest')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/deleteDataFlowDebugSession'} # type: ignore
-
- async def _execute_command_initial(
- self,
- resource_group_name: str,
- factory_name: str,
- session_id: Optional[str] = None,
- command: Optional[Union[str, "models.DataFlowDebugCommandType"]] = None,
- command_payload: Optional["models.DataFlowDebugCommandPayload"] = None,
- **kwargs
- ) -> Optional["models.DataFlowDebugCommandResponse"]:
- cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DataFlowDebugCommandResponse"]]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- request = models.DataFlowDebugCommandRequest(session_id=session_id, command=command, command_payload=command_payload)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self._execute_command_initial.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(request, 'DataFlowDebugCommandRequest')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 202]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- response_headers = {}
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('DataFlowDebugCommandResponse', pipeline_response)
-
- if response.status_code == 202:
- response_headers['location']=self._deserialize('str', response.headers.get('location'))
-
- if cls:
- return cls(pipeline_response, deserialized, response_headers)
-
- return deserialized
- _execute_command_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/executeDataFlowDebugCommand'} # type: ignore
-
- async def begin_execute_command(
- self,
- resource_group_name: str,
- factory_name: str,
- session_id: Optional[str] = None,
- command: Optional[Union[str, "models.DataFlowDebugCommandType"]] = None,
- command_payload: Optional["models.DataFlowDebugCommandPayload"] = None,
- **kwargs
- ) -> AsyncLROPoller["models.DataFlowDebugCommandResponse"]:
- """Execute a data flow debug command.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param session_id: The ID of data flow debug session.
- :type session_id: str
- :param command: The command type.
- :type command: str or ~data_factory_management_client.models.DataFlowDebugCommandType
- :param command_payload: The command payload object.
- :type command_payload: ~data_factory_management_client.models.DataFlowDebugCommandPayload
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: True for ARMPolling, False for no polling, or a
- polling object for personal polling strategy
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
- :return: An instance of AsyncLROPoller that returns either DataFlowDebugCommandResponse or the result of cls(response)
- :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.DataFlowDebugCommandResponse]
- :raises ~azure.core.exceptions.HttpResponseError:
- """
- polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
- cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowDebugCommandResponse"]
- lro_delay = kwargs.pop(
- 'polling_interval',
- self._config.polling_interval
- )
- cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
- if cont_token is None:
- raw_result = await self._execute_command_initial(
- resource_group_name=resource_group_name,
- factory_name=factory_name,
- session_id=session_id,
- command=command,
- command_payload=command_payload,
- cls=lambda x,y,z: x,
- **kwargs
- )
-
- kwargs.pop('error_map', None)
- kwargs.pop('content_type', None)
-
- def get_long_running_output(pipeline_response):
- deserialized = self._deserialize('DataFlowDebugCommandResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
- return deserialized
-
- if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
- elif polling is False: polling_method = AsyncNoPolling()
- else: polling_method = polling
- if cont_token:
- return AsyncLROPoller.from_continuation_token(
- polling_method=polling_method,
- continuation_token=cont_token,
- client=self._client,
- deserialization_callback=get_long_running_output
- )
- else:
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_execute_command.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/executeDataFlowDebugCommand'} # type: ignore
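Both LROs in this file follow the same shape: the `_initial` call returns 200 with a body or 202 with only a `location` header, and the `begin_*` wrapper hands that response to `AsyncARMPolling` unless a continuation token resumes an earlier poller. A lifecycle sketch under the deleted signatures; the `data_flow_debug_session` attribute, the `session_id`/`status` fields, and the `executePreviewQuery` command value are assumptions from the service models:

    async def preview_flow(client, rg: str, factory: str):
        poller = await client.data_flow_debug_session.begin_create(
            resource_group_name=rg,
            factory_name=factory,
            compute_type="General",  # overwritten by the integration runtime if one is set
            core_count=8,
            time_to_live=60,         # minutes
        )
        session = await poller.result()
        try:
            cmd = await client.data_flow_debug_session.begin_execute_command(
                resource_group_name=rg,
                factory_name=factory,
                session_id=session.session_id,
                command="executePreviewQuery",
            )
            print((await cmd.result()).status)
        finally:
            # delete() is a single request (no poller) and returns None.
            await client.data_flow_debug_session.delete(
                resource_group_name=rg,
                factory_name=factory,
                session_id=session.session_id,
            )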
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_operations_async.py
deleted file mode 100644
index b5c2e5656ce..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_operations_async.py
+++ /dev/null
@@ -1,309 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
-import warnings
-
-from azure.core.async_paging import AsyncItemPaged, AsyncList
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from ... import models
-
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-
-class DataFlowOperations:
- """DataFlowOperations async operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer) -> None:
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- async def create_or_update(
- self,
- resource_group_name: str,
- factory_name: str,
- data_flow_name: str,
- properties: "models.DataFlow",
- if_match: Optional[str] = None,
- **kwargs
- ) -> "models.DataFlowResource":
- """Creates or updates a data flow.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param data_flow_name: The data flow name.
- :type data_flow_name: str
- :param properties: Data flow properties.
- :type properties: ~data_factory_management_client.models.DataFlow
- :param if_match: ETag of the data flow entity. Should only be specified for update, for which
- it should match existing entity or can be * for unconditional update.
- :type if_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: DataFlowResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.DataFlowResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- data_flow = models.DataFlowResource(properties=properties)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_or_update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_match is not None:
- header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(data_flow, 'DataFlowResource')
- body_content_kwargs['content'] = body_content
- request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('DataFlowResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'} # type: ignore
-
- async def get(
- self,
- resource_group_name: str,
- factory_name: str,
- data_flow_name: str,
- if_none_match: Optional[str] = None,
- **kwargs
- ) -> "models.DataFlowResource":
- """Gets a data flow.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param data_flow_name: The data flow name.
- :type data_flow_name: str
- :param if_none_match: ETag of the data flow entity. Should only be specified for get. If the
- ETag matches the existing entity tag, or if * was provided, then no content will be returned.
- :type if_none_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: DataFlowResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.DataFlowResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_none_match is not None:
- header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('DataFlowResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'} # type: ignore
-
- async def delete(
- self,
- resource_group_name: str,
- factory_name: str,
- data_flow_name: str,
- **kwargs
- ) -> None:
- """Deletes a data flow.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param data_flow_name: The data flow name.
- :type data_flow_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.delete.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.delete(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 204]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'} # type: ignore
-
- def list_by_factory(
- self,
- resource_group_name: str,
- factory_name: str,
- **kwargs
- ) -> AsyncIterable["models.DataFlowListResponse"]:
- """Lists data flows.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: An iterator like instance of either DataFlowListResponse or the result of cls(response)
- :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.DataFlowListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- async def extract_data(pipeline_response):
- deserialized = self._deserialize('DataFlowListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, AsyncList(list_of_elem)
-
- async def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return AsyncItemPaged(
- get_next, extract_data
- )
- list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows'} # type: ignore
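`list_by_factory` pages with plain GETs: the first request hits the factory's `dataflows` URL, every subsequent one follows `next_link` verbatim, and each page's `value` list is flattened into the iterator. A CRUD sketch under the deleted signatures; the `data_flows` attribute and the `etag` field are assumptions:

    async def roundtrip_flow(client, rg: str, factory: str, props):
        flow = await client.data_flows.create_or_update(
            resource_group_name=rg,
            factory_name=factory,
            data_flow_name="MyFlow",
            properties=props,  # a models.DataFlow subtype (e.g. a mapping data flow)
            # if_match is omitted on create; pass an ETag (or "*") only for updates.
        )
        # AsyncItemPaged: iteration lazily follows next_link across pages.
        async for item in client.data_flows.list_by_factory(rg, factory):
            print(item.name, item.etag)
        await client.data_flows.delete(rg, factory, flow.name)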
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_dataset_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_dataset_operations_async.py
deleted file mode 100644
index a8be0369365..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_dataset_operations_async.py
+++ /dev/null
@@ -1,311 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
-import warnings
-
-from azure.core.async_paging import AsyncItemPaged, AsyncList
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from ... import models
-
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-
-class DatasetOperations:
- """DatasetOperations async operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer) -> None:
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def list_by_factory(
- self,
- resource_group_name: str,
- factory_name: str,
- **kwargs
- ) -> AsyncIterable["models.DatasetListResponse"]:
- """Lists datasets.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: An iterator like instance of either DatasetListResponse or the result of cls(response)
- :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.DatasetListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- async def extract_data(pipeline_response):
- deserialized = self._deserialize('DatasetListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, AsyncList(list_of_elem)
-
- async def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return AsyncItemPaged(
- get_next, extract_data
- )
- list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets'} # type: ignore
-
- async def create_or_update(
- self,
- resource_group_name: str,
- factory_name: str,
- dataset_name: str,
- properties: "models.Dataset",
- if_match: Optional[str] = None,
- **kwargs
- ) -> "models.DatasetResource":
- """Creates or updates a dataset.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param dataset_name: The dataset name.
- :type dataset_name: str
- :param properties: Dataset properties.
- :type properties: ~data_factory_management_client.models.Dataset
- :param if_match: ETag of the dataset entity. Should only be specified for update, for which it
- should match existing entity or can be * for unconditional update.
- :type if_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: DatasetResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.DatasetResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- dataset = models.DatasetResource(properties=properties)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_or_update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_match is not None:
- header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(dataset, 'DatasetResource')
- body_content_kwargs['content'] = body_content
- request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('DatasetResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'} # type: ignore
-
- async def get(
- self,
- resource_group_name: str,
- factory_name: str,
- dataset_name: str,
- if_none_match: Optional[str] = None,
- **kwargs
- ) -> Optional["models.DatasetResource"]:
- """Gets a dataset.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param dataset_name: The dataset name.
- :type dataset_name: str
- :param if_none_match: ETag of the dataset entity. Should only be specified for get. If the ETag
- matches the existing entity tag, or if * was provided, then no content will be returned.
- :type if_none_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: DatasetResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.DatasetResource or None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DatasetResource"]]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_none_match is not None:
- header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 304]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('DatasetResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'} # type: ignore
-
- async def delete(
- self,
- resource_group_name: str,
- factory_name: str,
- dataset_name: str,
- **kwargs
- ) -> None:
- """Deletes a dataset.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param dataset_name: The dataset name.
- :type dataset_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.delete.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.delete(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 204]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'} # type: ignore
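
The hunk above removes the vendored async `DatasetOperations`. For context, here is a minimal sketch of how a caller might have driven the removed `get` and `delete` coroutines. The aio module path, the client class name, and the `datasets` attribute are assumptions inferred from the docstrings in the deleted code, not verified imports.

```python
# Hypothetical usage sketch for the removed async dataset operations.
# ASSUMPTIONS: the vendored aio client is importable as below and exposes
# the operation group as `client.datasets`.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from data_factory_management_client.aio import DataFactoryManagementClient  # assumed path


async def main() -> None:
    credential = DefaultAzureCredential()
    client = DataFactoryManagementClient(credential, "<subscription-id>")
    try:
        # Per the deleted code, get() returns None on a 304 response
        # (the If-None-Match ETag still matched).
        dataset = await client.datasets.get(
            resource_group_name="my-rg",
            factory_name="myfactory",
            dataset_name="MyDataset",
        )
        if dataset is not None:
            print(dataset.name)
        # delete() treats both 200 and 204 as success and returns None.
        await client.datasets.delete("my-rg", "myfactory", "MyDataset")
    finally:
        await client.close()  # generated aio clients expose async close()
        await credential.close()


asyncio.run(main())
```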
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_exposure_control_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_exposure_control_operations_async.py
deleted file mode 100644
index b20acb1c3c8..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_exposure_control_operations_async.py
+++ /dev/null
@@ -1,241 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar
-import warnings
-
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from ... import models
-
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-
-class ExposureControlOperations:
- """ExposureControlOperations async operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer) -> None:
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- async def get_feature_value(
- self,
- location_id: str,
- feature_name: Optional[str] = None,
- feature_type: Optional[str] = None,
- **kwargs
- ) -> "models.ExposureControlResponse":
- """Get exposure control feature for specific location.
-
- :param location_id: The location identifier.
- :type location_id: str
- :param feature_name: The feature name.
- :type feature_name: str
- :param feature_type: The feature type.
- :type feature_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: ExposureControlResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.ExposureControlResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ExposureControlResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- exposure_control_request = models.ExposureControlRequest(feature_name=feature_name, feature_type=feature_type)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.get_feature_value.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'locationId': self._serialize.url("location_id", location_id, 'str'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(exposure_control_request, 'ExposureControlRequest')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('ExposureControlResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get_feature_value.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/locations/{locationId}/getFeatureValue'} # type: ignore
-
- async def get_feature_value_by_factory(
- self,
- resource_group_name: str,
- factory_name: str,
- feature_name: Optional[str] = None,
- feature_type: Optional[str] = None,
- **kwargs
- ) -> "models.ExposureControlResponse":
- """Get exposure control feature for specific factory.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param feature_name: The feature name.
- :type feature_name: str
- :param feature_type: The feature type.
- :type feature_type: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: ExposureControlResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.ExposureControlResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ExposureControlResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- exposure_control_request = models.ExposureControlRequest(feature_name=feature_name, feature_type=feature_type)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.get_feature_value_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(exposure_control_request, 'ExposureControlRequest')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('ExposureControlResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get_feature_value_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getFeatureValue'} # type: ignore
-
- async def query_feature_value_by_factory(
- self,
- resource_group_name: str,
- factory_name: str,
- exposure_control_requests: List["models.ExposureControlRequest"],
- **kwargs
- ) -> "models.ExposureControlBatchResponse":
- """Get list of exposure control features for specific factory.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param exposure_control_requests: List of exposure control features.
- :type exposure_control_requests: list[~data_factory_management_client.models.ExposureControlRequest]
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: ExposureControlBatchResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.ExposureControlBatchResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ExposureControlBatchResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- exposure_control_batch_request = models.ExposureControlBatchRequest(exposure_control_requests=exposure_control_requests)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.query_feature_value_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(exposure_control_batch_request, 'ExposureControlBatchRequest')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('ExposureControlBatchResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- query_feature_value_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryFeaturesValue'} # type: ignore
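
The deleted `ExposureControlOperations` offered both single and batched feature lookups. A hedged sketch of both calls follows; the `exposure_control` attribute, the module paths, and the example feature name are assumptions rather than confirmed API, while the request/response model names come from the deleted code itself.

```python
# Hypothetical sketch: check one exposure control feature, then batch-query.
# ASSUMPTIONS: client import path, `client.exposure_control` attribute name,
# and the example feature name are illustrative only.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from data_factory_management_client.aio import DataFactoryManagementClient  # assumed path
from data_factory_management_client import models  # assumed path


async def main() -> None:
    credential = DefaultAzureCredential()
    client = DataFactoryManagementClient(credential, "<subscription-id>")
    try:
        single = await client.exposure_control.get_feature_value_by_factory(
            resource_group_name="my-rg",
            factory_name="myfactory",
            feature_name="ADFIntegrationRuntimeSharingRbac",  # example name
            feature_type="Feature",
        )
        print(single.feature_name, single.value)

        # query_feature_value_by_factory wraps the list in an
        # ExposureControlBatchRequest, as the deleted code shows.
        batch = await client.exposure_control.query_feature_value_by_factory(
            resource_group_name="my-rg",
            factory_name="myfactory",
            exposure_control_requests=[
                models.ExposureControlRequest(
                    feature_name="ADFIntegrationRuntimeSharingRbac",
                    feature_type="Feature",
                ),
            ],
        )
        for item in batch.exposure_control_responses:
            print(item.feature_name, item.value)
    finally:
        await client.close()
        await credential.close()


asyncio.run(main())
```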
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_factory_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_factory_operations_async.py
deleted file mode 100644
index 46f37c1a6f7..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_factory_operations_async.py
+++ /dev/null
@@ -1,658 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
-import warnings
-
-from azure.core.async_paging import AsyncItemPaged, AsyncList
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from ... import models
-
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-
-class FactoryOperations:
- """FactoryOperations async operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer) -> None:
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def list(
- self,
- **kwargs
- ) -> AsyncIterable["models.FactoryListResponse"]:
- """Lists factories under the specified subscription.
-
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: An iterator like instance of either FactoryListResponse or the result of cls(response)
- :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.FactoryListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.FactoryListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- async def extract_data(pipeline_response):
- deserialized = self._deserialize('FactoryListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, AsyncList(list_of_elem)
-
- async def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return AsyncItemPaged(
- get_next, extract_data
- )
- list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/factories'} # type: ignore
-
- async def configure_factory_repo(
- self,
- location_id: str,
- factory_resource_id: Optional[str] = None,
- repo_configuration: Optional["models.FactoryRepoConfiguration"] = None,
- **kwargs
- ) -> "models.Factory":
- """Updates a factory's repo information.
-
- :param location_id: The location identifier.
- :type location_id: str
- :param factory_resource_id: The factory resource id.
- :type factory_resource_id: str
- :param repo_configuration: Git repo information of the factory.
- :type repo_configuration: ~data_factory_management_client.models.FactoryRepoConfiguration
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: Factory, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.Factory
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- factory_repo_update = models.FactoryRepoUpdate(factory_resource_id=factory_resource_id, repo_configuration=repo_configuration)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.configure_factory_repo.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'locationId': self._serialize.url("location_id", location_id, 'str'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(factory_repo_update, 'FactoryRepoUpdate')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('Factory', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- configure_factory_repo.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/locations/{locationId}/configureFactoryRepo'} # type: ignore
-
- def list_by_resource_group(
- self,
- resource_group_name: str,
- **kwargs
- ) -> AsyncIterable["models.FactoryListResponse"]:
- """Lists factories.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: An iterator like instance of either FactoryListResponse or the result of cls(response)
- :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.FactoryListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.FactoryListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list_by_resource_group.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- async def extract_data(pipeline_response):
- deserialized = self._deserialize('FactoryListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, AsyncList(list_of_elem)
-
- async def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return AsyncItemPaged(
- get_next, extract_data
- )
- list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories'} # type: ignore
-
- async def create_or_update(
- self,
- resource_group_name: str,
- factory_name: str,
- if_match: Optional[str] = None,
- location: Optional[str] = None,
- tags: Optional[Dict[str, str]] = None,
- identity: Optional["models.FactoryIdentity"] = None,
- repo_configuration: Optional["models.FactoryRepoConfiguration"] = None,
- global_parameters: Optional[Dict[str, "models.GlobalParameterSpecification"]] = None,
- **kwargs
- ) -> "models.Factory":
- """Creates or updates a factory.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param if_match: ETag of the factory entity. Should only be specified for update, for which it
- should match the existing entity, or can be * for an unconditional update.
- :type if_match: str
- :param location: The resource location.
- :type location: str
- :param tags: The resource tags.
- :type tags: dict[str, str]
- :param identity: Managed service identity of the factory.
- :type identity: ~data_factory_management_client.models.FactoryIdentity
- :param repo_configuration: Git repo information of the factory.
- :type repo_configuration: ~data_factory_management_client.models.FactoryRepoConfiguration
- :param global_parameters: List of parameters for factory.
- :type global_parameters: dict[str, ~data_factory_management_client.models.GlobalParameterSpecification]
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: Factory, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.Factory
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- factory = models.Factory(location=location, tags=tags, identity=identity, repo_configuration=repo_configuration, global_parameters=global_parameters)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_or_update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_match is not None:
- header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(factory, 'Factory')
- body_content_kwargs['content'] = body_content
- request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('Factory', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore
-
- async def update(
- self,
- resource_group_name: str,
- factory_name: str,
- tags: Optional[Dict[str, str]] = None,
- identity: Optional["models.FactoryIdentity"] = None,
- **kwargs
- ) -> "models.Factory":
- """Updates a factory.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param tags: The resource tags.
- :type tags: dict[str, str]
- :param identity: Managed service identity of the factory.
- :type identity: ~data_factory_management_client.models.FactoryIdentity
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: Factory, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.Factory
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- factory_update_parameters = models.FactoryUpdateParameters(tags=tags, identity=identity)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(factory_update_parameters, 'FactoryUpdateParameters')
- body_content_kwargs['content'] = body_content
- request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('Factory', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore
-
- async def get(
- self,
- resource_group_name: str,
- factory_name: str,
- if_none_match: Optional[str] = None,
- **kwargs
- ) -> Optional["models.Factory"]:
- """Gets a factory.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param if_none_match: ETag of the factory entity. Should only be specified for get. If the ETag
- matches the existing entity tag, or if * was provided, then no content will be returned.
- :type if_none_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: Factory, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.Factory or None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.Factory"]]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_none_match is not None:
- header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 304]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('Factory', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore
-
- async def delete(
- self,
- resource_group_name: str,
- factory_name: str,
- **kwargs
- ) -> None:
- """Deletes a factory.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.delete.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.delete(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 204]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore
-
- async def get_git_hub_access_token(
- self,
- resource_group_name: str,
- factory_name: str,
- git_hub_access_code: str,
- git_hub_access_token_base_url: str,
- git_hub_client_id: Optional[str] = None,
- **kwargs
- ) -> "models.GitHubAccessTokenResponse":
- """Get GitHub Access Token.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param git_hub_access_code: GitHub access code.
- :type git_hub_access_code: str
- :param git_hub_access_token_base_url: GitHub access token base URL.
- :type git_hub_access_token_base_url: str
- :param git_hub_client_id: GitHub application client ID.
- :type git_hub_client_id: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: GitHubAccessTokenResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.GitHubAccessTokenResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.GitHubAccessTokenResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- git_hub_access_token_request = models.GitHubAccessTokenRequest(git_hub_access_code=git_hub_access_code, git_hub_client_id=git_hub_client_id, git_hub_access_token_base_url=git_hub_access_token_base_url)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.get_git_hub_access_token.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(git_hub_access_token_request, 'GitHubAccessTokenRequest')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('GitHubAccessTokenResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get_git_hub_access_token.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getGitHubAccessToken'} # type: ignore
-
- async def get_data_plane_access(
- self,
- resource_group_name: str,
- factory_name: str,
- permissions: Optional[str] = None,
- access_resource_path: Optional[str] = None,
- profile_name: Optional[str] = None,
- start_time: Optional[str] = None,
- expire_time: Optional[str] = None,
- **kwargs
- ) -> "models.AccessPolicyResponse":
- """Get Data Plane access.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param permissions: The string with permissions for Data Plane access. Currently only 'r' is
- supported, which grants read-only access.
- :type permissions: str
- :param access_resource_path: The resource path to get access to, relative to the factory.
- Currently only the empty string is supported, which corresponds to the factory resource.
- :type access_resource_path: str
- :param profile_name: The name of the profile. Currently only the default is supported. The
- default value is DefaultProfile.
- :type profile_name: str
- :param start_time: Start time for the token. If not specified, the current time will be used.
- :type start_time: str
- :param expire_time: Expiration time for the token. The maximum duration for the token is
- eight hours; by default the token expires in eight hours.
- :type expire_time: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: AccessPolicyResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.AccessPolicyResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.AccessPolicyResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- policy = models.UserAccessPolicy(permissions=permissions, access_resource_path=access_resource_path, profile_name=profile_name, start_time=start_time, expire_time=expire_time)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.get_data_plane_access.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(policy, 'UserAccessPolicy')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('AccessPolicyResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get_data_plane_access.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getDataPlaneAccess'} # type: ignore
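
The removed `FactoryOperations.get_data_plane_access` is the most constrained of the factory operations, so a short sketch may help: per the deleted docstring, only `'r'` permissions and an empty `access_resource_path` are supported today, and omitting the times yields a token starting now that expires in eight hours. Client path and the `factories` attribute name are assumptions.

```python
# Hypothetical sketch of requesting a read-only Data Plane access token.
# ASSUMPTIONS: client import path and `client.factories` attribute name.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from data_factory_management_client.aio import DataFactoryManagementClient  # assumed path


async def main() -> None:
    credential = DefaultAzureCredential()
    client = DataFactoryManagementClient(credential, "<subscription-id>")
    try:
        policy_response = await client.factories.get_data_plane_access(
            resource_group_name="my-rg",
            factory_name="myfactory",
            permissions="r",          # only 'r' is currently supported
            access_resource_path="",  # empty string = the factory resource
            profile_name="DefaultProfile",
            # start_time / expire_time omitted: token starts now and
            # expires in eight hours, per the deleted docstring.
        )
        print(policy_response.access_token)  # attribute name assumed
    finally:
        await client.close()
        await credential.close()


asyncio.run(main())
```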
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_node_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_node_operations_async.py
deleted file mode 100644
index a6022196653..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_node_operations_async.py
+++ /dev/null
@@ -1,301 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import Any, Callable, Dict, Generic, Optional, TypeVar
-import warnings
-
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from ... import models
-
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-
-class IntegrationRuntimeNodeOperations:
- """IntegrationRuntimeNodeOperations async operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer) -> None:
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- async def get(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- node_name: str,
- **kwargs
- ) -> "models.SelfHostedIntegrationRuntimeNode":
- """Gets a self-hosted integration runtime node.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param node_name: The integration runtime node name.
- :type node_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: SelfHostedIntegrationRuntimeNode, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.SelfHostedIntegrationRuntimeNode
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.SelfHostedIntegrationRuntimeNode"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('SelfHostedIntegrationRuntimeNode', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} # type: ignore
-
- async def delete(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- node_name: str,
- **kwargs
- ) -> None:
- """Deletes a self-hosted integration runtime node.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param node_name: The integration runtime node name.
- :type node_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.delete.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.delete(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 204]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} # type: ignore
-
- async def update(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- node_name: str,
- concurrent_jobs_limit: Optional[int] = None,
- **kwargs
- ) -> "models.SelfHostedIntegrationRuntimeNode":
- """Updates a self-hosted integration runtime node.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param node_name: The integration runtime node name.
- :type node_name: str
- :param concurrent_jobs_limit: The number of concurrent jobs permitted to run on the integration
- runtime node. Values between 1 and maxConcurrentJobs (inclusive) are allowed.
- :type concurrent_jobs_limit: int
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: SelfHostedIntegrationRuntimeNode, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.SelfHostedIntegrationRuntimeNode
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.SelfHostedIntegrationRuntimeNode"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- update_integration_runtime_node_request = models.UpdateIntegrationRuntimeNodeRequest(concurrent_jobs_limit=concurrent_jobs_limit)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(update_integration_runtime_node_request, 'UpdateIntegrationRuntimeNodeRequest')
- body_content_kwargs['content'] = body_content
- request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('SelfHostedIntegrationRuntimeNode', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} # type: ignore
-
- async def get_ip_address(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- node_name: str,
- **kwargs
- ) -> "models.IntegrationRuntimeNodeIpAddress":
- """Get the IP address of self-hosted integration runtime node.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param node_name: The integration runtime node name.
- :type node_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: IntegrationRuntimeNodeIpAddress, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.IntegrationRuntimeNodeIpAddress
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeNodeIpAddress"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get_ip_address.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('IntegrationRuntimeNodeIpAddress', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get_ip_address.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}/ipAddress'} # type: ignore
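For orientation, here is a minimal usage sketch for the node operations deleted above. The client module and class names are assumptions inferred from the `~data_factory_management_client` references in the docstrings, the operation-group attribute `integration_runtime_nodes` is likewise assumed, and every resource name is a placeholder.

import asyncio

from azure.identity.aio import DefaultAzureCredential
# Assumed import path for this vendored SDK's async client.
from data_factory_management_client.aio import DataFactoryManagementClient


async def main():
    async with DataFactoryManagementClient(
        credential=DefaultAzureCredential(),
        subscription_id="<subscription-id>",
    ) as client:
        # Cap the node at 4 concurrent jobs; the service enforces the
        # 1..maxConcurrentJobs range noted in the docstring above.
        node = await client.integration_runtime_nodes.update(
            resource_group_name="myResourceGroup",      # placeholder
            factory_name="myFactory",                   # placeholder
            integration_runtime_name="mySelfHostedIR",  # placeholder
            node_name="Node_1",                         # placeholder
            concurrent_jobs_limit=4,
        )
        # POST .../ipAddress to read the node's address.
        ip = await client.integration_runtime_nodes.get_ip_address(
            "myResourceGroup", "myFactory", "mySelfHostedIR", "Node_1"
        )
        print(node.node_name, ip.ip_address)


asyncio.run(main())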
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_object_metadata_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_object_metadata_operations_async.py
deleted file mode 100644
index 70df0716c21..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_object_metadata_operations_async.py
+++ /dev/null
@@ -1,230 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
-import warnings
-
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
-from azure.mgmt.core.exceptions import ARMErrorFormat
-from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
-
-from ... import models
-
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-
-class IntegrationRuntimeObjectMetadataOperations:
- """IntegrationRuntimeObjectMetadataOperations async operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer) -> None:
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- async def _refresh_initial(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- **kwargs
- ) -> Optional["models.SsisObjectMetadataStatusResponse"]:
- cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.SsisObjectMetadataStatusResponse"]]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self._refresh_initial.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 202]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('SsisObjectMetadataStatusResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- _refresh_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/refreshObjectMetadata'} # type: ignore
-
- async def begin_refresh(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- **kwargs
- ) -> AsyncLROPoller["models.SsisObjectMetadataStatusResponse"]:
- """Refresh a SSIS integration runtime object metadata.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: True for ARMPolling, False for no polling, or a
- polling object for personal polling strategy
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
- :return: An instance of AsyncLROPoller that returns either SsisObjectMetadataStatusResponse or the result of cls(response)
- :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.SsisObjectMetadataStatusResponse]
- :raises ~azure.core.exceptions.HttpResponseError:
- """
- polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
- cls = kwargs.pop('cls', None) # type: ClsType["models.SsisObjectMetadataStatusResponse"]
- lro_delay = kwargs.pop(
- 'polling_interval',
- self._config.polling_interval
- )
- cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
- if cont_token is None:
- raw_result = await self._refresh_initial(
- resource_group_name=resource_group_name,
- factory_name=factory_name,
- integration_runtime_name=integration_runtime_name,
- cls=lambda x,y,z: x,
- **kwargs
- )
-
- kwargs.pop('error_map', None)
- kwargs.pop('content_type', None)
-
- def get_long_running_output(pipeline_response):
- deserialized = self._deserialize('SsisObjectMetadataStatusResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
- return deserialized
-
- if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
- elif polling is False: polling_method = AsyncNoPolling()
- else: polling_method = polling
- if cont_token:
- return AsyncLROPoller.from_continuation_token(
- polling_method=polling_method,
- continuation_token=cont_token,
- client=self._client,
- deserialization_callback=get_long_running_output
- )
- else:
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_refresh.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/refreshObjectMetadata'} # type: ignore
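The long-running refresh is consumed through the `AsyncLROPoller` pattern. A sketch, assuming an authenticated async `client` like the one above and an operation-group attribute named `integration_runtime_object_metadata` (an assumption):

async def refresh_ssis_metadata(client):
    # begin_refresh returns an AsyncLROPoller; awaiting .result() polls
    # until the refresh completes and yields the final
    # SsisObjectMetadataStatusResponse.
    poller = await client.integration_runtime_object_metadata.begin_refresh(
        resource_group_name="myResourceGroup",  # placeholder
        factory_name="myFactory",               # placeholder
        integration_runtime_name="mySsisIR",    # placeholder
    )
    status = await poller.result()
    print(status.name, status.status)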
-
- async def get(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- metadata_path: Optional[str] = None,
- **kwargs
- ) -> "models.SsisObjectMetadataListResponse":
- """Get a SSIS integration runtime object metadata by specified path. The return is pageable
- metadata list.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param metadata_path: Metadata path.
- :type metadata_path: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: SsisObjectMetadataListResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.SsisObjectMetadataListResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.SsisObjectMetadataListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- get_metadata_request = models.GetSsisObjectMetadataRequest(metadata_path=metadata_path)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- if get_metadata_request is not None:
- body_content = self._serialize.body(get_metadata_request, 'GetSsisObjectMetadataRequest')
- else:
- body_content = None
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('SsisObjectMetadataListResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getObjectMetadata'} # type: ignore
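Reading the metadata itself is a single call; despite the docstring's mention of a pageable list, this operation returns one `SsisObjectMetadataListResponse` whose `value` holds the items. A sketch under the same `client` assumption:

async def list_ssis_metadata(client):
    listing = await client.integration_runtime_object_metadata.get(
        resource_group_name="myResourceGroup",  # placeholder
        factory_name="myFactory",               # placeholder
        integration_runtime_name="mySsisIR",    # placeholder
        # metadata_path narrows the listing; None is assumed to start
        # at the catalog root.
        metadata_path=None,
    )
    for item in listing.value or []:
        print(item.id, item.name)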
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_operations_async.py
deleted file mode 100644
index 82b285c7a74..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_operations_async.py
+++ /dev/null
@@ -1,1176 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
-import warnings
-
-from azure.core.async_paging import AsyncItemPaged, AsyncList
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
-from azure.mgmt.core.exceptions import ARMErrorFormat
-from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
-
-from ... import models
-
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-
-class IntegrationRuntimeOperations:
- """IntegrationRuntimeOperations async operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer) -> None:
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def list_by_factory(
- self,
- resource_group_name: str,
- factory_name: str,
- **kwargs
- ) -> AsyncIterable["models.IntegrationRuntimeListResponse"]:
- """Lists integration runtimes.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: An iterator-like instance of either IntegrationRuntimeListResponse or the result of cls(response)
- :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.IntegrationRuntimeListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- async def extract_data(pipeline_response):
- deserialized = self._deserialize('IntegrationRuntimeListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, AsyncList(list_of_elem)
-
- async def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return AsyncItemPaged(
- get_next, extract_data
- )
- list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes'} # type: ignore
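`list_by_factory` returns an `AsyncItemPaged`, so the `next_link` plumbing above is consumed with a plain `async for`; each yielded item is an `IntegrationRuntimeResource`. A sketch under the same `client` assumption:

async def print_integration_runtimes(client):
    # Pagination (next_link handling) happens inside AsyncItemPaged.
    async for runtime in client.integration_runtimes.list_by_factory(
        resource_group_name="myResourceGroup",  # placeholder
        factory_name="myFactory",               # placeholder
    ):
        print(runtime.name, runtime.properties.type)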
-
- async def create_or_update(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- properties: "models.IntegrationRuntime",
- if_match: Optional[str] = None,
- **kwargs
- ) -> "models.IntegrationRuntimeResource":
- """Creates or updates an integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param properties: Integration runtime properties.
- :type properties: ~data_factory_management_client.models.IntegrationRuntime
-        :param if_match: ETag of the integration runtime entity. Should only be specified for update,
-         for which it should match the existing entity or can be * for an unconditional update.
- :type if_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: IntegrationRuntimeResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- integration_runtime = models.IntegrationRuntimeResource(properties=properties)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_or_update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_match is not None:
- header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(integration_runtime, 'IntegrationRuntimeResource')
- body_content_kwargs['content'] = body_content
- request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore
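Since `create_or_update` wraps the `properties` argument in an `IntegrationRuntimeResource` itself, callers pass only an `IntegrationRuntime` subtype. A sketch, assuming the models module exposes `SelfHostedIntegrationRuntime` as the public SDK of this vintage does (the import path is likewise an assumption):

from data_factory_management_client import models  # assumed import path


async def create_self_hosted_ir(client):
    ir = await client.integration_runtimes.create_or_update(
        resource_group_name="myResourceGroup",      # placeholder
        factory_name="myFactory",                   # placeholder
        integration_runtime_name="mySelfHostedIR",  # placeholder
        properties=models.SelfHostedIntegrationRuntime(
            description="Self-hosted IR for on-premises sources",
        ),
        if_match="*",  # unconditional update; omit on first create
    )
    print(ir.etag)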
-
- async def get(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- if_none_match: Optional[str] = None,
- **kwargs
- ) -> Optional["models.IntegrationRuntimeResource"]:
- """Gets an integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param if_none_match: ETag of the integration runtime entity. Should only be specified for get.
- If the ETag matches the existing entity tag, or if * was provided, then no content will be
- returned.
- :type if_none_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: IntegrationRuntimeResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource or None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.IntegrationRuntimeResource"]]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_none_match is not None:
- header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 304]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore
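Because 304 is in the accepted status list, a matching `if_none_match` ETag yields `None` rather than an exception. A cache-revalidation sketch under the same `client` assumption:

async def fetch_if_changed(client, cached_etag, cached_resource):
    fresh = await client.integration_runtimes.get(
        resource_group_name="myResourceGroup",      # placeholder
        factory_name="myFactory",                   # placeholder
        integration_runtime_name="mySelfHostedIR",  # placeholder
        if_none_match=cached_etag,
    )
    # None means HTTP 304: the cached copy is still current.
    return cached_resource if fresh is None else fresh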
-
- async def update(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- auto_update: Optional[Union[str, "models.IntegrationRuntimeAutoUpdate"]] = None,
- update_delay_offset: Optional[str] = None,
- **kwargs
- ) -> "models.IntegrationRuntimeResource":
- """Updates an integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param auto_update: Enables or disables the auto-update feature of the self-hosted integration
- runtime. See https://go.microsoft.com/fwlink/?linkid=854189.
- :type auto_update: str or ~data_factory_management_client.models.IntegrationRuntimeAutoUpdate
-        :param update_delay_offset: The time offset (in hours) within the day, e.g., PT03H is 3 hours.
-         The integration runtime auto-update will happen at that time.
- :type update_delay_offset: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: IntegrationRuntimeResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- update_integration_runtime_request = models.UpdateIntegrationRuntimeRequest(auto_update=auto_update, update_delay_offset=update_delay_offset)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(update_integration_runtime_request, 'UpdateIntegrationRuntimeRequest')
- body_content_kwargs['content'] = body_content
- request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore
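A sketch of the PATCH above, under the same `client` assumption; the "On" value mirrors the `IntegrationRuntimeAutoUpdate` choices in the public SDK, though those members are assumed here:

async def schedule_auto_update(client):
    ir = await client.integration_runtimes.update(
        resource_group_name="myResourceGroup",      # placeholder
        factory_name="myFactory",                   # placeholder
        integration_runtime_name="mySelfHostedIR",  # placeholder
        auto_update="On",
        update_delay_offset="PT03H",  # apply updates 3 hours into the day
    )
    print(ir.name, ir.etag)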
-
- async def delete(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- **kwargs
- ) -> None:
- """Deletes an integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.delete.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.delete(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 204]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore
-
- async def get_status(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- **kwargs
- ) -> "models.IntegrationRuntimeStatusResponse":
- """Gets detailed status information for an integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: IntegrationRuntimeStatusResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.IntegrationRuntimeStatusResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get_status.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getStatus'} # type: ignore
-
- async def get_connection_info(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- **kwargs
- ) -> "models.IntegrationRuntimeConnectionInfo":
- """Gets the on-premises integration runtime connection information for encrypting the on-premises
- data source credentials.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: IntegrationRuntimeConnectionInfo, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.IntegrationRuntimeConnectionInfo
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeConnectionInfo"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get_connection_info.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('IntegrationRuntimeConnectionInfo', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get_connection_info.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getConnectionInfo'} # type: ignore
-
- async def regenerate_auth_key(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- key_name: Optional[Union[str, "models.IntegrationRuntimeAuthKeyName"]] = None,
- **kwargs
- ) -> "models.IntegrationRuntimeAuthKeys":
- """Regenerates the authentication key for an integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param key_name: The name of the authentication key to regenerate.
- :type key_name: str or ~data_factory_management_client.models.IntegrationRuntimeAuthKeyName
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: IntegrationRuntimeAuthKeys, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.IntegrationRuntimeAuthKeys
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeAuthKeys"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- regenerate_key_parameters = models.IntegrationRuntimeRegenerateKeyParameters(key_name=key_name)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.regenerate_auth_key.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(regenerate_key_parameters, 'IntegrationRuntimeRegenerateKeyParameters')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('IntegrationRuntimeAuthKeys', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- regenerate_auth_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/regenerateAuthKey'} # type: ignore
-
- async def list_auth_key(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- **kwargs
- ) -> "models.IntegrationRuntimeAuthKeys":
- """Retrieves the authentication keys for an integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: IntegrationRuntimeAuthKeys, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.IntegrationRuntimeAuthKeys
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeAuthKeys"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.list_auth_key.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('IntegrationRuntimeAuthKeys', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- list_auth_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/listAuthKeys'} # type: ignore
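The two key operations above pair naturally for rotation. A sketch under the same `client` assumption; "authKey2" mirrors the `IntegrationRuntimeAuthKeyName` values in the public SDK, assumed here:

async def rotate_secondary_key(client):
    # Read the current keys (POST .../listAuthKeys).
    keys = await client.integration_runtimes.list_auth_key(
        "myResourceGroup", "myFactory", "mySelfHostedIR"  # placeholders
    )
    # Regenerate only the secondary key so nodes registered with the
    # primary key stay connected during rotation.
    rotated = await client.integration_runtimes.regenerate_auth_key(
        "myResourceGroup", "myFactory", "mySelfHostedIR",
        key_name="authKey2",
    )
    assert rotated.auth_key2 != keys.auth_key2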
-
- async def _start_initial(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- **kwargs
- ) -> Optional["models.IntegrationRuntimeStatusResponse"]:
- cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.IntegrationRuntimeStatusResponse"]]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self._start_initial.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 202]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start'} # type: ignore
-
- async def begin_start(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- **kwargs
- ) -> AsyncLROPoller["models.IntegrationRuntimeStatusResponse"]:
- """Starts a ManagedReserved type integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: True for ARMPolling, False for no polling, or a
- polling object for personal polling strategy
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
- :return: An instance of AsyncLROPoller that returns either IntegrationRuntimeStatusResponse or the result of cls(response)
- :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.IntegrationRuntimeStatusResponse]
- :raises ~azure.core.exceptions.HttpResponseError:
- """
- polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"]
- lro_delay = kwargs.pop(
- 'polling_interval',
- self._config.polling_interval
- )
- cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
- if cont_token is None:
- raw_result = await self._start_initial(
- resource_group_name=resource_group_name,
- factory_name=factory_name,
- integration_runtime_name=integration_runtime_name,
- cls=lambda x,y,z: x,
- **kwargs
- )
-
- kwargs.pop('error_map', None)
- kwargs.pop('content_type', None)
-
- def get_long_running_output(pipeline_response):
- deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
- return deserialized
-
- if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
- elif polling is False: polling_method = AsyncNoPolling()
- else: polling_method = polling
- if cont_token:
- return AsyncLROPoller.from_continuation_token(
- polling_method=polling_method,
- continuation_token=cont_token,
- client=self._client,
- deserialization_callback=get_long_running_output
- )
- else:
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start'} # type: ignore
-
- async def _stop_initial(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- **kwargs
- ) -> None:
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self._stop_initial.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 202]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop'} # type: ignore
-
- async def begin_stop(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- **kwargs
- ) -> AsyncLROPoller[None]:
- """Stops a ManagedReserved type integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: True for ARMPolling, False for no polling, or a
- polling object for personal polling strategy
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
- :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
- :rtype: ~azure.core.polling.AsyncLROPoller[None]
- :raises ~azure.core.exceptions.HttpResponseError:
- """
- polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- lro_delay = kwargs.pop(
- 'polling_interval',
- self._config.polling_interval
- )
- cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
- if cont_token is None:
- raw_result = await self._stop_initial(
- resource_group_name=resource_group_name,
- factory_name=factory_name,
- integration_runtime_name=integration_runtime_name,
- cls=lambda x,y,z: x,
- **kwargs
- )
-
- kwargs.pop('error_map', None)
- kwargs.pop('content_type', None)
-
- def get_long_running_output(pipeline_response):
- if cls:
- return cls(pipeline_response, None, {})
-
- if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
- elif polling is False: polling_method = AsyncNoPolling()
- else: polling_method = polling
- if cont_token:
- return AsyncLROPoller.from_continuation_token(
- polling_method=polling_method,
- continuation_token=cont_token,
- client=self._client,
- deserialization_callback=get_long_running_output
- )
- else:
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop'} # type: ignore
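Start and stop follow the same poller pattern and differ only in their results: start resolves to an `IntegrationRuntimeStatusResponse`, stop to `None`. A sketch under the same `client` assumption:

async def bounce_managed_ir(client):
    rg, factory, ir = "myResourceGroup", "myFactory", "myManagedIR"  # placeholders
    start_poller = await client.integration_runtimes.begin_start(rg, factory, ir)
    status = await start_poller.result()
    print(status.properties.state)
    stop_poller = await client.integration_runtimes.begin_stop(rg, factory, ir)
    await stop_poller.result()  # resolves to None on completion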
-
- async def sync_credentials(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- **kwargs
- ) -> None:
- """Force the integration runtime to synchronize credentials across integration runtime nodes, and
- this will override the credentials across all worker nodes with those available on the
- dispatcher node. If you already have the latest credential backup file, you should manually
- import it (preferred) on any self-hosted integration runtime node than using this API directly.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.sync_credentials.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- sync_credentials.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/syncCredentials'} # type: ignore
-
- async def get_monitoring_data(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- **kwargs
- ) -> "models.IntegrationRuntimeMonitoringData":
- """Get the integration runtime monitoring data, which includes the monitor data for all the nodes
- under this integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: IntegrationRuntimeMonitoringData, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.IntegrationRuntimeMonitoringData
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeMonitoringData"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get_monitoring_data.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('IntegrationRuntimeMonitoringData', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get_monitoring_data.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/monitoringData'} # type: ignore
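
A hedged sketch of consuming the monitoring-data call above. The `nodes`, `node_name`, and `available_memory_in_mb` attributes are assumptions based on the public IntegrationRuntimeMonitoringData model, and `client` is presumed to be an authenticated aio client as in the earlier sketch:

async def print_node_monitoring(client) -> None:
    data = await client.integration_runtime.get_monitoring_data(
        resource_group_name="myResourceGroup",
        factory_name="myFactory",
        integration_runtime_name="mySelfHostedIr",
    )
    # Each entry carries per-node metrics for the runtime's worker nodes.
    for node in data.nodes or []:
        print(node.node_name, node.available_memory_in_mb)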
-
- async def upgrade(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- **kwargs
- ) -> None:
- """Upgrade self-hosted integration runtime to latest version if availability.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.upgrade.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- upgrade.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/upgrade'} # type: ignore
-
- async def remove_link(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- linked_factory_name: str,
- **kwargs
- ) -> None:
- """Remove all linked integration runtimes under specific data factory in a self-hosted integration
- runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param linked_factory_name: The data factory name for linked integration runtime.
- :type linked_factory_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- linked_integration_runtime_request = models.LinkedIntegrationRuntimeRequest(linked_factory_name=linked_factory_name)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.remove_link.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(linked_integration_runtime_request, 'LinkedIntegrationRuntimeRequest')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- remove_link.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/removeLinks'} # type: ignore
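
Note that remove_link flattens the LinkedIntegrationRuntimeRequest body into a plain `linked_factory_name` argument, so a caller never builds the model directly. A hedged sketch, under the same client assumption as the stop sketch above:

async def unlink_factory(client) -> None:
    # The request body is assembled internally from linked_factory_name.
    await client.integration_runtime.remove_link(
        resource_group_name="myResourceGroup",
        factory_name="mySharedFactory",
        integration_runtime_name="mySharedIr",
        linked_factory_name="myLinkedFactory",
    )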
-
- async def create_linked_integration_runtime(
- self,
- resource_group_name: str,
- factory_name: str,
- integration_runtime_name: str,
- name: Optional[str] = None,
- subscription_id: Optional[str] = None,
- data_factory_name: Optional[str] = None,
- data_factory_location: Optional[str] = None,
- **kwargs
- ) -> "models.IntegrationRuntimeStatusResponse":
- """Create a linked integration runtime entry in a shared integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param name: The name of the linked integration runtime.
- :type name: str
- :param subscription_id: The ID of the subscription that the linked integration runtime belongs
- to.
- :type subscription_id: str
- :param data_factory_name: The name of the data factory that the linked integration runtime
- belongs to.
- :type data_factory_name: str
- :param data_factory_location: The location of the data factory that the linked integration
- runtime belongs to.
- :type data_factory_location: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: IntegrationRuntimeStatusResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.IntegrationRuntimeStatusResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- create_linked_integration_runtime_request = models.CreateLinkedIntegrationRuntimeRequest(name=name, subscription_id=subscription_id, data_factory_name=data_factory_name, data_factory_location=data_factory_location)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_linked_integration_runtime.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(create_linked_integration_runtime_request, 'CreateLinkedIntegrationRuntimeRequest')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_linked_integration_runtime.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/linkedIntegrationRuntime'} # type: ignore
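
The linking call similarly flattens its optional body fields into keyword arguments and builds the CreateLinkedIntegrationRuntimeRequest internally. A hedged sketch (same client assumption; the placeholder names are illustrative):

async def link_runtime(client) -> None:
    status = await client.integration_runtime.create_linked_integration_runtime(
        resource_group_name="myResourceGroup",
        factory_name="mySharedFactory",
        integration_runtime_name="mySharedIr",
        name="myLinkedIr",
        subscription_id="<linked-subscription-id>",
        data_factory_name="myLinkedFactory",
        data_factory_location="westus2",
    )
    # IntegrationRuntimeStatusResponse carries the runtime name and status.
    print(status.name)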
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_linked_service_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_linked_service_operations_async.py
deleted file mode 100644
index 56e9e6f663a..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_linked_service_operations_async.py
+++ /dev/null
@@ -1,312 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
-import warnings
-
-from azure.core.async_paging import AsyncItemPaged, AsyncList
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from ... import models
-
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-
-class LinkedServiceOperations:
- """LinkedServiceOperations async operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer) -> None:
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def list_by_factory(
- self,
- resource_group_name: str,
- factory_name: str,
- **kwargs
- ) -> AsyncIterable["models.LinkedServiceListResponse"]:
- """Lists linked services.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: An iterator-like instance of either LinkedServiceListResponse or the result of cls(response)
- :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.LinkedServiceListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- async def extract_data(pipeline_response):
- deserialized = self._deserialize('LinkedServiceListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, AsyncList(list_of_elem)
-
- async def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return AsyncItemPaged(
- get_next, extract_data
- )
- list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices'} # type: ignore
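
list_by_factory is a plain def that returns an AsyncItemPaged, so callers iterate it with `async for` rather than awaiting the call itself. A minimal sketch under the same client assumption (the `linked_service` attribute name is assumed):

async def list_linked_services(client) -> None:
    # No await on the call itself; the paging awaits happen inside `async for`,
    # which keeps following nextLink until the service stops returning one.
    async for resource in client.linked_service.list_by_factory(
        resource_group_name="myResourceGroup",
        factory_name="myFactory",
    ):
        print(resource.name)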
-
- async def create_or_update(
- self,
- resource_group_name: str,
- factory_name: str,
- linked_service_name: str,
- properties: "models.LinkedService",
- if_match: Optional[str] = None,
- **kwargs
- ) -> "models.LinkedServiceResource":
- """Creates or updates a linked service.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param linked_service_name: The linked service name.
- :type linked_service_name: str
- :param properties: Properties of linked service.
- :type properties: ~data_factory_management_client.models.LinkedService
- :param if_match: ETag of the linked service entity. Should only be specified for update, in
- which case it should match the existing entity's ETag, or be * for an unconditional update.
- :type if_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: LinkedServiceResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.LinkedServiceResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- linked_service = models.LinkedServiceResource(properties=properties)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_or_update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_match is not None:
- header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(linked_service, 'LinkedServiceResource')
- body_content_kwargs['content'] = body_content
- request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('LinkedServiceResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} # type: ignore
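
A hedged create/update sketch for the call above. `AzureStorageLinkedService` and `SecureString` are assumed to exist in the vendored models module, as they do in the public azure-mgmt-datafactory models:

from data_factory_management_client import models  # assumed vendored path


async def upsert_linked_service(client) -> None:
    props = models.AzureStorageLinkedService(
        connection_string=models.SecureString(value="<connection-string>"),
    )
    resource = await client.linked_service.create_or_update(
        resource_group_name="myResourceGroup",
        factory_name="myFactory",
        linked_service_name="myStorageLs",
        properties=props,
        if_match="*",  # unconditional update; omit when creating
    )
    print(resource.etag)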
-
- async def get(
- self,
- resource_group_name: str,
- factory_name: str,
- linked_service_name: str,
- if_none_match: Optional[str] = None,
- **kwargs
- ) -> Optional["models.LinkedServiceResource"]:
- """Gets a linked service.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param linked_service_name: The linked service name.
- :type linked_service_name: str
- :param if_none_match: ETag of the linked service entity. Should only be specified for get. If
- the ETag matches the existing entity tag, or if * was provided, then no content will be
- returned.
- :type if_none_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: LinkedServiceResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.LinkedServiceResource or None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.LinkedServiceResource"]]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_none_match is not None:
- header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 304]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('LinkedServiceResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} # type: ignore
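
Since a 304 response deserializes to None, conditional reads with if_none_match look like the following sketch (same assumptions as above):

async def get_if_changed(client, cached_etag: str) -> None:
    resource = await client.linked_service.get(
        resource_group_name="myResourceGroup",
        factory_name="myFactory",
        linked_service_name="myStorageLs",
        if_none_match=cached_etag,
    )
    if resource is None:
        print("Unchanged; keep the cached copy.")
    else:
        print("Changed; new ETag:", resource.etag)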
-
- async def delete(
- self,
- resource_group_name: str,
- factory_name: str,
- linked_service_name: str,
- **kwargs
- ) -> None:
- """Deletes a linked service.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param linked_service_name: The linked service name.
- :type linked_service_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.delete.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.delete(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 204]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} # type: ignore
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_private_endpoint_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_private_endpoint_operations_async.py
deleted file mode 100644
index 3a899779963..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_private_endpoint_operations_async.py
+++ /dev/null
@@ -1,336 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import Any, AsyncIterable, Callable, Dict, Generic, List, Optional, TypeVar
-import warnings
-
-from azure.core.async_paging import AsyncItemPaged, AsyncList
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from ... import models
-
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-
-class ManagedPrivateEndpointOperations:
- """ManagedPrivateEndpointOperations async operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer) -> None:
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def list_by_factory(
- self,
- resource_group_name: str,
- factory_name: str,
- managed_virtual_network_name: str,
- **kwargs
- ) -> AsyncIterable["models.ManagedPrivateEndpointListResponse"]:
- """Lists managed private endpoints.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param managed_virtual_network_name: Managed virtual network name.
- :type managed_virtual_network_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: An iterator-like instance of either ManagedPrivateEndpointListResponse or the result of cls(response)
- :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.ManagedPrivateEndpointListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- async def extract_data(pipeline_response):
- deserialized = self._deserialize('ManagedPrivateEndpointListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, AsyncList(list_of_elem)
-
- async def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return AsyncItemPaged(
- get_next, extract_data
- )
- list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints'} # type: ignore
-
- async def create_or_update(
- self,
- resource_group_name: str,
- factory_name: str,
- managed_virtual_network_name: str,
- managed_private_endpoint_name: str,
- if_match: Optional[str] = None,
- connection_state: Optional["models.ConnectionStateProperties"] = None,
- fqdns: Optional[List[str]] = None,
- group_id: Optional[str] = None,
- private_link_resource_id: Optional[str] = None,
- **kwargs
- ) -> "models.ManagedPrivateEndpointResource":
- """Creates or updates a managed private endpoint.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param managed_virtual_network_name: Managed virtual network name.
- :type managed_virtual_network_name: str
- :param managed_private_endpoint_name: Managed private endpoint name.
- :type managed_private_endpoint_name: str
- :param if_match: ETag of the managed private endpoint entity. Should only be specified for
- update, in which case it should match the existing entity's ETag, or be * for an unconditional update.
- :type if_match: str
- :param connection_state: The managed private endpoint connection state.
- :type connection_state: ~data_factory_management_client.models.ConnectionStateProperties
- :param fqdns: Fully qualified domain names.
- :type fqdns: list[str]
- :param group_id: The groupId for which the managed private endpoint is created.
- :type group_id: str
- :param private_link_resource_id: The ARM resource ID of the resource for which the managed
- private endpoint is created.
- :type private_link_resource_id: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: ManagedPrivateEndpointResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.ManagedPrivateEndpointResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- managed_private_endpoint = models.ManagedPrivateEndpointResource(connection_state=connection_state, fqdns=fqdns, group_id=group_id, private_link_resource_id=private_link_resource_id)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_or_update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'),
- 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_match is not None:
- header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(managed_private_endpoint, 'ManagedPrivateEndpointResource')
- body_content_kwargs['content'] = body_content
- request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('ManagedPrivateEndpointResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore
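
As with remove_link earlier, the body model is flattened into keyword arguments here. A hedged sketch of creating an endpoint to a storage account's blob sub-resource (the `managed_private_endpoint` attribute name is assumed):

async def create_blob_endpoint(client) -> None:
    endpoint = await client.managed_private_endpoint.create_or_update(
        resource_group_name="myResourceGroup",
        factory_name="myFactory",
        managed_virtual_network_name="default",
        managed_private_endpoint_name="myBlobPe",
        group_id="blob",
        private_link_resource_id=(
            "/subscriptions/<subscription-id>/resourceGroups/myResourceGroup"
            "/providers/Microsoft.Storage/storageAccounts/mystorageaccount"
        ),
    )
    print(endpoint.name)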
-
- async def get(
- self,
- resource_group_name: str,
- factory_name: str,
- managed_virtual_network_name: str,
- managed_private_endpoint_name: str,
- if_none_match: Optional[str] = None,
- **kwargs
- ) -> "models.ManagedPrivateEndpointResource":
- """Gets a managed private endpoint.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param managed_virtual_network_name: Managed virtual network name.
- :type managed_virtual_network_name: str
- :param managed_private_endpoint_name: Managed private endpoint name.
- :type managed_private_endpoint_name: str
- :param if_none_match: ETag of the managed private endpoint entity. Should only be specified for
- get. If the ETag matches the existing entity tag, or if * was provided, then no content will be
- returned.
- :type if_none_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: ManagedPrivateEndpointResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.ManagedPrivateEndpointResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'),
- 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_none_match is not None:
- header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('ManagedPrivateEndpointResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore
-
- async def delete(
- self,
- resource_group_name: str,
- factory_name: str,
- managed_virtual_network_name: str,
- managed_private_endpoint_name: str,
- **kwargs
- ) -> None:
- """Deletes a managed private endpoint.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param managed_virtual_network_name: Managed virtual network name.
- :type managed_virtual_network_name: str
- :param managed_private_endpoint_name: Managed private endpoint name.
- :type managed_private_endpoint_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.delete.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'),
- 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.delete(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 204]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_virtual_network_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_virtual_network_operations_async.py
deleted file mode 100644
index 2152988d7ef..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_virtual_network_operations_async.py
+++ /dev/null
@@ -1,255 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
-import warnings
-
-from azure.core.async_paging import AsyncItemPaged, AsyncList
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from ... import models
-
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-
-class ManagedVirtualNetworkOperations:
- """ManagedVirtualNetworkOperations async operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer) -> None:
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def list_by_factory(
- self,
- resource_group_name: str,
- factory_name: str,
- **kwargs
- ) -> AsyncIterable["models.ManagedVirtualNetworkListResponse"]:
- """Lists managed Virtual Networks.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: An iterator-like instance of either ManagedVirtualNetworkListResponse or the result of cls(response)
- :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.ManagedVirtualNetworkListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- async def extract_data(pipeline_response):
- deserialized = self._deserialize('ManagedVirtualNetworkListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, AsyncList(list_of_elem)
-
- async def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return AsyncItemPaged(
- get_next, extract_data
- )
- list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks'} # type: ignore
-
- async def create_or_update(
- self,
- resource_group_name: str,
- factory_name: str,
- managed_virtual_network_name: str,
- properties: "models.ManagedVirtualNetwork",
- if_match: Optional[str] = None,
- **kwargs
- ) -> "models.ManagedVirtualNetworkResource":
- """Creates or updates a managed Virtual Network.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param managed_virtual_network_name: Managed virtual network name.
- :type managed_virtual_network_name: str
- :param properties: Managed Virtual Network properties.
- :type properties: ~data_factory_management_client.models.ManagedVirtualNetwork
- :param if_match: ETag of the managed Virtual Network entity. Should only be specified for
- update, in which case it should match the existing entity's ETag, or be * for an unconditional update.
- :type if_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: ManagedVirtualNetworkResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.ManagedVirtualNetworkResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- managed_virtual_network = models.ManagedVirtualNetworkResource(properties=properties)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_or_update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_match is not None:
- header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(managed_virtual_network, 'ManagedVirtualNetworkResource')
- body_content_kwargs['content'] = body_content
- request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('ManagedVirtualNetworkResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}'} # type: ignore
-
- async def get(
- self,
- resource_group_name: str,
- factory_name: str,
- managed_virtual_network_name: str,
- if_none_match: Optional[str] = None,
- **kwargs
- ) -> "models.ManagedVirtualNetworkResource":
- """Gets a managed Virtual Network.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param managed_virtual_network_name: Managed virtual network name.
- :type managed_virtual_network_name: str
- :param if_none_match: ETag of the managed Virtual Network entity. Should only be specified for
- get. If the ETag matches the existing entity tag, or if * was provided, then no content will be
- returned.
- :type if_none_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: ManagedVirtualNetworkResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.ManagedVirtualNetworkResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_none_match is not None:
- header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('ManagedVirtualNetworkResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}'} # type: ignore
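
The managed virtual network operations deleted above implement ETag-based optimistic
concurrency: `create_or_update` sends `If-Match` and `get` sends `If-None-Match`. A
minimal usage sketch against the vendored async client follows; the import path, the
`managed_virtual_network` attribute name, and the `.etag` field on the returned
resource are assumptions for illustration, not confirmed by this diff.

    from azure.identity.aio import DefaultAzureCredential

    # Assumed import path and client name for the vendored async SDK.
    from azext_datafactory.vendored_sdks.datafactory.aio import DataFactoryManagementClient

    async def update_if_unchanged(sub_id, rg, factory, vnet_name, properties):
        async with DefaultAzureCredential() as credential:
            async with DataFactoryManagementClient(credential, sub_id) as client:
                current = await client.managed_virtual_network.get(rg, factory, vnet_name)
                # Resubmit only if nobody modified the entity in the meantime; an
                # ETag mismatch comes back as an HttpResponseError (HTTP 412).
                return await client.managed_virtual_network.create_or_update(
                    rg, factory, vnet_name, properties, if_match=current.etag)
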
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_operation_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_operation_operations_async.py
deleted file mode 100644
index 83206d77039..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_operation_operations_async.py
+++ /dev/null
@@ -1,101 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
-import warnings
-
-from azure.core.async_paging import AsyncItemPaged, AsyncList
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from ... import models
-
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-
-class OperationOperations:
- """OperationOperations async operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer) -> None:
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def list(
- self,
- **kwargs
- ) -> AsyncIterable["models.OperationListResponse"]:
- """Lists the available Azure Data Factory API operations.
-
- :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: An iterator-like instance of either OperationListResponse or the result of cls(response)
- :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.OperationListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.OperationListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list.metadata['url'] # type: ignore
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- async def extract_data(pipeline_response):
- deserialized = self._deserialize('OperationListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, AsyncList(list_of_elem)
-
- async def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return AsyncItemPaged(
- get_next, extract_data
- )
- list.metadata = {'url': '/providers/Microsoft.DataFactory/operations'} # type: ignore
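
The deleted `OperationOperations.list` shows the standard azure-core async paging
shape: `prepare_request` builds the first request or the `next_link` follow-up,
`extract_data` pulls `value` and `next_link` off each deserialized page, and
`AsyncItemPaged(get_next, extract_data)` stitches the pages into a single async
iterator. Callers never see that machinery; a sketch, assuming the client exposes
this group as an `operation` attribute:

    async def list_adf_operation_names(client):
        # AsyncItemPaged follows next_link transparently, yielding individual
        # Operation items rather than OperationListResponse pages.
        return [op.name async for op in client.operation.list()]
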
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_operations_async.py
deleted file mode 100644
index 34c7453f951..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_operations_async.py
+++ /dev/null
@@ -1,405 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
-import warnings
-
-from azure.core.async_paging import AsyncItemPaged, AsyncList
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from ... import models
-
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-
-class PipelineOperations:
- """PipelineOperations async operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer) -> None:
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def list_by_factory(
- self,
- resource_group_name: str,
- factory_name: str,
- **kwargs
- ) -> AsyncIterable["models.PipelineListResponse"]:
- """Lists pipelines.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: An iterator-like instance of either PipelineListResponse or the result of cls(response)
- :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.PipelineListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- async def extract_data(pipeline_response):
- deserialized = self._deserialize('PipelineListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, AsyncList(list_of_elem)
-
- async def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return AsyncItemPaged(
- get_next, extract_data
- )
- list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines'} # type: ignore
-
- async def create_or_update(
- self,
- resource_group_name: str,
- factory_name: str,
- pipeline_name: str,
- pipeline: "models.PipelineResource",
- if_match: Optional[str] = None,
- **kwargs
- ) -> "models.PipelineResource":
- """Creates or updates a pipeline.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param pipeline_name: The pipeline name.
- :type pipeline_name: str
- :param pipeline: Pipeline resource definition.
- :type pipeline: ~data_factory_management_client.models.PipelineResource
- :param if_match: ETag of the pipeline entity. Should only be specified for update, for which
- it should match existing entity or can be * for unconditional update.
- :type if_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: PipelineResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.PipelineResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_or_update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_match is not None:
- header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(pipeline, 'PipelineResource')
- body_content_kwargs['content'] = body_content
- request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('PipelineResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} # type: ignore
-
- async def get(
- self,
- resource_group_name: str,
- factory_name: str,
- pipeline_name: str,
- if_none_match: Optional[str] = None,
- **kwargs
- ) -> Optional["models.PipelineResource"]:
- """Gets a pipeline.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param pipeline_name: The pipeline name.
- :type pipeline_name: str
- :param if_none_match: ETag of the pipeline entity. Should only be specified for get. If the
- ETag matches the existing entity tag, or if * was provided, then no content will be returned.
- :type if_none_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: PipelineResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.PipelineResource or None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.PipelineResource"]]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_none_match is not None:
- header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 304]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('PipelineResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} # type: ignore
-
- async def delete(
- self,
- resource_group_name: str,
- factory_name: str,
- pipeline_name: str,
- **kwargs
- ) -> None:
- """Deletes a pipeline.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param pipeline_name: The pipeline name.
- :type pipeline_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.delete.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.delete(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 204]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} # type: ignore
-
- async def create_run(
- self,
- resource_group_name: str,
- factory_name: str,
- pipeline_name: str,
- reference_pipeline_run_id: Optional[str] = None,
- is_recovery: Optional[bool] = None,
- start_activity_name: Optional[str] = None,
- start_from_failure: Optional[bool] = None,
- parameters: Optional[Dict[str, object]] = None,
- **kwargs
- ) -> "models.CreateRunResponse":
- """Creates a run of a pipeline.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param pipeline_name: The pipeline name.
- :type pipeline_name: str
- :param reference_pipeline_run_id: The pipeline run identifier. If run ID is specified the
- parameters of the specified run will be used to create a new run.
- :type reference_pipeline_run_id: str
- :param is_recovery: Recovery mode flag. If recovery mode is set to true, the specified
- referenced pipeline run and the new run will be grouped under the same groupId.
- :type is_recovery: bool
- :param start_activity_name: In recovery mode, the rerun will start from this activity. If not
- specified, all activities will run.
- :type start_activity_name: str
- :param start_from_failure: In recovery mode, if set to true, the rerun will start from failed
- activities. The property will be used only if startActivityName is not specified.
- :type start_from_failure: bool
- :param parameters: Parameters of the pipeline run. These parameters will be used only if the
- runId is not specified.
- :type parameters: dict[str, object]
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: CreateRunResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.CreateRunResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.CreateRunResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_run.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
- if reference_pipeline_run_id is not None:
- query_parameters['referencePipelineRunId'] = self._serialize.query("reference_pipeline_run_id", reference_pipeline_run_id, 'str')
- if is_recovery is not None:
- query_parameters['isRecovery'] = self._serialize.query("is_recovery", is_recovery, 'bool')
- if start_activity_name is not None:
- query_parameters['startActivityName'] = self._serialize.query("start_activity_name", start_activity_name, 'str')
- if start_from_failure is not None:
- query_parameters['startFromFailure'] = self._serialize.query("start_from_failure", start_from_failure, 'bool')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- if parameters is not None:
- body_content = self._serialize.body(parameters, '{object}')
- else:
- body_content = None
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('CreateRunResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_run.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}/createRun'} # type: ignore
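
One detail of the deleted `PipelineOperations.create_run` worth noting: the recovery
knobs (`reference_pipeline_run_id`, `is_recovery`, `start_activity_name`,
`start_from_failure`) are sent as query parameters, while `parameters` is serialized
as the JSON body and honored only when no reference run id is given. A sketch of a
rerun-from-failed-activities call, with the `pipeline` attribute name on the client
assumed as above:

    async def rerun_from_failure(client, rg, factory, pipeline_name, failed_run_id):
        # Recovery mode groups the new run under the referenced run's groupId;
        # start_from_failure=True skips activities that already succeeded.
        run = await client.pipeline.create_run(
            rg, factory, pipeline_name,
            reference_pipeline_run_id=failed_run_id,
            is_recovery=True,
            start_from_failure=True)
        return run.run_id
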
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_run_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_run_operations_async.py
deleted file mode 100644
index 5cdfd09fe01..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_run_operations_async.py
+++ /dev/null
@@ -1,243 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-import datetime
-from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar
-import warnings
-
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from ... import models
-
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-
-class PipelineRunOperations:
- """PipelineRunOperations async operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer) -> None:
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- async def query_by_factory(
- self,
- resource_group_name: str,
- factory_name: str,
- last_updated_after: datetime.datetime,
- last_updated_before: datetime.datetime,
- continuation_token_parameter: Optional[str] = None,
- filters: Optional[List["models.RunQueryFilter"]] = None,
- order_by: Optional[List["models.RunQueryOrderBy"]] = None,
- **kwargs
- ) -> "models.PipelineRunsQueryResponse":
- """Query pipeline runs in the factory based on input filter conditions.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param last_updated_after: The time at or after which the run event was updated in 'ISO 8601'
- format.
- :type last_updated_after: ~datetime.datetime
- :param last_updated_before: The time at or before which the run event was updated in 'ISO 8601'
- format.
- :type last_updated_before: ~datetime.datetime
- :param continuation_token_parameter: The continuation token for getting the next page of
- results. Null for first page.
- :type continuation_token_parameter: str
- :param filters: List of filters.
- :type filters: list[~data_factory_management_client.models.RunQueryFilter]
-        :param order_by: List of OrderBy options.
- :type order_by: list[~data_factory_management_client.models.RunQueryOrderBy]
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: PipelineRunsQueryResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.PipelineRunsQueryResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineRunsQueryResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.query_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(filter_parameters, 'RunFilterParameters')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('PipelineRunsQueryResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryPipelineRuns'} # type: ignore
-
- async def get(
- self,
- resource_group_name: str,
- factory_name: str,
- run_id: str,
- **kwargs
- ) -> "models.PipelineRun":
- """Get a pipeline run by its run ID.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param run_id: The pipeline run identifier.
- :type run_id: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: PipelineRun, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.PipelineRun
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineRun"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'runId': self._serialize.url("run_id", run_id, 'str'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('PipelineRun', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}'} # type: ignore
-
- async def cancel(
- self,
- resource_group_name: str,
- factory_name: str,
- run_id: str,
- is_recursive: Optional[bool] = None,
- **kwargs
- ) -> None:
- """Cancel a pipeline run by its run ID.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param run_id: The pipeline run identifier.
- :type run_id: str
-        :param is_recursive: If true, cancel all the child pipelines that are triggered by the current
- pipeline.
- :type is_recursive: bool
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.cancel.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'runId': self._serialize.url("run_id", run_id, 'str'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- if is_recursive is not None:
- query_parameters['isRecursive'] = self._serialize.query("is_recursive", is_recursive, 'bool')
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- cancel.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/cancel'} # type: ignore
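
The deleted `PipelineRunOperations.query_by_factory` flattens its request body: the
plain `last_updated_after`/`last_updated_before`/`filters`/`order_by` arguments are
packed into a `RunFilterParameters` model and POSTed. A sketch that queries failed
runs from the last day; the models import path is an assumption, and the
`RunQueryFilter` field values follow the public Data Factory query API:

    import datetime

    # Assumed import path for the vendored models package.
    from azext_datafactory.vendored_sdks.datafactory import models

    async def failed_runs_last_day(client, rg, factory):
        now = datetime.datetime.now(datetime.timezone.utc)
        failed = models.RunQueryFilter(
            operand="Status", operator="Equals", values=["Failed"])
        page = await client.pipeline_run.query_by_factory(
            rg, factory,
            last_updated_after=now - datetime.timedelta(days=1),
            last_updated_before=now,
            filters=[failed])
        # page.value holds PipelineRun objects; feed page.continuation_token back
        # in as continuation_token_parameter to fetch the next page.
        return page.value
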
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_operations_async.py
deleted file mode 100644
index f4669b45bc2..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_operations_async.py
+++ /dev/null
@@ -1,877 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
-import warnings
-
-from azure.core.async_paging import AsyncItemPaged, AsyncList
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
-from azure.mgmt.core.exceptions import ARMErrorFormat
-from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
-
-from ... import models
-
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-
-class TriggerOperations:
- """TriggerOperations async operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer) -> None:
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def list_by_factory(
- self,
- resource_group_name: str,
- factory_name: str,
- **kwargs
- ) -> AsyncIterable["models.TriggerListResponse"]:
- """Lists triggers.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: An iterator-like instance of either TriggerListResponse or the result of cls(response)
- :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_factory_management_client.models.TriggerListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- async def extract_data(pipeline_response):
- deserialized = self._deserialize('TriggerListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, AsyncList(list_of_elem)
-
- async def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return AsyncItemPaged(
- get_next, extract_data
- )
- list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers'} # type: ignore
-
- async def query_by_factory(
- self,
- resource_group_name: str,
- factory_name: str,
- continuation_token_parameter: Optional[str] = None,
- parent_trigger_name: Optional[str] = None,
- **kwargs
- ) -> "models.TriggerQueryResponse":
- """Query triggers.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param continuation_token_parameter: The continuation token for getting the next page of
- results. Null for first page.
- :type continuation_token_parameter: str
- :param parent_trigger_name: The name of the parent TumblingWindowTrigger to get the child rerun
- triggers.
- :type parent_trigger_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: TriggerQueryResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.TriggerQueryResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerQueryResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- filter_parameters = models.TriggerFilterParameters(continuation_token=continuation_token_parameter, parent_trigger_name=parent_trigger_name)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.query_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(filter_parameters, 'TriggerFilterParameters')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('TriggerQueryResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/querytriggers'} # type: ignore
-
- async def create_or_update(
- self,
- resource_group_name: str,
- factory_name: str,
- trigger_name: str,
- properties: "models.Trigger",
- if_match: Optional[str] = None,
- **kwargs
- ) -> "models.TriggerResource":
- """Creates or updates a trigger.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param trigger_name: The trigger name.
- :type trigger_name: str
- :param properties: Properties of the trigger.
- :type properties: ~data_factory_management_client.models.Trigger
- :param if_match: ETag of the trigger entity. Should only be specified for update, for which it
- should match existing entity or can be * for unconditional update.
- :type if_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: TriggerResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.TriggerResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- trigger = models.TriggerResource(properties=properties)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_or_update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_match is not None:
- header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(trigger, 'TriggerResource')
- body_content_kwargs['content'] = body_content
- request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('TriggerResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore
-
- async def get(
- self,
- resource_group_name: str,
- factory_name: str,
- trigger_name: str,
- if_none_match: Optional[str] = None,
- **kwargs
- ) -> Optional["models.TriggerResource"]:
- """Gets a trigger.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param trigger_name: The trigger name.
- :type trigger_name: str
- :param if_none_match: ETag of the trigger entity. Should only be specified for get. If the ETag
- matches the existing entity tag, or if * was provided, then no content will be returned.
- :type if_none_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: TriggerResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.TriggerResource or None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerResource"]]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_none_match is not None:
- header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 304]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('TriggerResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore
-
- async def delete(
- self,
- resource_group_name: str,
- factory_name: str,
- trigger_name: str,
- **kwargs
- ) -> None:
- """Deletes a trigger.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param trigger_name: The trigger name.
- :type trigger_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.delete.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.delete(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 204]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore
-
- async def _subscribe_to_event_initial(
- self,
- resource_group_name: str,
- factory_name: str,
- trigger_name: str,
- **kwargs
- ) -> Optional["models.TriggerSubscriptionOperationStatus"]:
- cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self._subscribe_to_event_initial.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 202]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- _subscribe_to_event_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore
-
- async def begin_subscribe_to_event(
- self,
- resource_group_name: str,
- factory_name: str,
- trigger_name: str,
- **kwargs
- ) -> AsyncLROPoller["models.TriggerSubscriptionOperationStatus"]:
- """Subscribe event trigger to events.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param trigger_name: The trigger name.
- :type trigger_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: True for ARMPolling, False for no polling, or a
- polling object for personal polling strategy
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
- :return: An instance of AsyncLROPoller that returns either TriggerSubscriptionOperationStatus or the result of cls(response)
- :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.TriggerSubscriptionOperationStatus]
- :raises ~azure.core.exceptions.HttpResponseError:
- """
- polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
- cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"]
- lro_delay = kwargs.pop(
- 'polling_interval',
- self._config.polling_interval
- )
- cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
- if cont_token is None:
- raw_result = await self._subscribe_to_event_initial(
- resource_group_name=resource_group_name,
- factory_name=factory_name,
- trigger_name=trigger_name,
- cls=lambda x,y,z: x,
- **kwargs
- )
-
- kwargs.pop('error_map', None)
- kwargs.pop('content_type', None)
-
- def get_long_running_output(pipeline_response):
- deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
- return deserialized
-
- if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
- elif polling is False: polling_method = AsyncNoPolling()
- else: polling_method = polling
- if cont_token:
- return AsyncLROPoller.from_continuation_token(
- polling_method=polling_method,
- continuation_token=cont_token,
- client=self._client,
- deserialization_callback=get_long_running_output
- )
- else:
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_subscribe_to_event.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore
-
- async def get_event_subscription_status(
- self,
- resource_group_name: str,
- factory_name: str,
- trigger_name: str,
- **kwargs
- ) -> "models.TriggerSubscriptionOperationStatus":
- """Get a trigger's event subscription status.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param trigger_name: The trigger name.
- :type trigger_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: TriggerSubscriptionOperationStatus, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.TriggerSubscriptionOperationStatus
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get_event_subscription_status.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get_event_subscription_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/getEventSubscriptionStatus'} # type: ignore
-
- async def _unsubscribe_from_event_initial(
- self,
- resource_group_name: str,
- factory_name: str,
- trigger_name: str,
- **kwargs
- ) -> Optional["models.TriggerSubscriptionOperationStatus"]:
- cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self._unsubscribe_from_event_initial.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 202]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- _unsubscribe_from_event_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore
-
- async def begin_unsubscribe_from_event(
- self,
- resource_group_name: str,
- factory_name: str,
- trigger_name: str,
- **kwargs
- ) -> AsyncLROPoller["models.TriggerSubscriptionOperationStatus"]:
- """Unsubscribe event trigger from events.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param trigger_name: The trigger name.
- :type trigger_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: True for ARMPolling, False for no polling, or a
- polling object for personal polling strategy
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
- :return: An instance of AsyncLROPoller that returns either TriggerSubscriptionOperationStatus or the result of cls(response)
- :rtype: ~azure.core.polling.AsyncLROPoller[~data_factory_management_client.models.TriggerSubscriptionOperationStatus]
- :raises ~azure.core.exceptions.HttpResponseError:
- """
- polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
- cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"]
- lro_delay = kwargs.pop(
- 'polling_interval',
- self._config.polling_interval
- )
- cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
- if cont_token is None:
- raw_result = await self._unsubscribe_from_event_initial(
- resource_group_name=resource_group_name,
- factory_name=factory_name,
- trigger_name=trigger_name,
- cls=lambda x,y,z: x,
- **kwargs
- )
-
- kwargs.pop('error_map', None)
- kwargs.pop('content_type', None)
-
- def get_long_running_output(pipeline_response):
- deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
- return deserialized
-
- if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
- elif polling is False: polling_method = AsyncNoPolling()
- else: polling_method = polling
- if cont_token:
- return AsyncLROPoller.from_continuation_token(
- polling_method=polling_method,
- continuation_token=cont_token,
- client=self._client,
- deserialization_callback=get_long_running_output
- )
- else:
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_unsubscribe_from_event.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore
-
- async def _start_initial(
- self,
- resource_group_name: str,
- factory_name: str,
- trigger_name: str,
- **kwargs
- ) -> None:
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self._start_initial.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start'} # type: ignore
-
- async def begin_start(
- self,
- resource_group_name: str,
- factory_name: str,
- trigger_name: str,
- **kwargs
- ) -> AsyncLROPoller[None]:
- """Starts a trigger.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param trigger_name: The trigger name.
- :type trigger_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: True for ARMPolling, False for no polling, or a
- polling object for personal polling strategy
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
- :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
- :rtype: ~azure.core.polling.AsyncLROPoller[None]
- :raises ~azure.core.exceptions.HttpResponseError:
- """
- polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- lro_delay = kwargs.pop(
- 'polling_interval',
- self._config.polling_interval
- )
- cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
- if cont_token is None:
- raw_result = await self._start_initial(
- resource_group_name=resource_group_name,
- factory_name=factory_name,
- trigger_name=trigger_name,
- cls=lambda x,y,z: x,
- **kwargs
- )
-
- kwargs.pop('error_map', None)
- kwargs.pop('content_type', None)
-
- def get_long_running_output(pipeline_response):
- if cls:
- return cls(pipeline_response, None, {})
-
- if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
- elif polling is False: polling_method = AsyncNoPolling()
- else: polling_method = polling
- if cont_token:
- return AsyncLROPoller.from_continuation_token(
- polling_method=polling_method,
- continuation_token=cont_token,
- client=self._client,
- deserialization_callback=get_long_running_output
- )
- else:
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start'} # type: ignore
-
- async def _stop_initial(
- self,
- resource_group_name: str,
- factory_name: str,
- trigger_name: str,
- **kwargs
- ) -> None:
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self._stop_initial.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop'} # type: ignore
-
- async def begin_stop(
- self,
- resource_group_name: str,
- factory_name: str,
- trigger_name: str,
- **kwargs
- ) -> AsyncLROPoller[None]:
- """Stops a trigger.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param trigger_name: The trigger name.
- :type trigger_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: True for ARMPolling, False for no polling, or a
- polling object for personal polling strategy
- :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
- :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
- :rtype: ~azure.core.polling.AsyncLROPoller[None]
- :raises ~azure.core.exceptions.HttpResponseError:
- """
- polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- lro_delay = kwargs.pop(
- 'polling_interval',
- self._config.polling_interval
- )
- cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
- if cont_token is None:
- raw_result = await self._stop_initial(
- resource_group_name=resource_group_name,
- factory_name=factory_name,
- trigger_name=trigger_name,
- cls=lambda x,y,z: x,
- **kwargs
- )
-
- kwargs.pop('error_map', None)
- kwargs.pop('content_type', None)
-
- def get_long_running_output(pipeline_response):
- if cls:
- return cls(pipeline_response, None, {})
-
- if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
- elif polling is False: polling_method = AsyncNoPolling()
- else: polling_method = polling
- if cont_token:
- return AsyncLROPoller.from_continuation_token(
- polling_method=polling_method,
- continuation_token=cont_token,
- client=self._client,
- deserialization_callback=get_long_running_output
- )
- else:
- return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop'} # type: ignore
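For reference, a minimal sketch of driving the same start/stop LRO pair through the public azure-mgmt-datafactory aio client, assuming it exposes the equivalent `triggers` operation group (resource names below are placeholders):

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.datafactory.aio import DataFactoryManagementClient


async def restart_trigger(subscription_id: str) -> None:
    credential = DefaultAzureCredential()
    # The aio client is an async context manager; trigger calls live on `.triggers`.
    async with DataFactoryManagementClient(credential, subscription_id) as client:
        # begin_stop/begin_start return an AsyncLROPoller; result() awaits the LRO.
        poller = await client.triggers.begin_stop("my-rg", "myfactory", "mytrigger")
        await poller.result()
        poller = await client.triggers.begin_start("my-rg", "myfactory", "mytrigger")
        await poller.result()
    await credential.close()


asyncio.run(restart_trigger("<subscription-id>"))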
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_run_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_run_operations_async.py
deleted file mode 100644
index 3401f9c95c1..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_run_operations_async.py
+++ /dev/null
@@ -1,241 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-import datetime
-from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar
-import warnings
-
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from ... import models
-
-T = TypeVar('T')
-ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
-
-class TriggerRunOperations:
- """TriggerRunOperations async operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer) -> None:
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- async def rerun(
- self,
- resource_group_name: str,
- factory_name: str,
- trigger_name: str,
- run_id: str,
- **kwargs
- ) -> None:
- """Rerun single trigger instance by runId.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param trigger_name: The trigger name.
- :type trigger_name: str
- :param run_id: The pipeline run identifier.
- :type run_id: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.rerun.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- 'runId': self._serialize.url("run_id", run_id, 'str'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- rerun.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/rerun'} # type: ignore
-
- async def cancel(
- self,
- resource_group_name: str,
- factory_name: str,
- trigger_name: str,
- run_id: str,
- **kwargs
- ) -> None:
- """Cancel a single trigger instance by runId.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param trigger_name: The trigger name.
- :type trigger_name: str
- :param run_id: The pipeline run identifier.
- :type run_id: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.cancel.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- 'runId': self._serialize.url("run_id", run_id, 'str'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- cancel.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/cancel'} # type: ignore
-
- async def query_by_factory(
- self,
- resource_group_name: str,
- factory_name: str,
- last_updated_after: datetime.datetime,
- last_updated_before: datetime.datetime,
- continuation_token_parameter: Optional[str] = None,
- filters: Optional[List["models.RunQueryFilter"]] = None,
- order_by: Optional[List["models.RunQueryOrderBy"]] = None,
- **kwargs
- ) -> "models.TriggerRunsQueryResponse":
- """Query trigger runs.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param last_updated_after: The time at or after which the run event was updated in 'ISO 8601'
- format.
- :type last_updated_after: ~datetime.datetime
- :param last_updated_before: The time at or before which the run event was updated in 'ISO 8601'
- format.
- :type last_updated_before: ~datetime.datetime
- :param continuation_token_parameter: The continuation token for getting the next page of
- results. Null for first page.
- :type continuation_token_parameter: str
- :param filters: List of filters.
- :type filters: list[~data_factory_management_client.models.RunQueryFilter]
- :param order_by: List of OrderBy option.
- :type order_by: list[~data_factory_management_client.models.RunQueryOrderBy]
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: TriggerRunsQueryResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.TriggerRunsQueryResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerRunsQueryResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.query_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(filter_parameters, 'RunFilterParameters')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('TriggerRunsQueryResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryTriggerRuns'} # type: ignore
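The filter arguments flattened in the deleted method above travel as a single RunFilterParameters body in the public SDK; for reference, a hedged sketch of the same trigger-run query through the sync client (names are placeholders):

import datetime

from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
from azure.mgmt.datafactory.models import RunFilterParameters

client = DataFactoryManagementClient(DefaultAzureCredential(), "<subscription-id>")
now = datetime.datetime.now(datetime.timezone.utc)
# Trigger runs updated in the last 24 hours, first page (no continuation token).
body = RunFilterParameters(
    last_updated_after=now - datetime.timedelta(days=1),
    last_updated_before=now,
)
response = client.trigger_runs.query_by_factory("my-rg", "myfactory", body)
for run in response.value:
    print(run.trigger_name, run.trigger_run_id, run.status)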
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py
index 1f1ab102631..5f74de04cd1 100644
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py
+++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py
@@ -157,6 +157,9 @@
from ._models_py3 import CreateDataFlowDebugSessionResponse
from ._models_py3 import CreateLinkedIntegrationRuntimeRequest
from ._models_py3 import CreateRunResponse
+ from ._models_py3 import Credential
+ from ._models_py3 import CredentialReference
+ from ._models_py3 import CredentialResource
from ._models_py3 import CustomActivity
from ._models_py3 import CustomActivityReferenceObject
from ._models_py3 import CustomDataSourceLinkedService
@@ -277,6 +280,7 @@
from ._models_py3 import GetSsisObjectMetadataRequest
from ._models_py3 import GitHubAccessTokenRequest
from ._models_py3 import GitHubAccessTokenResponse
+ from ._models_py3 import GitHubClientSecret
from ._models_py3 import GlobalParameterSpecification
from ._models_py3 import GoogleAdWordsLinkedService
from ._models_py3 import GoogleAdWordsObjectDataset
@@ -371,6 +375,7 @@
from ._models_py3 import MagentoLinkedService
from ._models_py3 import MagentoObjectDataset
from ._models_py3 import MagentoSource
+ from ._models_py3 import ManagedIdentityCredential
from ._models_py3 import ManagedIntegrationRuntime
from ._models_py3 import ManagedIntegrationRuntimeError
from ._models_py3 import ManagedIntegrationRuntimeNode
@@ -390,12 +395,14 @@
from ._models_py3 import MarketoLinkedService
from ._models_py3 import MarketoObjectDataset
from ._models_py3 import MarketoSource
+ from ._models_py3 import MetadataItem
from ._models_py3 import MicrosoftAccessLinkedService
from ._models_py3 import MicrosoftAccessSink
from ._models_py3 import MicrosoftAccessSource
from ._models_py3 import MicrosoftAccessTableDataset
from ._models_py3 import MongoDbAtlasCollectionDataset
from ._models_py3 import MongoDbAtlasLinkedService
+ from ._models_py3 import MongoDbAtlasSink
from ._models_py3 import MongoDbAtlasSource
from ._models_py3 import MongoDbCollectionDataset
from ._models_py3 import MongoDbCursorMethodsProperties
@@ -403,6 +410,7 @@
from ._models_py3 import MongoDbSource
from ._models_py3 import MongoDbV2CollectionDataset
from ._models_py3 import MongoDbV2LinkedService
+ from ._models_py3 import MongoDbV2Sink
from ._models_py3 import MongoDbV2Source
from ._models_py3 import MultiplePipelineTrigger
from ._models_py3 import MySqlLinkedService
@@ -551,6 +559,7 @@
from ._models_py3 import ServiceNowLinkedService
from ._models_py3 import ServiceNowObjectDataset
from ._models_py3 import ServiceNowSource
+ from ._models_py3 import ServicePrincipalCredential
from ._models_py3 import SetVariableActivity
from ._models_py3 import SftpLocation
from ._models_py3 import SftpReadSettings
@@ -575,6 +584,7 @@
from ._models_py3 import SqlAlwaysEncryptedProperties
from ._models_py3 import SqlDwSink
from ._models_py3 import SqlDwSource
+ from ._models_py3 import SqlDwUpsertSettings
from ._models_py3 import SqlMiSink
from ._models_py3 import SqlMiSource
from ._models_py3 import SqlPartitionSettings
@@ -585,6 +595,7 @@
from ._models_py3 import SqlServerTableDataset
from ._models_py3 import SqlSink
from ._models_py3 import SqlSource
+ from ._models_py3 import SqlUpsertSettings
from ._models_py3 import SquareLinkedService
from ._models_py3 import SquareObjectDataset
from ._models_py3 import SquareSource
@@ -822,6 +833,9 @@
from ._models import CreateDataFlowDebugSessionResponse # type: ignore
from ._models import CreateLinkedIntegrationRuntimeRequest # type: ignore
from ._models import CreateRunResponse # type: ignore
+ from ._models import Credential # type: ignore
+ from ._models import CredentialReference # type: ignore
+ from ._models import CredentialResource # type: ignore
from ._models import CustomActivity # type: ignore
from ._models import CustomActivityReferenceObject # type: ignore
from ._models import CustomDataSourceLinkedService # type: ignore
@@ -942,6 +956,7 @@
from ._models import GetSsisObjectMetadataRequest # type: ignore
from ._models import GitHubAccessTokenRequest # type: ignore
from ._models import GitHubAccessTokenResponse # type: ignore
+ from ._models import GitHubClientSecret # type: ignore
from ._models import GlobalParameterSpecification # type: ignore
from ._models import GoogleAdWordsLinkedService # type: ignore
from ._models import GoogleAdWordsObjectDataset # type: ignore
@@ -1036,6 +1051,7 @@
from ._models import MagentoLinkedService # type: ignore
from ._models import MagentoObjectDataset # type: ignore
from ._models import MagentoSource # type: ignore
+ from ._models import ManagedIdentityCredential # type: ignore
from ._models import ManagedIntegrationRuntime # type: ignore
from ._models import ManagedIntegrationRuntimeError # type: ignore
from ._models import ManagedIntegrationRuntimeNode # type: ignore
@@ -1055,12 +1071,14 @@
from ._models import MarketoLinkedService # type: ignore
from ._models import MarketoObjectDataset # type: ignore
from ._models import MarketoSource # type: ignore
+ from ._models import MetadataItem # type: ignore
from ._models import MicrosoftAccessLinkedService # type: ignore
from ._models import MicrosoftAccessSink # type: ignore
from ._models import MicrosoftAccessSource # type: ignore
from ._models import MicrosoftAccessTableDataset # type: ignore
from ._models import MongoDbAtlasCollectionDataset # type: ignore
from ._models import MongoDbAtlasLinkedService # type: ignore
+ from ._models import MongoDbAtlasSink # type: ignore
from ._models import MongoDbAtlasSource # type: ignore
from ._models import MongoDbCollectionDataset # type: ignore
from ._models import MongoDbCursorMethodsProperties # type: ignore
@@ -1068,6 +1086,7 @@
from ._models import MongoDbSource # type: ignore
from ._models import MongoDbV2CollectionDataset # type: ignore
from ._models import MongoDbV2LinkedService # type: ignore
+ from ._models import MongoDbV2Sink # type: ignore
from ._models import MongoDbV2Source # type: ignore
from ._models import MultiplePipelineTrigger # type: ignore
from ._models import MySqlLinkedService # type: ignore
@@ -1216,6 +1235,7 @@
from ._models import ServiceNowLinkedService # type: ignore
from ._models import ServiceNowObjectDataset # type: ignore
from ._models import ServiceNowSource # type: ignore
+ from ._models import ServicePrincipalCredential # type: ignore
from ._models import SetVariableActivity # type: ignore
from ._models import SftpLocation # type: ignore
from ._models import SftpReadSettings # type: ignore
@@ -1240,6 +1260,7 @@
from ._models import SqlAlwaysEncryptedProperties # type: ignore
from ._models import SqlDwSink # type: ignore
from ._models import SqlDwSource # type: ignore
+ from ._models import SqlDwUpsertSettings # type: ignore
from ._models import SqlMiSink # type: ignore
from ._models import SqlMiSource # type: ignore
from ._models import SqlPartitionSettings # type: ignore
@@ -1250,6 +1271,7 @@
from ._models import SqlServerTableDataset # type: ignore
from ._models import SqlSink # type: ignore
from ._models import SqlSource # type: ignore
+ from ._models import SqlUpsertSettings # type: ignore
from ._models import SquareLinkedService # type: ignore
from ._models import SquareObjectDataset # type: ignore
from ._models import SquareSource # type: ignore
@@ -1356,7 +1378,6 @@
DependencyCondition,
DynamicsAuthenticationType,
DynamicsDeploymentType,
- DynamicsServicePrincipalCredentialType,
DynamicsSinkWriteBehavior,
EventSubscriptionStatus,
FactoryIdentityType,
@@ -1410,12 +1431,15 @@
SapTablePartitionOption,
SelfHostedIntegrationRuntimeNodeStatus,
ServiceNowAuthenticationType,
+ ServicePrincipalCredentialType,
SftpAuthenticationType,
SparkAuthenticationType,
SparkServerType,
SparkThriftTransportProtocol,
SqlAlwaysEncryptedAkvAuthType,
+ SqlDwWriteBehaviorEnum,
SqlPartitionOption,
+ SqlWriteBehaviorEnum,
SsisLogLocationType,
SsisObjectMetadataType,
SsisPackageLocationType,
@@ -1583,6 +1607,9 @@
'CreateDataFlowDebugSessionResponse',
'CreateLinkedIntegrationRuntimeRequest',
'CreateRunResponse',
+ 'Credential',
+ 'CredentialReference',
+ 'CredentialResource',
'CustomActivity',
'CustomActivityReferenceObject',
'CustomDataSourceLinkedService',
@@ -1703,6 +1730,7 @@
'GetSsisObjectMetadataRequest',
'GitHubAccessTokenRequest',
'GitHubAccessTokenResponse',
+ 'GitHubClientSecret',
'GlobalParameterSpecification',
'GoogleAdWordsLinkedService',
'GoogleAdWordsObjectDataset',
@@ -1797,6 +1825,7 @@
'MagentoLinkedService',
'MagentoObjectDataset',
'MagentoSource',
+ 'ManagedIdentityCredential',
'ManagedIntegrationRuntime',
'ManagedIntegrationRuntimeError',
'ManagedIntegrationRuntimeNode',
@@ -1816,12 +1845,14 @@
'MarketoLinkedService',
'MarketoObjectDataset',
'MarketoSource',
+ 'MetadataItem',
'MicrosoftAccessLinkedService',
'MicrosoftAccessSink',
'MicrosoftAccessSource',
'MicrosoftAccessTableDataset',
'MongoDbAtlasCollectionDataset',
'MongoDbAtlasLinkedService',
+ 'MongoDbAtlasSink',
'MongoDbAtlasSource',
'MongoDbCollectionDataset',
'MongoDbCursorMethodsProperties',
@@ -1829,6 +1860,7 @@
'MongoDbSource',
'MongoDbV2CollectionDataset',
'MongoDbV2LinkedService',
+ 'MongoDbV2Sink',
'MongoDbV2Source',
'MultiplePipelineTrigger',
'MySqlLinkedService',
@@ -1977,6 +2009,7 @@
'ServiceNowLinkedService',
'ServiceNowObjectDataset',
'ServiceNowSource',
+ 'ServicePrincipalCredential',
'SetVariableActivity',
'SftpLocation',
'SftpReadSettings',
@@ -2001,6 +2034,7 @@
'SqlAlwaysEncryptedProperties',
'SqlDwSink',
'SqlDwSource',
+ 'SqlDwUpsertSettings',
'SqlMiSink',
'SqlMiSource',
'SqlPartitionSettings',
@@ -2011,6 +2045,7 @@
'SqlServerTableDataset',
'SqlSink',
'SqlSource',
+ 'SqlUpsertSettings',
'SquareLinkedService',
'SquareObjectDataset',
'SquareSource',
@@ -2115,7 +2150,6 @@
'DependencyCondition',
'DynamicsAuthenticationType',
'DynamicsDeploymentType',
- 'DynamicsServicePrincipalCredentialType',
'DynamicsSinkWriteBehavior',
'EventSubscriptionStatus',
'FactoryIdentityType',
@@ -2169,12 +2203,15 @@
'SapTablePartitionOption',
'SelfHostedIntegrationRuntimeNodeStatus',
'ServiceNowAuthenticationType',
+ 'ServicePrincipalCredentialType',
'SftpAuthenticationType',
'SparkAuthenticationType',
'SparkServerType',
'SparkThriftTransportProtocol',
'SqlAlwaysEncryptedAkvAuthType',
+ 'SqlDwWriteBehaviorEnum',
'SqlPartitionOption',
+ 'SqlWriteBehaviorEnum',
'SsisLogLocationType',
'SsisObjectMetadataType',
'SsisPackageLocationType',
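The conditional py3/py2 import blocks and `__all__` above must stay in sync; a quick smoke test over the names this change adds (the module path assumes the vendored layout, and the check itself is illustrative):

from azext_datafactory.vendored_sdks.datafactory import models

# Every name added to the import blocks must also resolve as a module attribute.
for name in (
    "Credential", "CredentialReference", "CredentialResource",
    "GitHubClientSecret", "ManagedIdentityCredential", "MetadataItem",
    "MongoDbAtlasSink", "MongoDbV2Sink", "ServicePrincipalCredential",
    "SqlDwUpsertSettings", "SqlUpsertSettings",
    "ServicePrincipalCredentialType", "SqlDwWriteBehaviorEnum", "SqlWriteBehaviorEnum",
):
    assert hasattr(models, name), f"{name} not exported from models"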
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py
index 1e1c0d92c7d..4d250610be9 100644
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py
+++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py
@@ -77,14 +77,16 @@ class CassandraSourceReadConsistencyLevels(with_metaclass(_CaseInsensitiveEnumMe
LOCAL_SERIAL = "LOCAL_SERIAL"
class CompressionCodec(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """All available compressionCodec values.
+ """
NONE = "none"
- GZIP = "gzip"
- SNAPPY = "snappy"
LZO = "lzo"
BZIP2 = "bzip2"
+ GZIP = "gzip"
DEFLATE = "deflate"
ZIP_DEFLATE = "zipDeflate"
+ SNAPPY = "snappy"
LZ4 = "lz4"
TAR = "tar"
TAR_G_ZIP = "tarGZip"
@@ -174,9 +176,7 @@ class DependencyCondition(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
COMPLETED = "Completed"
class DynamicsAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
- """The authentication type to connect to Dynamics server. 'Office365' for online scenario, 'Ifd'
- for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in
- online scenario. Type: string (or Expression with resultType string).
+ """All available dynamicsAuthenticationType values.
"""
OFFICE365 = "Office365"
@@ -184,23 +184,12 @@ class DynamicsAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, E
AAD_SERVICE_PRINCIPAL = "AADServicePrincipal"
class DynamicsDeploymentType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
- """The deployment type of the Dynamics instance. 'Online' for Dynamics Online and
- 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or Expression with
- resultType string).
+ """All available dynamicsDeploymentType values.
"""
ONLINE = "Online"
ON_PREMISES_WITH_IFD = "OnPremisesWithIfd"
-class DynamicsServicePrincipalCredentialType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
- """The service principal credential type to use in Server-To-Server authentication.
- 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or
- Expression with resultType string).
- """
-
- SERVICE_PRINCIPAL_KEY = "ServicePrincipalKey"
- SERVICE_PRINCIPAL_CERT = "ServicePrincipalCert"
-
class DynamicsSinkWriteBehavior(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
"""Defines values for DynamicsSinkWriteBehavior.
"""
@@ -267,7 +256,7 @@ class HBaseAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum
BASIC = "Basic"
class HdiNodeTypes(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
- """The node types on which the script action should be executed.
+ """All available HdiNodeTypes values.
"""
HEADNODE = "Headnode"
@@ -417,8 +406,7 @@ class JsonFormatFilePattern(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum))
ARRAY_OF_OBJECTS = "arrayOfObjects"
class JsonWriteFilePattern(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
- """File pattern of JSON. This setting controls the way a collection of JSON objects will be
- treated. The default value is 'setOfObjects'. It is case-sensitive.
+ """All available filePatterns.
"""
SET_OF_OBJECTS = "setOfObjects"
@@ -661,6 +649,13 @@ class ServiceNowAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str,
BASIC = "Basic"
O_AUTH2 = "OAuth2"
+class ServicePrincipalCredentialType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """All available servicePrincipalCredentialType values.
+ """
+
+ SERVICE_PRINCIPAL_KEY = "ServicePrincipalKey"
+ SERVICE_PRINCIPAL_CERT = "ServicePrincipalCert"
+
class SftpAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
"""The authentication type to be used to connect to the FTP server.
"""
@@ -702,6 +697,13 @@ class SqlAlwaysEncryptedAkvAuthType(with_metaclass(_CaseInsensitiveEnumMeta, str
SERVICE_PRINCIPAL = "ServicePrincipal"
MANAGED_IDENTITY = "ManagedIdentity"
+class SqlDwWriteBehaviorEnum(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Specify the write behavior when copying data into sql dw.
+ """
+
+ INSERT = "Insert"
+ UPSERT = "Upsert"
+
class SqlPartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
"""The partition mechanism that will be used for Sql read in parallel.
"""
@@ -710,6 +712,14 @@ class SqlPartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
PHYSICAL_PARTITIONS_OF_TABLE = "PhysicalPartitionsOfTable"
DYNAMIC_RANGE = "DynamicRange"
+class SqlWriteBehaviorEnum(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Specify the write behavior when copying data into sql.
+ """
+
+ INSERT = "Insert"
+ UPSERT = "Upsert"
+ STORED_PROCEDURE = "StoredProcedure"
+
class SsisLogLocationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
"""The type of SSIS log location.
"""
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py
index e97fd0ab305..f4afaf3b9ad 100644
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py
+++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py
@@ -634,6 +634,9 @@ class CopySource(msrest.serialization.Model):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
"""
_validation = {
@@ -646,6 +649,7 @@ class CopySource(msrest.serialization.Model):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
}
_subtype_map = {
@@ -662,6 +666,7 @@ def __init__(
self.source_retry_count = kwargs.get('source_retry_count', None)
self.source_retry_wait = kwargs.get('source_retry_wait', None)
self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None)
+ self.disable_metrics_collection = kwargs.get('disable_metrics_collection', None)
class TabularSource(CopySource):
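`disableMetricsCollection` is threaded through every copy source, sink, and store-settings class in the hunks that follow; a hedged sketch of the flag on a concrete source via the public SDK (the property ships only in SDK versions generated from this API surface):

from azure.mgmt.datafactory.models import BlobSource

source = BlobSource(
    max_concurrent_connections=8,
    disable_metrics_collection=True,  # opt out of per-run data store metrics
)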
@@ -686,6 +691,9 @@ class TabularSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -704,6 +712,7 @@ class TabularSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
}
@@ -741,6 +750,9 @@ class AmazonMwsSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -762,6 +774,7 @@ class AmazonMwsSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -868,6 +881,9 @@ class AmazonRedshiftSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -892,6 +908,7 @@ class AmazonRedshiftSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -1156,6 +1173,9 @@ class StoreReadSettings(msrest.serialization.Model):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
"""
_validation = {
@@ -1166,6 +1186,7 @@ class StoreReadSettings(msrest.serialization.Model):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
}
_subtype_map = {
@@ -1180,6 +1201,7 @@ def __init__(
self.additional_properties = kwargs.get('additional_properties', None)
self.type = 'StoreReadSettings' # type: str
self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None)
+ self.disable_metrics_collection = kwargs.get('disable_metrics_collection', None)
class AmazonS3CompatibleReadSettings(StoreReadSettings):
@@ -1195,6 +1217,9 @@ class AmazonS3CompatibleReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -1235,6 +1260,7 @@ class AmazonS3CompatibleReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -1490,6 +1516,9 @@ class AmazonS3ReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -1530,6 +1559,7 @@ class AmazonS3ReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -1664,10 +1694,9 @@ class AvroDataset(Dataset):
:type folder: ~data_factory_management_client.models.DatasetFolder
:param location: The location of the avro storage.
:type location: ~data_factory_management_client.models.DatasetLocation
- :param avro_compression_codec: Possible values include: "none", "deflate", "snappy", "xz",
- "bzip2".
- :type avro_compression_codec: str or
- ~data_factory_management_client.models.AvroCompressionCodec
+ :param avro_compression_codec: The data compression codec (avroCompressionCodec). Type: string
+ (or Expression with resultType string).
+ :type avro_compression_codec: object
:param avro_compression_level:
:type avro_compression_level: int
"""
@@ -1689,7 +1718,7 @@ class AvroDataset(Dataset):
'annotations': {'key': 'annotations', 'type': '[object]'},
'folder': {'key': 'folder', 'type': 'DatasetFolder'},
'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'},
- 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'str'},
+ 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'object'},
'avro_compression_level': {'key': 'typeProperties.avroCompressionLevel', 'type': 'int'},
}
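Loosening avro_compression_codec from the AvroCompressionCodec enum to object means the property
can now carry either a literal codec name or a Data Factory expression. A hedged sketch (dataset
plumbing such as linked_service_name and location elided):

literal_codec = 'snappy'                                              # plain string, as before
dynamic_codec = {'value': '@dataset().codec', 'type': 'Expression'}   # expression, newly possible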
@@ -1788,7 +1817,7 @@ class CopySink(msrest.serialization.Model):
"""A copy activity sink.
You probably want to use the sub-classes and not this class directly. Known
- sub-classes are: AvroSink, AzureBlobFsSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureDatabricksDeltaLakeSink, AzureMySqlSink, AzurePostgreSqlSink, AzureQueueSink, AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDbMongoDbApiSink, CosmosDbSqlApiSink, DelimitedTextSink, DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SqlDwSink, SqlMiSink, SqlServerSink, SqlSink.
+ sub-classes are: AvroSink, AzureBlobFsSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureDatabricksDeltaLakeSink, AzureMySqlSink, AzurePostgreSqlSink, AzureQueueSink, AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDbMongoDbApiSink, CosmosDbSqlApiSink, DelimitedTextSink, DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, MongoDbAtlasSink, MongoDbV2Sink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SqlDwSink, SqlMiSink, SqlServerSink, SqlSink.
All required parameters must be populated in order to send to Azure.
@@ -1812,6 +1841,9 @@ class CopySink(msrest.serialization.Model):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
"""
_validation = {
@@ -1826,10 +1858,11 @@ class CopySink(msrest.serialization.Model):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
}
_subtype_map = {
- 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFsSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureDatabricksDeltaLakeSink': 'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSqlSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'RestSink': 'RestSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDwSink', 'SqlMISink': 'SqlMiSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink'}
+ 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFsSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureDatabricksDeltaLakeSink': 'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSqlSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'MongoDbAtlasSink': 'MongoDbAtlasSink', 'MongoDbV2Sink': 'MongoDbV2Sink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'RestSink': 'RestSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDwSink', 'SqlMISink': 'SqlMiSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink'}
}
def __init__(
@@ -1844,6 +1877,7 @@ def __init__(
self.sink_retry_count = kwargs.get('sink_retry_count', None)
self.sink_retry_wait = kwargs.get('sink_retry_wait', None)
self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None)
+ self.disable_metrics_collection = kwargs.get('disable_metrics_collection', None)
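The two new `_subtype_map` entries are what let msrest resolve the 'type' discriminator to the new
sink classes during deserialization. A sketch using msrest's Model.deserialize, assuming the
MongoDbAtlasSink model added elsewhere in this change is exported from the same module:

from azext_datafactory.vendored_sdks.datafactory import models

sink = models.CopySink.deserialize({
    'type': 'MongoDbAtlasSink',          # discriminator registered in this change
    'disableMetricsCollection': False,
})
# Resolution should land on the concrete subclass, not the base:
assert isinstance(sink, models.MongoDbAtlasSink)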
class AvroSink(CopySink):
@@ -1871,6 +1905,9 @@ class AvroSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: Avro store settings.
:type store_settings: ~data_factory_management_client.models.StoreWriteSettings
:param format_settings: Avro format settings.
@@ -1889,6 +1926,7 @@ class AvroSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'},
'format_settings': {'key': 'formatSettings', 'type': 'AvroWriteSettings'},
}
@@ -1922,6 +1960,9 @@ class AvroSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: Avro store settings.
:type store_settings: ~data_factory_management_client.models.StoreReadSettings
:param additional_columns: Specifies the additional columns to be added to source data. Type:
@@ -1939,6 +1980,7 @@ class AvroSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
}
@@ -2135,6 +2177,8 @@ class AzureBatchLinkedService(LinkedService):
encrypted using the integration runtime credential manager. Type: string (or Expression with
resultType string).
:type encrypted_credential: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -2158,6 +2202,7 @@ class AzureBatchLinkedService(LinkedService):
'pool_name': {'key': 'typeProperties.poolName', 'type': 'object'},
'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -2172,6 +2217,7 @@ def __init__(
self.pool_name = kwargs['pool_name']
self.linked_service_name = kwargs['linked_service_name']
self.encrypted_credential = kwargs.get('encrypted_credential', None)
+ self.credential = kwargs.get('credential', None)
class AzureBlobDataset(Dataset):
@@ -2374,6 +2420,8 @@ class AzureBlobFsLinkedService(LinkedService):
encrypted using the integration runtime credential manager. Type: string (or Expression with
resultType string).
:type encrypted_credential: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -2395,6 +2443,7 @@ class AzureBlobFsLinkedService(LinkedService):
'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -2410,6 +2459,7 @@ def __init__(
self.tenant = kwargs.get('tenant', None)
self.azure_cloud_type = kwargs.get('azure_cloud_type', None)
self.encrypted_credential = kwargs.get('encrypted_credential', None)
+ self.credential = kwargs.get('credential', None)
class AzureBlobFsLocation(DatasetLocation):
@@ -2467,6 +2517,9 @@ class AzureBlobFsReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -2504,6 +2557,7 @@ class AzureBlobFsReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -2557,8 +2611,14 @@ class AzureBlobFsSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
+ :param metadata: Specify the custom metadata to be added to sink data. Type: array of objects
+ (or Expression with resultType array of objects).
+ :type metadata: list[~data_factory_management_client.models.MetadataItem]
"""
_validation = {
@@ -2573,7 +2633,9 @@ class AzureBlobFsSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
+ 'metadata': {'key': 'metadata', 'type': '[MetadataItem]'},
}
def __init__(
@@ -2583,6 +2645,7 @@ def __init__(
super(AzureBlobFsSink, self).__init__(**kwargs)
self.type = 'AzureBlobFSSink' # type: str
self.copy_behavior = kwargs.get('copy_behavior', None)
+ self.metadata = kwargs.get('metadata', None)
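The metadata list lets the sink stamp custom metadata on written files. A minimal sketch, assuming
MetadataItem takes name/value pairs whose values may be literals or expressions:

from azext_datafactory.vendored_sdks.datafactory import models

sink = models.AzureBlobFsSink(
    copy_behavior='PreserveHierarchy',
    metadata=[
        models.MetadataItem(name='ingestedBy', value='adf-copy'),
        models.MetadataItem(name='runId',
                            value={'value': '@pipeline().RunId', 'type': 'Expression'}),
    ],
)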
class AzureBlobFsSource(CopySource):
@@ -2604,6 +2667,9 @@ class AzureBlobFsSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType
boolean).
:type treat_empty_as_null: object
@@ -2625,6 +2691,7 @@ class AzureBlobFsSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'},
'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
@@ -2657,6 +2724,9 @@ class StoreWriteSettings(msrest.serialization.Model):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
"""
@@ -2669,6 +2739,7 @@ class StoreWriteSettings(msrest.serialization.Model):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
}
@@ -2684,6 +2755,7 @@ def __init__(
self.additional_properties = kwargs.get('additional_properties', None)
self.type = 'StoreWriteSettings' # type: str
self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None)
+ self.disable_metrics_collection = kwargs.get('disable_metrics_collection', None)
self.copy_behavior = kwargs.get('copy_behavior', None)
@@ -2700,6 +2772,9 @@ class AzureBlobFsWriteSettings(StoreWriteSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
:param block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer
@@ -2715,6 +2790,7 @@ class AzureBlobFsWriteSettings(StoreWriteSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'},
}
@@ -2781,6 +2857,8 @@ class AzureBlobStorageLinkedService(LinkedService):
encrypted using the integration runtime credential manager. Type: string (or Expression with
resultType string).
:type encrypted_credential: str
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -2805,6 +2883,7 @@ class AzureBlobStorageLinkedService(LinkedService):
'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'},
'account_kind': {'key': 'typeProperties.accountKind', 'type': 'str'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -2824,6 +2903,7 @@ def __init__(
self.azure_cloud_type = kwargs.get('azure_cloud_type', None)
self.account_kind = kwargs.get('account_kind', None)
self.encrypted_credential = kwargs.get('encrypted_credential', None)
+ self.credential = kwargs.get('credential', None)
class AzureBlobStorageLocation(DatasetLocation):
@@ -2881,6 +2961,9 @@ class AzureBlobStorageReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -2921,6 +3004,7 @@ class AzureBlobStorageReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -2964,6 +3048,9 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
:param block_size_in_mb: Indicates the block size(MB) when writing data to blob. Type: integer
@@ -2979,6 +3066,7 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'},
}
@@ -3296,6 +3384,9 @@ class AzureDatabricksDeltaLakeSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType
string).
:type pre_copy_script: object
@@ -3316,6 +3407,7 @@ class AzureDatabricksDeltaLakeSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
'import_settings': {'key': 'importSettings', 'type': 'AzureDatabricksDeltaLakeImportCommand'},
}
@@ -3349,6 +3441,9 @@ class AzureDatabricksDeltaLakeSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: Azure Databricks Delta Lake Sql query. Type: string (or Expression with
resultType string).
:type query: object
@@ -3367,6 +3462,7 @@ class AzureDatabricksDeltaLakeSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'export_settings': {'key': 'exportSettings', 'type': 'AzureDatabricksDeltaLakeExportCommand'},
}
@@ -3464,6 +3560,8 @@ class AzureDatabricksLinkedService(LinkedService):
:param policy_id: The policy id for limiting the ability to configure clusters based on a user
defined set of rules. Type: string (or Expression with resultType string).
:type policy_id: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -3496,6 +3594,7 @@ class AzureDatabricksLinkedService(LinkedService):
'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
'policy_id': {'key': 'typeProperties.policyId', 'type': 'object'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -3522,6 +3621,7 @@ def __init__(
self.new_cluster_enable_elastic_disk = kwargs.get('new_cluster_enable_elastic_disk', None)
self.encrypted_credential = kwargs.get('encrypted_credential', None)
self.policy_id = kwargs.get('policy_id', None)
+ self.credential = kwargs.get('credential', None)
class ExecutionActivity(Activity):
@@ -3674,6 +3774,8 @@ class AzureDataExplorerLinkedService(LinkedService):
:param tenant: The name or ID of the tenant to which the service principal belongs. Type:
string (or Expression with resultType string).
:type tenant: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -3694,6 +3796,7 @@ class AzureDataExplorerLinkedService(LinkedService):
'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
'database': {'key': 'typeProperties.database', 'type': 'object'},
'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -3707,6 +3810,7 @@ def __init__(
self.service_principal_key = kwargs.get('service_principal_key', None)
self.database = kwargs['database']
self.tenant = kwargs.get('tenant', None)
+ self.credential = kwargs.get('credential', None)
class AzureDataExplorerSink(CopySink):
@@ -3734,6 +3838,9 @@ class AzureDataExplorerSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param ingestion_mapping_name: A name of a pre-created csv mapping that was defined on the
target Kusto table. Type: string.
:type ingestion_mapping_name: object
@@ -3757,6 +3864,7 @@ class AzureDataExplorerSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'},
'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'},
'flush_immediately': {'key': 'flushImmediately', 'type': 'object'},
@@ -3792,6 +3900,9 @@ class AzureDataExplorerSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: Required. Database query. Should be a Kusto Query Language (KQL) query. Type:
string (or Expression with resultType string).
:type query: object
@@ -3817,6 +3928,7 @@ class AzureDataExplorerSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'no_truncation': {'key': 'noTruncation', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
@@ -4098,6 +4210,8 @@ class AzureDataLakeStoreLinkedService(LinkedService):
encrypted using the integration runtime credential manager. Type: string (or Expression with
resultType string).
:type encrypted_credential: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -4121,6 +4235,7 @@ class AzureDataLakeStoreLinkedService(LinkedService):
'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'},
'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -4138,6 +4253,7 @@ def __init__(
self.subscription_id = kwargs.get('subscription_id', None)
self.resource_group_name = kwargs.get('resource_group_name', None)
self.encrypted_credential = kwargs.get('encrypted_credential', None)
+ self.credential = kwargs.get('credential', None)
class AzureDataLakeStoreLocation(DatasetLocation):
@@ -4190,6 +4306,9 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -4235,6 +4354,7 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -4292,6 +4412,9 @@ class AzureDataLakeStoreSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
:param enable_adls_single_file_parallel: Single File Parallel.
@@ -4310,6 +4433,7 @@ class AzureDataLakeStoreSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'},
}
@@ -4343,6 +4467,9 @@ class AzureDataLakeStoreSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -4358,6 +4485,7 @@ class AzureDataLakeStoreSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
}
@@ -4383,6 +4511,9 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
:param expiry_date_time: Specifies the expiry time of the written files. The time is applied to
@@ -4399,6 +4530,7 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
'expiry_date_time': {'key': 'expiryDateTime', 'type': 'object'},
}
@@ -4550,6 +4682,9 @@ class AzureFileStorageReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -4590,6 +4725,7 @@ class AzureFileStorageReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -4633,6 +4769,9 @@ class AzureFileStorageWriteSettings(StoreWriteSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
"""
@@ -4645,6 +4784,7 @@ class AzureFileStorageWriteSettings(StoreWriteSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
}
@@ -4754,6 +4894,13 @@ class AzureFunctionLinkedService(LinkedService):
encrypted using the integration runtime credential manager. Type: string (or Expression with
resultType string).
:type encrypted_credential: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
+ :param resource_id: Allowed token audiences for the Azure Function.
+ :type resource_id: object
+ :param authentication: Type of authentication used to connect to the Azure Function (required
+ when using MSI). Type: string (or Expression with resultType string).
+ :type authentication: object
"""
_validation = {
@@ -4771,6 +4918,9 @@ class AzureFunctionLinkedService(LinkedService):
'function_app_url': {'key': 'typeProperties.functionAppUrl', 'type': 'object'},
'function_key': {'key': 'typeProperties.functionKey', 'type': 'SecretBase'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
+ 'resource_id': {'key': 'typeProperties.resourceId', 'type': 'object'},
+ 'authentication': {'key': 'typeProperties.authentication', 'type': 'object'},
}
def __init__(
@@ -4782,6 +4932,9 @@ def __init__(
self.function_app_url = kwargs['function_app_url']
self.function_key = kwargs.get('function_key', None)
self.encrypted_credential = kwargs.get('encrypted_credential', None)
+ self.credential = kwargs.get('credential', None)
+ self.resource_id = kwargs.get('resource_id', None)
+ self.authentication = kwargs.get('authentication', None)
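Together, the new authentication and resource_id properties let an Azure Function linked service
authenticate with a managed identity instead of a function key. A hedged sketch with illustrative
values (the exact audience string the service expects is an assumption here):

from azext_datafactory.vendored_sdks.datafactory import models

func_ls = models.AzureFunctionLinkedService(
    function_app_url='https://myfunctionapp.azurewebsites.net',
    authentication='MSI',                                    # selects managed identity auth
    resource_id='https://myfunctionapp.azurewebsites.net',   # token audience, illustrative
)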
class AzureKeyVaultLinkedService(LinkedService):
@@ -4805,6 +4958,8 @@ class AzureKeyVaultLinkedService(LinkedService):
:param base_url: Required. The base URL of the Azure Key Vault. e.g.
https://myakv.vault.azure.net Type: string (or Expression with resultType string).
:type base_url: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -4820,6 +4975,7 @@ class AzureKeyVaultLinkedService(LinkedService):
'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
'annotations': {'key': 'annotations', 'type': '[object]'},
'base_url': {'key': 'typeProperties.baseUrl', 'type': 'object'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -4829,6 +4985,7 @@ def __init__(
super(AzureKeyVaultLinkedService, self).__init__(**kwargs)
self.type = 'AzureKeyVault' # type: str
self.base_url = kwargs['base_url']
+ self.credential = kwargs.get('credential', None)
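With credential support, a Key Vault linked service can name the user-assigned identity to
authenticate with. A minimal sketch, assuming CredentialReference follows the same
type/reference_name shape as the other *Reference models in this file:

from azext_datafactory.vendored_sdks.datafactory import models

kv_ls = models.AzureKeyVaultLinkedService(
    base_url='https://myakv.vault.azure.net',
    credential=models.CredentialReference(
        type='CredentialReference',
        reference_name='myUserAssignedIdentityCredential'),  # hypothetical credential name
)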
class SecretBase(msrest.serialization.Model):
@@ -4979,6 +5136,9 @@ class AzureMariaDbSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -5000,6 +5160,7 @@ class AzureMariaDbSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -5278,6 +5439,9 @@ class AzureMlLinkedService(LinkedService):
encrypted using the integration runtime credential manager. Type: string (or Expression with
resultType string).
:type encrypted_credential: object
+ :param authentication: Type of authentication used to connect to AzureML (required when using
+ MSI). Type: string (or Expression with resultType string).
+ :type authentication: object
"""
_validation = {
@@ -5300,6 +5464,7 @@ class AzureMlLinkedService(LinkedService):
'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ 'authentication': {'key': 'typeProperties.authentication', 'type': 'object'},
}
def __init__(
@@ -5315,6 +5480,7 @@ def __init__(
self.service_principal_key = kwargs.get('service_principal_key', None)
self.tenant = kwargs.get('tenant', None)
self.encrypted_credential = kwargs.get('encrypted_credential', None)
+ self.authentication = kwargs.get('authentication', None)
class AzureMlServiceLinkedService(LinkedService):
@@ -5580,6 +5746,9 @@ class AzureMySqlSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param pre_copy_script: A query to execute before starting the copy. Type: string (or
Expression with resultType string).
:type pre_copy_script: object
@@ -5597,6 +5766,7 @@ class AzureMySqlSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
}
@@ -5628,6 +5798,9 @@ class AzureMySqlSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -5648,6 +5821,7 @@ class AzureMySqlSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -5807,6 +5981,9 @@ class AzurePostgreSqlSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param pre_copy_script: A query to execute before starting the copy. Type: string (or
Expression with resultType string).
:type pre_copy_script: object
@@ -5824,6 +6001,7 @@ class AzurePostgreSqlSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
}
@@ -5855,6 +6033,9 @@ class AzurePostgreSqlSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -5876,6 +6057,7 @@ class AzurePostgreSqlSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -5984,6 +6166,9 @@ class AzureQueueSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
"""
_validation = {
@@ -5998,6 +6183,7 @@ class AzureQueueSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
}
def __init__(
@@ -6093,6 +6279,9 @@ class AzureSearchIndexSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param write_behavior: Specify the write behavior when upserting documents into Azure Search
Index. Possible values include: "Merge", "Upload".
:type write_behavior: str or
@@ -6111,6 +6300,7 @@ class AzureSearchIndexSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
}
@@ -6223,6 +6413,8 @@ class AzureSqlDatabaseLinkedService(LinkedService):
:param always_encrypted_settings: Sql always encrypted properties.
:type always_encrypted_settings:
~data_factory_management_client.models.SqlAlwaysEncryptedProperties
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -6245,6 +6437,7 @@ class AzureSqlDatabaseLinkedService(LinkedService):
'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
'always_encrypted_settings': {'key': 'typeProperties.alwaysEncryptedSettings', 'type': 'SqlAlwaysEncryptedProperties'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -6261,6 +6454,7 @@ def __init__(
self.azure_cloud_type = kwargs.get('azure_cloud_type', None)
self.encrypted_credential = kwargs.get('encrypted_credential', None)
self.always_encrypted_settings = kwargs.get('always_encrypted_settings', None)
+ self.credential = kwargs.get('credential', None)
class AzureSqlDwLinkedService(LinkedService):
@@ -6303,6 +6497,8 @@ class AzureSqlDwLinkedService(LinkedService):
encrypted using the integration runtime credential manager. Type: string (or Expression with
resultType string).
:type encrypted_credential: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -6324,6 +6520,7 @@ class AzureSqlDwLinkedService(LinkedService):
'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -6339,6 +6536,7 @@ def __init__(
self.tenant = kwargs.get('tenant', None)
self.azure_cloud_type = kwargs.get('azure_cloud_type', None)
self.encrypted_credential = kwargs.get('encrypted_credential', None)
+ self.credential = kwargs.get('credential', None)
class AzureSqlDwTableDataset(Dataset):
@@ -6453,6 +6651,8 @@ class AzureSqlMiLinkedService(LinkedService):
:param always_encrypted_settings: Sql always encrypted properties.
:type always_encrypted_settings:
~data_factory_management_client.models.SqlAlwaysEncryptedProperties
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -6475,6 +6675,7 @@ class AzureSqlMiLinkedService(LinkedService):
'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
'always_encrypted_settings': {'key': 'typeProperties.alwaysEncryptedSettings', 'type': 'SqlAlwaysEncryptedProperties'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -6491,6 +6692,7 @@ def __init__(
self.azure_cloud_type = kwargs.get('azure_cloud_type', None)
self.encrypted_credential = kwargs.get('encrypted_credential', None)
self.always_encrypted_settings = kwargs.get('always_encrypted_settings', None)
+ self.credential = kwargs.get('credential', None)
class AzureSqlMiTableDataset(Dataset):
@@ -6587,6 +6789,9 @@ class AzureSqlSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or
Expression with resultType string).
:type sql_writer_stored_procedure_name: object
@@ -6605,6 +6810,14 @@ class AzureSqlSink(CopySink):
:param table_option: The option to handle sink table, such as autoCreate. For now only
'autoCreate' value is supported. Type: string (or Expression with resultType string).
:type table_option: object
+ :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or
+ Expression with resultType boolean).
+ :type sql_writer_use_table_lock: object
+ :param write_behavior: Write behavior when copying data into Azure SQL. Type:
+ SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum).
+ :type write_behavior: object
+ :param upsert_settings: SQL upsert settings.
+ :type upsert_settings: ~data_factory_management_client.models.SqlUpsertSettings
"""
_validation = {
@@ -6619,12 +6832,16 @@ class AzureSqlSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'},
'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'},
'table_option': {'key': 'tableOption', 'type': 'object'},
+ 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+ 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlUpsertSettings'},
}
def __init__(
@@ -6639,6 +6856,9 @@ def __init__(
self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None)
self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None)
self.table_option = kwargs.get('table_option', None)
+ self.sql_writer_use_table_lock = kwargs.get('sql_writer_use_table_lock', None)
+ self.write_behavior = kwargs.get('write_behavior', None)
+ self.upsert_settings = kwargs.get('upsert_settings', None)
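The three new sink properties surface the copy activity's native upsert path for Azure SQL. A
hedged sketch, assuming SqlUpsertSettings exposes keys and use_temp_db as in the service API:

from azext_datafactory.vendored_sdks.datafactory import models

sql_sink = models.AzureSqlSink(
    write_behavior='Upsert',            # SqlWriteBehaviorEnum, expression-capable
    sql_writer_use_table_lock=False,    # whether to take a table lock during bulk copy
    upsert_settings=models.SqlUpsertSettings(
        use_temp_db=True,               # stage rows in tempdb before merging
        keys=['CustomerId'],            # key columns matched on upsert, illustrative
    ),
)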
class AzureSqlSource(TabularSource):
@@ -6660,6 +6880,9 @@ class AzureSqlSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -6695,6 +6918,7 @@ class AzureSqlSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'},
@@ -6938,6 +7162,9 @@ class AzureTableSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param azure_table_default_partition_key_value: Azure Table default partition key value. Type:
string (or Expression with resultType string).
:type azure_table_default_partition_key_value: object
@@ -6964,6 +7191,7 @@ class AzureTableSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'azure_table_default_partition_key_value': {'key': 'azureTableDefaultPartitionKeyValue', 'type': 'object'},
'azure_table_partition_key_name': {'key': 'azureTablePartitionKeyName', 'type': 'object'},
'azure_table_row_key_name': {'key': 'azureTableRowKeyName', 'type': 'object'},
@@ -7001,6 +7229,9 @@ class AzureTableSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -7025,6 +7256,7 @@ class AzureTableSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'},
@@ -7263,6 +7495,9 @@ class BinarySink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: Binary store settings.
:type store_settings: ~data_factory_management_client.models.StoreWriteSettings
"""
@@ -7279,6 +7514,7 @@ class BinarySink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'},
}
@@ -7310,6 +7546,9 @@ class BinarySource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: Binary store settings.
:type store_settings: ~data_factory_management_client.models.StoreReadSettings
:param format_settings: Binary format settings.
@@ -7326,6 +7565,7 @@ class BinarySource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
'format_settings': {'key': 'formatSettings', 'type': 'BinaryReadSettings'},
}
@@ -7543,6 +7783,9 @@ class BlobSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param blob_writer_overwrite_files: Blob writer overwrite files. Type: boolean (or Expression
with resultType boolean).
:type blob_writer_overwrite_files: object
@@ -7554,6 +7797,9 @@ class BlobSink(CopySink):
:type blob_writer_add_header: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
+ :param metadata: Specifies the custom metadata to be added to the sink data. Type: array of
+ objects (or Expression with resultType array of objects).
+ :type metadata: list[~data_factory_management_client.models.MetadataItem]
"""
_validation = {
@@ -7568,10 +7814,12 @@ class BlobSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'},
'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'},
'blob_writer_add_header': {'key': 'blobWriterAddHeader', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
+ 'metadata': {'key': 'metadata', 'type': '[MetadataItem]'},
}
def __init__(
@@ -7584,6 +7832,7 @@ def __init__(
self.blob_writer_date_time_format = kwargs.get('blob_writer_date_time_format', None)
self.blob_writer_add_header = kwargs.get('blob_writer_add_header', None)
self.copy_behavior = kwargs.get('copy_behavior', None)
+ self.metadata = kwargs.get('metadata', None)
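
A short sketch of the new metadata hook, assuming MetadataItem is the name/value pair model its
type reference suggests; note that an Expression dict is needed for a value to be evaluated at
run time:

    from data_factory_management_client.models import BlobSink, MetadataItem

    # Attach custom metadata to every blob the sink writes.
    sink = BlobSink(
        metadata=[
            MetadataItem(name="ingestedBy", value="adf-copy"),  # assumption: name/value kwargs
            MetadataItem(
                name="runId",
                value={"value": "@pipeline().RunId", "type": "Expression"},
            ),
        ],
    )
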
class BlobSource(CopySource):
@@ -7605,6 +7854,9 @@ class BlobSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType
boolean).
:type treat_empty_as_null: object
@@ -7626,6 +7878,7 @@ class BlobSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'},
'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
@@ -7794,6 +8047,9 @@ class CassandraSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -7823,6 +8079,7 @@ class CassandraSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -8140,8 +8397,8 @@ class CommonDataServiceForAppsLinkedService(LinkedService):
:param deployment_type: Required. The deployment type of the Common Data Service for Apps
instance. 'Online' for Common Data Service for Apps Online and 'OnPremisesWithIfd' for Common
Data Service for Apps on-premises with Ifd. Type: string (or Expression with resultType
- string). Possible values include: "Online", "OnPremisesWithIfd".
- :type deployment_type: str or ~data_factory_management_client.models.DynamicsDeploymentType
+ string).
+ :type deployment_type: object
:param host_name: The host name of the on-premises Common Data Service for Apps server. The
property is required for on-prem and not allowed for online. Type: string (or Expression with
resultType string).
@@ -8162,10 +8419,8 @@ class CommonDataServiceForAppsLinkedService(LinkedService):
:param authentication_type: Required. The authentication type to connect to Common Data Service
for Apps server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario.
'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or
- Expression with resultType string). Possible values include: "Office365", "Ifd",
- "AADServicePrincipal".
- :type authentication_type: str or
- ~data_factory_management_client.models.DynamicsAuthenticationType
+ Expression with resultType string).
+ :type authentication_type: object
:param username: User name to access the Common Data Service for Apps instance. Type: string
(or Expression with resultType string).
:type username: object
@@ -8176,10 +8431,8 @@ class CommonDataServiceForAppsLinkedService(LinkedService):
:type service_principal_id: object
:param service_principal_credential_type: The service principal credential type to use in
Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert'
- for certificate. Type: string (or Expression with resultType string). Possible values include:
- "ServicePrincipalKey", "ServicePrincipalCert".
- :type service_principal_credential_type: str or
- ~data_factory_management_client.models.DynamicsServicePrincipalCredentialType
+ for certificate. Type: string (or Expression with resultType string).
+ :type service_principal_credential_type: object
:param service_principal_credential: The credential of the service principal object in Azure
Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey',
servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If
@@ -8205,16 +8458,16 @@ class CommonDataServiceForAppsLinkedService(LinkedService):
'description': {'key': 'description', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
'annotations': {'key': 'annotations', 'type': '[object]'},
- 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'},
+ 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'},
'host_name': {'key': 'typeProperties.hostName', 'type': 'object'},
'port': {'key': 'typeProperties.port', 'type': 'object'},
'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'},
'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'},
- 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
+ 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'},
'username': {'key': 'typeProperties.username', 'type': 'object'},
'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
- 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'},
+ 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'object'},
'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
}
@@ -8264,6 +8517,9 @@ class CommonDataServiceForAppsSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param write_behavior: Required. The write behavior for the operation. Possible values include:
"Upsert".
:type write_behavior: str or ~data_factory_management_client.models.DynamicsSinkWriteBehavior
@@ -8289,6 +8545,7 @@ class CommonDataServiceForAppsSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'},
@@ -8324,6 +8581,9 @@ class CommonDataServiceForAppsSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: FetchXML is a proprietary query language that is used in Microsoft Common Data
Service for Apps (online & on-premises). Type: string (or Expression with resultType string).
:type query: object
@@ -8342,6 +8602,7 @@ class CommonDataServiceForAppsSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
}
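
Since the query property takes raw FetchXML, a sketch of a typical source; the entity and
attribute names are illustrative:

    from data_factory_management_client.models import CommonDataServiceForAppsSource

    source = CommonDataServiceForAppsSource(
        # Placeholder FetchXML: read the 'name' attribute of every 'account'.
        query=(
            "<fetch mapping='logical'>"
            "<entity name='account'><attribute name='name'/></entity>"
            "</fetch>"
        ),
        disable_metrics_collection=True,
    )
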
@@ -8587,6 +8848,9 @@ class ConcurSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -8608,6 +8872,7 @@ class ConcurSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -9069,6 +9334,9 @@ class CosmosDbMongoDbApiLinkedService(LinkedService):
:type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification]
:param annotations: List of tags that can be used for describing the linked service.
:type annotations: list[object]
+ :param is_server_version_above32: Whether the CosmosDB (MongoDB API) server version is higher
+ than 3.2. The default value is false. Type: boolean (or Expression with resultType boolean).
+ :type is_server_version_above32: object
:param connection_string: Required. The CosmosDB (MongoDB API) connection string. Type: string,
SecureString or AzureKeyVaultSecretReference.
@@ -9091,6 +9359,7 @@ class CosmosDbMongoDbApiLinkedService(LinkedService):
'description': {'key': 'description', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'is_server_version_above32': {'key': 'typeProperties.isServerVersionAbove32', 'type': 'object'},
'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
'database': {'key': 'typeProperties.database', 'type': 'object'},
}
@@ -9101,6 +9370,7 @@ def __init__(
):
super(CosmosDbMongoDbApiLinkedService, self).__init__(**kwargs)
self.type = 'CosmosDbMongoDbApi' # type: str
+ self.is_server_version_above32 = kwargs.get('is_server_version_above32', None)
self.connection_string = kwargs['connection_string']
self.database = kwargs['database']
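
The new flag sits alongside the two required constructor arguments (note the
kwargs['connection_string'] and kwargs['database'] lookups above). A sketch with placeholder
values:

    from data_factory_management_client.models import CosmosDbMongoDbApiLinkedService

    ls = CosmosDbMongoDbApiLinkedService(
        connection_string="mongodb://<account>:<key>@<account>.mongo.cosmos.azure.com:10255/",
        database="mydb",                 # placeholder database name
        is_server_version_above32=True,  # server speaks a MongoDB wire protocol above 3.2
    )
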
@@ -9130,6 +9400,9 @@ class CosmosDbMongoDbApiSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param write_behavior: Specifies whether the document with the same key is overwritten (upsert)
rather than an exception being thrown (insert). The default value is "insert". Type: string (or
Expression with resultType string).
@@ -9148,6 +9421,7 @@ class CosmosDbMongoDbApiSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
}
@@ -9179,6 +9453,9 @@ class CosmosDbMongoDbApiSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param filter: Specifies selection filter using query operators. To return all documents in a
collection, omit this parameter or pass an empty document ({}). Type: string (or Expression
with resultType string).
@@ -9208,6 +9485,7 @@ class CosmosDbMongoDbApiSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'filter': {'key': 'filter', 'type': 'object'},
'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'},
'batch_size': {'key': 'batchSize', 'type': 'object'},
@@ -9313,6 +9591,9 @@ class CosmosDbSqlApiSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param write_behavior: Describes how to write data to Azure Cosmos DB. Type: string (or
Expression with resultType string). Allowed values: insert and upsert.
:type write_behavior: object
@@ -9330,6 +9611,7 @@ class CosmosDbSqlApiSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
}
@@ -9361,6 +9643,9 @@ class CosmosDbSqlApiSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: SQL API query. Type: string (or Expression with resultType string).
:type query: object
:param page_size: Page size of the result. Type: integer (or Expression with resultType
@@ -9387,6 +9672,7 @@ class CosmosDbSqlApiSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'page_size': {'key': 'pageSize', 'type': 'object'},
'preferred_regions': {'key': 'preferredRegions', 'type': 'object'},
@@ -9482,6 +9768,9 @@ class CouchbaseSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -9503,6 +9792,7 @@ class CouchbaseSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -9691,6 +9981,172 @@ def __init__(
self.run_id = kwargs['run_id']
+class Credential(msrest.serialization.Model):
+ """The Azure Data Factory nested object which contains the information and credential which can be used to connect with related store or compute resource.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: ManagedIdentityCredential, ServicePrincipalCredential.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :type additional_properties: dict[str, object]
+ :param type: Required. Type of credential. Constant filled by server.
+ :type type: str
+ :param description: Credential description.
+ :type description: str
+ :param annotations: List of tags that can be used for describing the Credential.
+ :type annotations: list[object]
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ }
+
+ _subtype_map = {
+ 'type': {'ManagedIdentity': 'ManagedIdentityCredential', 'ServicePrincipal': 'ServicePrincipalCredential'}
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Credential, self).__init__(**kwargs)
+ self.additional_properties = kwargs.get('additional_properties', None)
+ self.type = 'Credential' # type: str
+ self.description = kwargs.get('description', None)
+ self.annotations = kwargs.get('annotations', None)
+
+
+class CredentialReference(msrest.serialization.Model):
+ """Credential reference type.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :type additional_properties: dict[str, object]
+ :ivar type: Required. Credential reference type. Default value: "CredentialReference".
+ :vartype type: str
+ :param reference_name: Required. Reference credential name.
+ :type reference_name: str
+ """
+
+ _validation = {
+ 'type': {'required': True, 'constant': True},
+ 'reference_name': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'reference_name': {'key': 'referenceName', 'type': 'str'},
+ }
+
+ type = "CredentialReference"
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(CredentialReference, self).__init__(**kwargs)
+ self.additional_properties = kwargs.get('additional_properties', None)
+ self.reference_name = kwargs['reference_name']
+
+
+class SubResource(msrest.serialization.Model):
+ """Azure Data Factory nested resource, which belongs to a factory.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: The resource identifier.
+ :vartype id: str
+ :ivar name: The resource name.
+ :vartype name: str
+ :ivar type: The resource type.
+ :vartype type: str
+ :ivar etag: Etag identifies change in the resource.
+ :vartype etag: str
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'etag': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'etag': {'key': 'etag', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SubResource, self).__init__(**kwargs)
+ self.id = None
+ self.name = None
+ self.type = None
+ self.etag = None
+
+
+class CredentialResource(SubResource):
+ """Credential resource type.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar id: The resource identifier.
+ :vartype id: str
+ :ivar name: The resource name.
+ :vartype name: str
+ :ivar type: The resource type.
+ :vartype type: str
+ :ivar etag: Etag identifies change in the resource.
+ :vartype etag: str
+ :param properties: Required. Properties of credentials.
+ :type properties: ~data_factory_management_client.models.Credential
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'etag': {'readonly': True},
+ 'properties': {'required': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'etag': {'key': 'etag', 'type': 'str'},
+ 'properties': {'key': 'properties', 'type': 'Credential'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(CredentialResource, self).__init__(**kwargs)
+ self.properties = kwargs['properties']
+
+
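
How the three credential models fit together: a concrete Credential subclass is wrapped in a
CredentialResource for create/update calls, and other resources point at it through a
CredentialReference. A sketch assuming ManagedIdentityCredential (one of the two subclasses
named above) accepts a resource_id kwarg:

    from data_factory_management_client.models import (
        CredentialReference,
        CredentialResource,
        ManagedIdentityCredential,
    )

    # Concrete credential; the subclass fills in the 'ManagedIdentity'
    # discriminator per the _subtype_map above.
    cred = ManagedIdentityCredential(
        description="UAMI used for key vault access",
        resource_id=(  # assumption: resource_id kwarg; placeholder identity id
            "/subscriptions/<sub>/resourceGroups/<rg>/providers/"
            "Microsoft.ManagedIdentity/userAssignedIdentities/<name>"
        ),
    )

    # Wrap it for CRUD; id/name/type/etag stay server-populated.
    resource = CredentialResource(properties=cred)

    # Reference it from elsewhere; 'type' is the "CredentialReference" class constant.
    ref = CredentialReference(reference_name="myCredential")
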
class CustomActivity(ExecutionActivity):
"""Custom activity type.
@@ -10546,46 +11002,6 @@ def __init__(
self.dataset_parameters = kwargs.get('dataset_parameters', None)
-class SubResource(msrest.serialization.Model):
- """Azure Data Factory nested resource, which belongs to a factory.
-
- Variables are only populated by the server, and will be ignored when sending a request.
-
- :ivar id: The resource identifier.
- :vartype id: str
- :ivar name: The resource name.
- :vartype name: str
- :ivar type: The resource type.
- :vartype type: str
- :ivar etag: Etag identifies change in the resource.
- :vartype etag: str
- """
-
- _validation = {
- 'id': {'readonly': True},
- 'name': {'readonly': True},
- 'type': {'readonly': True},
- 'etag': {'readonly': True},
- }
-
- _attribute_map = {
- 'id': {'key': 'id', 'type': 'str'},
- 'name': {'key': 'name', 'type': 'str'},
- 'type': {'key': 'type', 'type': 'str'},
- 'etag': {'key': 'etag', 'type': 'str'},
- }
-
- def __init__(
- self,
- **kwargs
- ):
- super(SubResource, self).__init__(**kwargs)
- self.id = None
- self.name = None
- self.type = None
- self.etag = None
-
-
class DataFlowResource(SubResource):
"""Data flow resource type.
@@ -10999,8 +11415,8 @@ class DatasetDeflateCompression(DatasetCompression):
:type additional_properties: dict[str, object]
:param type: Required. Type of dataset compression. Constant filled by server.
:type type: str
- :param level: The Deflate compression level. Possible values include: "Optimal", "Fastest".
- :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel
+ :param level: The Deflate compression level.
+ :type level: object
"""
_validation = {
@@ -11010,7 +11426,7 @@ class DatasetDeflateCompression(DatasetCompression):
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
- 'level': {'key': 'level', 'type': 'str'},
+ 'level': {'key': 'level', 'type': 'object'},
}
def __init__(
@@ -11051,8 +11467,8 @@ class DatasetGZipCompression(DatasetCompression):
:type additional_properties: dict[str, object]
:param type: Required. Type of dataset compression. Constant filled by server.
:type type: str
- :param level: The GZip compression level. Possible values include: "Optimal", "Fastest".
- :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel
+ :param level: The GZip compression level.
+ :type level: object
"""
_validation = {
@@ -11062,7 +11478,7 @@ class DatasetGZipCompression(DatasetCompression):
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
- 'level': {'key': 'level', 'type': 'str'},
+ 'level': {'key': 'level', 'type': 'object'},
}
def __init__(
@@ -11250,8 +11666,8 @@ class DatasetTarGZipCompression(DatasetCompression):
:type additional_properties: dict[str, object]
:param type: Required. Type of dataset compression. Constant filled by server.
:type type: str
- :param level: The TarGZip compression level. Possible values include: "Optimal", "Fastest".
- :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel
+ :param level: The TarGZip compression level.
+ :type level: object
"""
_validation = {
@@ -11261,7 +11677,7 @@ class DatasetTarGZipCompression(DatasetCompression):
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
- 'level': {'key': 'level', 'type': 'str'},
+ 'level': {'key': 'level', 'type': 'object'},
}
def __init__(
@@ -11283,8 +11699,8 @@ class DatasetZipDeflateCompression(DatasetCompression):
:type additional_properties: dict[str, object]
:param type: Required. Type of dataset compression. Constant filled by server.
:type type: str
- :param level: The ZipDeflate compression level. Possible values include: "Optimal", "Fastest".
- :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel
+ :param level: The ZipDeflate compression level.
+ :type level: object
"""
_validation = {
@@ -11294,7 +11710,7 @@ class DatasetZipDeflateCompression(DatasetCompression):
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
- 'level': {'key': 'level', 'type': 'str'},
+ 'level': {'key': 'level', 'type': 'object'},
}
def __init__(
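
Relaxing level from the DatasetCompressionLevel enum to object means the property now also
accepts an ADF expression; literal strings such as "Optimal" or "Fastest" still work. A sketch:

    from data_factory_management_client.models import DatasetGZipCompression

    # Literal level, as before.
    gzip_fixed = DatasetGZipCompression(level="Optimal")

    # Parameter-driven level, now representable with the 'object' type;
    # 'compressionLevel' is an assumed dataset parameter.
    gzip_dynamic = DatasetGZipCompression(
        level={"value": "@dataset().compressionLevel", "type": "Expression"}
    )
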
@@ -11413,6 +11829,9 @@ class Db2Source(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -11433,6 +11852,7 @@ class Db2Source(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -11652,12 +12072,11 @@ class DelimitedTextDataset(Dataset):
https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with
resultType string).
:type encoding_name: object
- :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo", "bzip2",
- "deflate", "zipDeflate", "lz4", "tar", "tarGZip".
- :type compression_codec: str or ~data_factory_management_client.models.CompressionCodec
- :param compression_level: The data compression method used for DelimitedText. Possible values
- include: "Optimal", "Fastest".
- :type compression_level: str or ~data_factory_management_client.models.DatasetCompressionLevel
+ :param compression_codec: The data compression codec. Type: string (or Expression with
+ resultType string).
+ :type compression_codec: object
+ :param compression_level: The data compression method used for DelimitedText.
+ :type compression_level: object
:param quote_char: The quote character. Type: string (or Expression with resultType string).
:type quote_char: object
:param escape_char: The escape character. Type: string (or Expression with resultType string).
@@ -11689,8 +12108,8 @@ class DelimitedTextDataset(Dataset):
'column_delimiter': {'key': 'typeProperties.columnDelimiter', 'type': 'object'},
'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'},
'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'},
- 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'},
- 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'str'},
+ 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'},
+ 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'object'},
'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'},
'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'},
'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'},
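
The same relaxation applies to the delimited-text codec and level. A sketch, assuming
LinkedServiceReference takes the usual type/reference_name pair; the linked service name and the
'codec' dataset parameter are placeholders:

    from data_factory_management_client.models import (
        DelimitedTextDataset,
        LinkedServiceReference,
    )

    ds = DelimitedTextDataset(
        linked_service_name=LinkedServiceReference(
            type="LinkedServiceReference", reference_name="ls_blob"
        ),
        compression_codec={"value": "@dataset().codec", "type": "Expression"},
        compression_level="Fastest",
        column_delimiter=",",
        first_row_as_header=True,
    )
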
@@ -11778,6 +12197,9 @@ class DelimitedTextSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: DelimitedText store settings.
:type store_settings: ~data_factory_management_client.models.StoreWriteSettings
:param format_settings: DelimitedText format settings.
@@ -11796,6 +12218,7 @@ class DelimitedTextSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'},
'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSettings'},
}
@@ -11829,6 +12252,9 @@ class DelimitedTextSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: DelimitedText store settings.
:type store_settings: ~data_factory_management_client.models.StoreReadSettings
:param format_settings: DelimitedText format settings.
@@ -11848,6 +12274,7 @@ class DelimitedTextSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSettings'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
@@ -12070,6 +12497,9 @@ class DocumentDbCollectionSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param nesting_separator: Nested properties separator. Default is . (dot). Type: string (or
Expression with resultType string).
:type nesting_separator: object
@@ -12090,6 +12520,7 @@ class DocumentDbCollectionSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'},
'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
}
@@ -12123,6 +12554,9 @@ class DocumentDbCollectionSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: Documents query. Type: string (or Expression with resultType string).
:type query: object
:param nesting_separator: Nested properties separator. Type: string (or Expression with
@@ -12146,6 +12580,7 @@ class DocumentDbCollectionSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
@@ -12239,6 +12674,9 @@ class DrillSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -12260,6 +12698,7 @@ class DrillSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -12552,6 +12991,9 @@ class DynamicsAxSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -12578,6 +13020,7 @@ class DynamicsAxSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -12673,9 +13116,8 @@ class DynamicsCrmLinkedService(LinkedService):
:type annotations: list[object]
:param deployment_type: Required. The deployment type of the Dynamics CRM instance. 'Online'
for Dynamics CRM Online and 'OnPremisesWithIfd' for Dynamics CRM on-premises with Ifd. Type:
- string (or Expression with resultType string). Possible values include: "Online",
- "OnPremisesWithIfd".
- :type deployment_type: str or ~data_factory_management_client.models.DynamicsDeploymentType
+ string (or Expression with resultType string).
+ :type deployment_type: object
:param host_name: The host name of the on-premises Dynamics CRM server. The property is
required for on-prem and not allowed for online. Type: string (or Expression with resultType
string).
@@ -12694,10 +13136,8 @@ class DynamicsCrmLinkedService(LinkedService):
:param authentication_type: Required. The authentication type to connect to Dynamics CRM
server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario,
'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or
- Expression with resultType string). Possible values include: "Office365", "Ifd",
- "AADServicePrincipal".
- :type authentication_type: str or
- ~data_factory_management_client.models.DynamicsAuthenticationType
+ Expression with resultType string).
+ :type authentication_type: object
:param username: User name to access the Dynamics CRM instance. Type: string (or Expression
with resultType string).
:type username: object
@@ -12708,10 +13148,8 @@ class DynamicsCrmLinkedService(LinkedService):
:type service_principal_id: object
:param service_principal_credential_type: The service principal credential type to use in
Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert'
- for certificate. Type: string (or Expression with resultType string). Possible values include:
- "ServicePrincipalKey", "ServicePrincipalCert".
- :type service_principal_credential_type: str or
- ~data_factory_management_client.models.DynamicsServicePrincipalCredentialType
+ for certificate. Type: string (or Expression with resultType string).
+ :type service_principal_credential_type: object
:param service_principal_credential: The credential of the service principal object in Azure
Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey',
servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If
@@ -12737,16 +13175,16 @@ class DynamicsCrmLinkedService(LinkedService):
'description': {'key': 'description', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
'annotations': {'key': 'annotations', 'type': '[object]'},
- 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'},
+ 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'},
'host_name': {'key': 'typeProperties.hostName', 'type': 'object'},
'port': {'key': 'typeProperties.port', 'type': 'object'},
'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'},
'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'},
- 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
+ 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'},
'username': {'key': 'typeProperties.username', 'type': 'object'},
'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
- 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'},
+ 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'object'},
'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
}
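
With deployment_type and authentication_type relaxed to object, both required properties can be
parameterized as well as passed literally. A sketch of the literal online form, with placeholder
endpoint and account values and SecureString assumed to take a value kwarg:

    from data_factory_management_client.models import (
        DynamicsCrmLinkedService,
        SecureString,
    )

    ls = DynamicsCrmLinkedService(
        deployment_type="Online",                        # or an Expression dict
        authentication_type="Office365",
        service_uri="https://contoso.crm.dynamics.com",  # placeholder endpoint
        username="user@contoso.com",                     # placeholder account
        password=SecureString(value="<secret>"),         # placeholder secret
    )
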
@@ -12796,6 +13234,9 @@ class DynamicsCrmSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param write_behavior: Required. The write behavior for the operation. Possible values include:
"Upsert".
:type write_behavior: str or ~data_factory_management_client.models.DynamicsSinkWriteBehavior
@@ -12821,6 +13262,7 @@ class DynamicsCrmSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'},
@@ -12856,6 +13298,9 @@ class DynamicsCrmSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics CRM
(online & on-premises). Type: string (or Expression with resultType string).
:type query: object
@@ -12874,6 +13319,7 @@ class DynamicsCrmSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
}
@@ -12967,8 +13413,8 @@ class DynamicsLinkedService(LinkedService):
:type annotations: list[object]
:param deployment_type: Required. The deployment type of the Dynamics instance. 'Online' for
Dynamics Online and 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or
- Expression with resultType string). Possible values include: "Online", "OnPremisesWithIfd".
- :type deployment_type: str or ~data_factory_management_client.models.DynamicsDeploymentType
+ Expression with resultType string).
+ :type deployment_type: object
:param host_name: The host name of the on-premises Dynamics server. The property is required
for on-prem and not allowed for online. Type: string (or Expression with resultType string).
:type host_name: object
@@ -12986,9 +13432,8 @@ class DynamicsLinkedService(LinkedService):
:param authentication_type: Required. The authentication type to connect to Dynamics server.
'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal'
for Server-To-Server authentication in online scenario. Type: string (or Expression with
- resultType string). Possible values include: "Office365", "Ifd", "AADServicePrincipal".
- :type authentication_type: str or
- ~data_factory_management_client.models.DynamicsAuthenticationType
+ resultType string).
+ :type authentication_type: object
:param username: User name to access the Dynamics instance. Type: string (or Expression with
resultType string).
:type username: object
@@ -12999,10 +13444,8 @@ class DynamicsLinkedService(LinkedService):
:type service_principal_id: object
:param service_principal_credential_type: The service principal credential type to use in
Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert'
- for certificate. Type: string (or Expression with resultType string). Possible values include:
- "ServicePrincipalKey", "ServicePrincipalCert".
- :type service_principal_credential_type: str or
- ~data_factory_management_client.models.DynamicsServicePrincipalCredentialType
+ for certificate. Type: string (or Expression with resultType string).
+ :type service_principal_credential_type: object
:param service_principal_credential: The credential of the service principal object in Azure
Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey',
servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If
@@ -13028,12 +13471,12 @@ class DynamicsLinkedService(LinkedService):
'description': {'key': 'description', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
'annotations': {'key': 'annotations', 'type': '[object]'},
- 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'},
+ 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'},
'host_name': {'key': 'typeProperties.hostName', 'type': 'object'},
'port': {'key': 'typeProperties.port', 'type': 'object'},
'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'},
'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'},
- 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
+ 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'},
'username': {'key': 'typeProperties.username', 'type': 'object'},
'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
@@ -13087,6 +13530,9 @@ class DynamicsSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param write_behavior: Required. The write behavior for the operation. Possible values include:
"Upsert".
:type write_behavior: str or ~data_factory_management_client.models.DynamicsSinkWriteBehavior
@@ -13112,6 +13558,7 @@ class DynamicsSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'},
@@ -13147,6 +13594,9 @@ class DynamicsSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics
(online & on-premises). Type: string (or Expression with resultType string).
:type query: object
@@ -13165,6 +13615,7 @@ class DynamicsSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
}
@@ -13334,6 +13785,9 @@ class EloquaSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -13355,6 +13809,7 @@ class EloquaSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -13498,9 +13953,12 @@ class ExcelDataset(Dataset):
:type folder: ~data_factory_management_client.models.DatasetFolder
:param location: The location of the excel storage.
:type location: ~data_factory_management_client.models.DatasetLocation
- :param sheet_name: The sheet of excel file. Type: string (or Expression with resultType
+ :param sheet_name: The sheet name of the excel file. Type: string (or Expression with resultType
string).
:type sheet_name: object
+ :param sheet_index: The sheet index of the excel file; the default value is 0. Type: integer
+ (or Expression with resultType integer).
+ :type sheet_index: object
:param range: The partial data of one sheet. Type: string (or Expression with resultType
string).
:type range: object
@@ -13531,6 +13989,7 @@ class ExcelDataset(Dataset):
'folder': {'key': 'folder', 'type': 'DatasetFolder'},
'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'},
'sheet_name': {'key': 'typeProperties.sheetName', 'type': 'object'},
+ 'sheet_index': {'key': 'typeProperties.sheetIndex', 'type': 'object'},
'range': {'key': 'typeProperties.range', 'type': 'object'},
'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'},
'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'},
@@ -13545,6 +14004,7 @@ def __init__(
self.type = 'Excel' # type: str
self.location = kwargs.get('location', None)
self.sheet_name = kwargs.get('sheet_name', None)
+ self.sheet_index = kwargs.get('sheet_index', None)
self.range = kwargs.get('range', None)
self.first_row_as_header = kwargs.get('first_row_as_header', None)
self.compression = kwargs.get('compression', None)
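
With sheet_index now sitting alongside sheet_name, a dataset can address a worksheet positionally. A hedged construction sketch; the linked-service name is a placeholder and LinkedServiceReference is assumed to take reference_name like the other reference models:

    from data_factory_management_client import models  # assumed import path

    excel_ds = models.ExcelDataset(
        linked_service_name=models.LinkedServiceReference(reference_name='LS_Blob'),  # placeholder
        sheet_index=1,          # zero-based; the service defaults to 0 when omitted
        range='A1:D100',
        first_row_as_header=True,
    )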
@@ -13570,6 +14030,9 @@ class ExcelSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: Excel store settings.
:type store_settings: ~data_factory_management_client.models.StoreReadSettings
:param additional_columns: Specifies the additional columns to be added to source data. Type:
@@ -13587,6 +14050,7 @@ class ExcelSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
}
@@ -14527,6 +14991,9 @@ class FileServerReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -14567,6 +15034,7 @@ class FileServerReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -14610,6 +15078,9 @@ class FileServerWriteSettings(StoreWriteSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
"""
@@ -14622,6 +15093,7 @@ class FileServerWriteSettings(StoreWriteSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
}
@@ -14745,6 +15217,9 @@ class FileSystemSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
"""
@@ -14761,6 +15236,7 @@ class FileSystemSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
}
@@ -14792,6 +15268,9 @@ class FileSystemSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -14810,6 +15289,7 @@ class FileSystemSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
}
@@ -14951,6 +15431,9 @@ class FtpReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -14984,6 +15467,7 @@ class FtpReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -15247,6 +15731,8 @@ class GitHubAccessTokenRequest(msrest.serialization.Model):
:type git_hub_access_code: str
:param git_hub_client_id: GitHub application client ID.
:type git_hub_client_id: str
+ :param git_hub_client_secret: GitHub bring your own app client secret information.
+ :type git_hub_client_secret: ~data_factory_management_client.models.GitHubClientSecret
:param git_hub_access_token_base_url: Required. GitHub access token base URL.
:type git_hub_access_token_base_url: str
"""
@@ -15259,6 +15745,7 @@ class GitHubAccessTokenRequest(msrest.serialization.Model):
_attribute_map = {
'git_hub_access_code': {'key': 'gitHubAccessCode', 'type': 'str'},
'git_hub_client_id': {'key': 'gitHubClientId', 'type': 'str'},
+ 'git_hub_client_secret': {'key': 'gitHubClientSecret', 'type': 'GitHubClientSecret'},
'git_hub_access_token_base_url': {'key': 'gitHubAccessTokenBaseUrl', 'type': 'str'},
}
@@ -15269,6 +15756,7 @@ def __init__(
super(GitHubAccessTokenRequest, self).__init__(**kwargs)
self.git_hub_access_code = kwargs['git_hub_access_code']
self.git_hub_client_id = kwargs.get('git_hub_client_id', None)
+ self.git_hub_client_secret = kwargs.get('git_hub_client_secret', None)
self.git_hub_access_token_base_url = kwargs['git_hub_access_token_base_url']
@@ -15291,6 +15779,29 @@ def __init__(
self.git_hub_access_token = kwargs.get('git_hub_access_token', None)
+class GitHubClientSecret(msrest.serialization.Model):
+ """Client secret information for factory's bring your own app repository configuration.
+
+ :param byoa_secret_akv_url: Bring your own app client secret AKV URL.
+ :type byoa_secret_akv_url: str
+ :param byoa_secret_name: Bring your own app client secret name in AKV.
+ :type byoa_secret_name: str
+ """
+
+ _attribute_map = {
+ 'byoa_secret_akv_url': {'key': 'byoaSecretAkvUrl', 'type': 'str'},
+ 'byoa_secret_name': {'key': 'byoaSecretName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(GitHubClientSecret, self).__init__(**kwargs)
+ self.byoa_secret_akv_url = kwargs.get('byoa_secret_akv_url', None)
+ self.byoa_secret_name = kwargs.get('byoa_secret_name', None)
+
+
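
A sketch of wiring the new secret model into a token request; all string values are placeholders, and the two fields documented as required must be supplied:

    from data_factory_management_client import models  # assumed import path

    secret = models.GitHubClientSecret(
        byoa_secret_akv_url='https://myvault.vault.azure.net/',  # placeholder AKV URL
        byoa_secret_name='github-app-secret',                    # placeholder secret name
    )
    request = models.GitHubAccessTokenRequest(
        git_hub_access_code='<oauth-access-code>',   # required
        git_hub_client_id='<app-client-id>',
        git_hub_client_secret=secret,
        git_hub_access_token_base_url='<base-url>',  # required
    )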
class GlobalParameterSpecification(msrest.serialization.Model):
"""Definition of a single parameter for an entity.
@@ -15502,6 +16013,9 @@ class GoogleAdWordsSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -15523,6 +16037,7 @@ class GoogleAdWordsSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -15731,6 +16246,9 @@ class GoogleBigQuerySource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -15752,6 +16270,7 @@ class GoogleBigQuerySource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -15890,6 +16409,9 @@ class GoogleCloudStorageReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -15930,6 +16452,7 @@ class GoogleCloudStorageReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -16035,6 +16558,9 @@ class GreenplumSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -16056,6 +16582,7 @@ class GreenplumSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -16313,6 +16840,9 @@ class HBaseSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -16334,6 +16864,7 @@ class HBaseSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -16465,6 +16996,9 @@ class HdfsReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -16504,6 +17038,7 @@ class HdfsReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -16553,6 +17088,9 @@ class HdfsSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -16570,6 +17108,7 @@ class HdfsSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'},
}
@@ -16952,6 +17491,8 @@ class HdInsightOnDemandLinkedService(LinkedService):
:param subnet_name: The ARM resource ID for the subnet in the vNet. If virtualNetworkId was
specified, then this property is required. Type: string (or Expression with resultType string).
:type subnet_name: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -17005,6 +17546,7 @@ class HdInsightOnDemandLinkedService(LinkedService):
'script_actions': {'key': 'typeProperties.scriptActions', 'type': '[ScriptAction]'},
'virtual_network_id': {'key': 'typeProperties.virtualNetworkId', 'type': 'object'},
'subnet_name': {'key': 'typeProperties.subnetName', 'type': 'object'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -17046,6 +17588,7 @@ def __init__(
self.script_actions = kwargs.get('script_actions', None)
self.virtual_network_id = kwargs.get('virtual_network_id', None)
self.subnet_name = kwargs.get('subnet_name', None)
+ self.credential = kwargs.get('credential', None)
class HdInsightPigActivity(ExecutionActivity):
@@ -17530,6 +18073,9 @@ class HiveSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -17551,6 +18097,7 @@ class HiveSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -17755,6 +18302,9 @@ class HttpReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param request_method: The HTTP method used to call the RESTful API. The default is GET. Type:
string (or Expression with resultType string).
:type request_method: object
@@ -17782,6 +18332,7 @@ class HttpReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'request_method': {'key': 'requestMethod', 'type': 'object'},
'request_body': {'key': 'requestBody', 'type': 'object'},
'additional_headers': {'key': 'additionalHeaders', 'type': 'object'},
@@ -17865,6 +18416,9 @@ class HttpSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param http_request_timeout: Specifies the timeout for an HTTP client to get an HTTP response from
an HTTP server. The default value is equivalent to System.Net.HttpWebRequest.Timeout. Type: string
(or Expression with resultType string), pattern:
@@ -17882,6 +18436,7 @@ class HttpSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'},
}
@@ -18053,6 +18608,9 @@ class HubspotSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -18074,6 +18632,7 @@ class HubspotSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -18335,6 +18894,9 @@ class ImpalaSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -18356,6 +18918,7 @@ class ImpalaSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -18469,6 +19032,9 @@ class InformixSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param pre_copy_script: A query to execute before starting the copy. Type: string (or
Expression with resultType string).
:type pre_copy_script: object
@@ -18486,6 +19052,7 @@ class InformixSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
}
@@ -18517,6 +19084,9 @@ class InformixSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -18537,6 +19107,7 @@ class InformixSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -19225,6 +19796,8 @@ class IntegrationRuntimeSsisProperties(msrest.serialization.Model):
list[~data_factory_management_client.models.CustomSetupBase]
:param package_stores: Package stores for the SSIS Integration Runtime.
:type package_stores: list[~data_factory_management_client.models.PackageStore]
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_attribute_map = {
@@ -19236,6 +19809,7 @@ class IntegrationRuntimeSsisProperties(msrest.serialization.Model):
'edition': {'key': 'edition', 'type': 'str'},
'express_custom_setup_properties': {'key': 'expressCustomSetupProperties', 'type': '[CustomSetupBase]'},
'package_stores': {'key': 'packageStores', 'type': '[PackageStore]'},
+ 'credential': {'key': 'credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -19251,6 +19825,7 @@ def __init__(
self.edition = kwargs.get('edition', None)
self.express_custom_setup_properties = kwargs.get('express_custom_setup_properties', None)
self.package_stores = kwargs.get('package_stores', None)
+ self.credential = kwargs.get('credential', None)
class IntegrationRuntimeStatus(msrest.serialization.Model):
@@ -19560,6 +20135,9 @@ class JiraSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -19581,6 +20159,7 @@ class JiraSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -19680,9 +20259,8 @@ class JsonFormat(DatasetStorageFormat):
:param deserializer: Deserializer. Type: string (or Expression with resultType string).
:type deserializer: object
:param file_pattern: File pattern of JSON. To be more specific, the way of separating a
- collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive. Possible
- values include: "setOfObjects", "arrayOfObjects".
- :type file_pattern: str or ~data_factory_management_client.models.JsonFormatFilePattern
+ collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive.
+ :type file_pattern: object
:param nesting_separator: The character used to separate nesting levels. Default value is '.'
(dot). Type: string (or Expression with resultType string).
:type nesting_separator: object
@@ -19712,7 +20290,7 @@ class JsonFormat(DatasetStorageFormat):
'type': {'key': 'type', 'type': 'str'},
'serializer': {'key': 'serializer', 'type': 'object'},
'deserializer': {'key': 'deserializer', 'type': 'object'},
- 'file_pattern': {'key': 'filePattern', 'type': 'str'},
+ 'file_pattern': {'key': 'filePattern', 'type': 'object'},
'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'},
'encoding_name': {'key': 'encodingName', 'type': 'object'},
'json_node_reference': {'key': 'jsonNodeReference', 'type': 'object'},
@@ -19790,6 +20368,9 @@ class JsonSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: Json store settings.
:type store_settings: ~data_factory_management_client.models.StoreWriteSettings
:param format_settings: Json format settings.
@@ -19808,6 +20389,7 @@ class JsonSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'},
'format_settings': {'key': 'formatSettings', 'type': 'JsonWriteSettings'},
}
@@ -19841,6 +20423,9 @@ class JsonSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: Json store settings.
:type store_settings: ~data_factory_management_client.models.StoreReadSettings
:param format_settings: Json format settings.
@@ -19860,6 +20445,7 @@ class JsonSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
'format_settings': {'key': 'formatSettings', 'type': 'JsonReadSettings'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
@@ -19887,9 +20473,8 @@ class JsonWriteSettings(FormatWriteSettings):
:param type: Required. The write setting type. Constant filled by server.
:type type: str
:param file_pattern: File pattern of JSON. This setting controls the way a collection of JSON
- objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. Possible
- values include: "setOfObjects", "arrayOfObjects".
- :type file_pattern: str or ~data_factory_management_client.models.JsonWriteFilePattern
+ objects will be treated. The default value is 'setOfObjects'. It is case-sensitive.
+ :type file_pattern: object
"""
_validation = {
@@ -19899,7 +20484,7 @@ class JsonWriteSettings(FormatWriteSettings):
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
- 'file_pattern': {'key': 'filePattern', 'type': 'str'},
+ 'file_pattern': {'key': 'filePattern', 'type': 'object'},
}
def __init__(
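
Loosening file_pattern from str to object lets callers pass either the literal pattern or an ADF expression, which the old enum-backed typing could not carry. A sketch of both forms:

    from data_factory_management_client import models  # assumed import path

    plain = models.JsonWriteSettings(file_pattern='arrayOfObjects')
    dynamic = models.JsonWriteSettings(
        file_pattern={'value': '@pipeline().parameters.jsonPattern', 'type': 'Expression'},
    )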
@@ -20538,6 +21123,9 @@ class MagentoSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -20559,6 +21147,7 @@ class MagentoSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -20573,6 +21162,45 @@ def __init__(
self.query = kwargs.get('query', None)
+class ManagedIdentityCredential(Credential):
+ """Managed identity credential.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :type additional_properties: dict[str, object]
+    :param type: Required. Type of credential. Constant filled by server.
+ :type type: str
+ :param description: Credential description.
+ :type description: str
+ :param annotations: List of tags that can be used for describing the Credential.
+ :type annotations: list[object]
+    :param resource_id: The resource id of the user assigned managed identity.
+ :type resource_id: str
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'resource_id': {'key': 'typeProperties.resourceId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ManagedIdentityCredential, self).__init__(**kwargs)
+ self.type = 'ManagedIdentity' # type: str
+ self.resource_id = kwargs.get('resource_id', None)
+
+
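
A sketch of how the new credential plumbing could fit together; the resource id is a placeholder, and CredentialReference is assumed to follow the usual reference-model shape with a reference_name kwarg:

    from data_factory_management_client import models  # assumed import path

    uami = models.ManagedIdentityCredential(
        description='UAMI for the SSIS integration runtime',
        resource_id='/subscriptions/<sub>/resourceGroups/<rg>/providers/'
                    'Microsoft.ManagedIdentity/userAssignedIdentities/<name>',  # placeholder
    )
    ssis_props = models.IntegrationRuntimeSsisProperties(
        credential=models.CredentialReference(reference_name='my-credential'),  # assumed shape
    )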
class ManagedIntegrationRuntime(IntegrationRuntime):
"""Managed integration runtime, including managed elastic and managed dedicated integration runtimes.
@@ -21219,6 +21847,9 @@ class MariaDbSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -21240,6 +21871,7 @@ class MariaDbSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -21466,6 +22098,9 @@ class MarketoSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -21487,6 +22122,7 @@ class MarketoSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -21501,6 +22137,29 @@ def __init__(
self.query = kwargs.get('query', None)
+class MetadataItem(msrest.serialization.Model):
+ """Specify the name and value of custom metadata item.
+
+ :param name: Metadata item key name. Type: string (or Expression with resultType string).
+ :type name: object
+ :param value: Metadata item value. Type: string (or Expression with resultType string).
+ :type value: object
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'object'},
+ 'value': {'key': 'value', 'type': 'object'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(MetadataItem, self).__init__(**kwargs)
+ self.name = kwargs.get('name', None)
+ self.value = kwargs.get('value', None)
+
+
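
Both MetadataItem fields are typed object, so either a literal or an ADF expression fits. A brief sketch:

    from data_factory_management_client import models  # assumed import path

    static_item = models.MetadataItem(name='project', value='adf-demo')
    dynamic_item = models.MetadataItem(
        name='runId',
        value={'value': '@pipeline().RunId', 'type': 'Expression'},
    )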
class MicrosoftAccessLinkedService(LinkedService):
"""Microsoft Access linked service.
@@ -21600,6 +22259,9 @@ class MicrosoftAccessSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param pre_copy_script: A query to execute before starting the copy. Type: string (or
Expression with resultType string).
:type pre_copy_script: object
@@ -21617,6 +22279,7 @@ class MicrosoftAccessSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
}
@@ -21648,6 +22311,9 @@ class MicrosoftAccessSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: Database query. Type: string (or Expression with resultType string).
:type query: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
@@ -21665,6 +22331,7 @@ class MicrosoftAccessSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
}
@@ -21852,6 +22519,65 @@ def __init__(
self.database = kwargs['database']
+class MongoDbAtlasSink(CopySink):
+ """A copy activity MongoDB Atlas sink.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :type additional_properties: dict[str, object]
+    :param type: Required. Copy sink type. Constant filled by server.
+ :type type: str
+ :param write_batch_size: Write batch size. Type: integer (or Expression with resultType
+ integer), minimum: 0.
+ :type write_batch_size: object
+ :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType
+ string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type write_batch_timeout: object
+ :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType
+ integer).
+ :type sink_retry_count: object
+ :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string),
+ pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type sink_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count for the sink data
+ store. Type: integer (or Expression with resultType integer).
+ :type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
+    :param write_behavior: Specifies whether a document with the same key should be overwritten
+     (upsert) rather than raising an exception (insert). The default value is "insert". Type: string
+     (or Expression with resultType string).
+ :type write_behavior: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+ 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+ 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+ 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(MongoDbAtlasSink, self).__init__(**kwargs)
+ self.type = 'MongoDbAtlasSink' # type: str
+ self.write_behavior = kwargs.get('write_behavior', None)
+
+
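
Beyond the shared CopySink fields, only write_behavior is surfaced here, and MongoDbV2Sink further down is shaped identically. A minimal sketch:

    from data_factory_management_client import models  # assumed import path

    atlas_sink = models.MongoDbAtlasSink(write_behavior='upsert')  # service default is 'insert'
    v2_sink = models.MongoDbV2Sink(write_behavior='upsert')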
class MongoDbAtlasSource(CopySource):
"""A copy activity source for a MongoDB Atlas database.
@@ -21871,6 +22597,9 @@ class MongoDbAtlasSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param filter: Specifies selection filter using query operators. To return all documents in a
collection, omit this parameter or pass an empty document ({}). Type: string (or Expression
with resultType string).
@@ -21900,6 +22629,7 @@ class MongoDbAtlasSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'filter': {'key': 'filter', 'type': 'object'},
'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'},
'batch_size': {'key': 'batchSize', 'type': 'object'},
@@ -22136,6 +22866,9 @@ class MongoDbSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: Database query. Should be a SQL-92 query expression. Type: string (or Expression
with resultType string).
:type query: object
@@ -22154,6 +22887,7 @@ class MongoDbSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
}
@@ -22281,6 +23015,65 @@ def __init__(
self.database = kwargs['database']
+class MongoDbV2Sink(CopySink):
+ """A copy activity MongoDB sink.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :type additional_properties: dict[str, object]
+    :param type: Required. Copy sink type. Constant filled by server.
+ :type type: str
+ :param write_batch_size: Write batch size. Type: integer (or Expression with resultType
+ integer), minimum: 0.
+ :type write_batch_size: object
+ :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType
+ string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type write_batch_timeout: object
+ :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType
+ integer).
+ :type sink_retry_count: object
+ :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string),
+ pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type sink_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count for the sink data
+ store. Type: integer (or Expression with resultType integer).
+ :type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
+    :param write_behavior: Specifies whether a document with the same key should be overwritten
+     (upsert) rather than raising an exception (insert). The default value is "insert". Type: string
+     (or Expression with resultType string).
+ :type write_behavior: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+ 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+ 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+ 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(MongoDbV2Sink, self).__init__(**kwargs)
+ self.type = 'MongoDbV2Sink' # type: str
+ self.write_behavior = kwargs.get('write_behavior', None)
+
+
class MongoDbV2Source(CopySource):
"""A copy activity source for a MongoDB database.
@@ -22300,6 +23093,9 @@ class MongoDbV2Source(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param filter: Specifies selection filter using query operators. To return all documents in a
collection, omit this parameter or pass an empty document ({}). Type: string (or Expression
with resultType string).
@@ -22329,6 +23125,7 @@ class MongoDbV2Source(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'filter': {'key': 'filter', 'type': 'object'},
'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'},
'batch_size': {'key': 'batchSize', 'type': 'object'},
@@ -22424,6 +23221,9 @@ class MySqlSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -22444,6 +23244,7 @@ class MySqlSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -22623,6 +23424,9 @@ class NetezzaSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -22649,6 +23453,7 @@ class NetezzaSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -22932,6 +23737,9 @@ class ODataSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: OData query. For example, "$top=1". Type: string (or Expression with resultType
string).
:type query: object
@@ -22955,6 +23763,7 @@ class ODataSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
@@ -23069,6 +23878,9 @@ class OdbcSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param pre_copy_script: A query to execute before starting the copy. Type: string (or
Expression with resultType string).
:type pre_copy_script: object
@@ -23086,6 +23898,7 @@ class OdbcSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
}
@@ -23117,6 +23930,9 @@ class OdbcSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -23137,6 +23953,7 @@ class OdbcSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -23363,6 +24180,9 @@ class Office365Source(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param allowed_groups: The groups containing all the users. Type: array of strings (or
Expression with resultType array of strings).
:type allowed_groups: object
@@ -23394,6 +24214,7 @@ class Office365Source(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'allowed_groups': {'key': 'allowedGroups', 'type': 'object'},
'user_scope_filter_uri': {'key': 'userScopeFilterUri', 'type': 'object'},
'date_filter_column': {'key': 'dateFilterColumn', 'type': 'object'},
@@ -23783,6 +24604,9 @@ class OracleCloudStorageReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -23823,6 +24647,7 @@ class OracleCloudStorageReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -24103,6 +24928,9 @@ class OracleServiceCloudSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -24124,6 +24952,7 @@ class OracleServiceCloudSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -24163,6 +24992,9 @@ class OracleSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType
string).
:type pre_copy_script: object
@@ -24180,6 +25012,7 @@ class OracleSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
}
@@ -24211,6 +25044,9 @@ class OracleSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param oracle_reader_query: Oracle reader query. Type: string (or Expression with resultType
string).
:type oracle_reader_query: object
@@ -24237,6 +25073,7 @@ class OracleSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'partition_option': {'key': 'partitionOption', 'type': 'object'},
@@ -24355,8 +25192,9 @@ class OrcDataset(Dataset):
:type folder: ~data_factory_management_client.models.DatasetFolder
:param location: The location of the ORC data storage.
:type location: ~data_factory_management_client.models.DatasetLocation
- :param orc_compression_codec: Possible values include: "none", "zlib", "snappy", "lzo".
- :type orc_compression_codec: str or ~data_factory_management_client.models.OrcCompressionCodec
+ :param orc_compression_codec: The compression codec to use when writing ORC data. Type:
+ string (or Expression with resultType string).
+ :type orc_compression_codec: object
"""
_validation = {
@@ -24375,7 +25213,7 @@ class OrcDataset(Dataset):
'annotations': {'key': 'annotations', 'type': '[object]'},
'folder': {'key': 'folder', 'type': 'DatasetFolder'},
'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'},
- 'orc_compression_codec': {'key': 'typeProperties.orcCompressionCodec', 'type': 'str'},
+ 'orc_compression_codec': {'key': 'typeProperties.orcCompressionCodec', 'type': 'object'},
}
def __init__(
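Loosening the codec from the `OrcCompressionCodec` enum to `object` means it can now carry an ADF expression as well as a literal string. A rough construction sketch (linked service name and expression are placeholders):

    from data_factory_management_client.models import (
        LinkedServiceReference, OrcDataset)

    ds = OrcDataset(
        linked_service_name=LinkedServiceReference(
            type='LinkedServiceReference', reference_name='ls_blob'),
        # either a literal such as 'snappy' or an expression object works now
        orc_compression_codec={'value': '@dataset().codec', 'type': 'Expression'},
    )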
@@ -24448,6 +25286,9 @@ class OrcSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: ORC store settings.
:type store_settings: ~data_factory_management_client.models.StoreWriteSettings
:param format_settings: ORC format settings.
@@ -24466,6 +25307,7 @@ class OrcSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'},
'format_settings': {'key': 'formatSettings', 'type': 'OrcWriteSettings'},
}
@@ -24499,6 +25341,9 @@ class OrcSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: ORC store settings.
:type store_settings: ~data_factory_management_client.models.StoreReadSettings
:param additional_columns: Specifies the additional columns to be added to source data. Type:
@@ -24516,6 +25361,7 @@ class OrcSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
}
@@ -24659,9 +25505,9 @@ class ParquetDataset(Dataset):
:type folder: ~data_factory_management_client.models.DatasetFolder
:param location: The location of the parquet storage.
:type location: ~data_factory_management_client.models.DatasetLocation
- :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo", "bzip2",
- "deflate", "zipDeflate", "lz4", "tar", "tarGZip".
- :type compression_codec: str or ~data_factory_management_client.models.CompressionCodec
+ :param compression_codec: The compression codec to use when writing Parquet data. Type:
+ string (or Expression with resultType string).
+ :type compression_codec: object
"""
_validation = {
@@ -24680,7 +25526,7 @@ class ParquetDataset(Dataset):
'annotations': {'key': 'annotations', 'type': '[object]'},
'folder': {'key': 'folder', 'type': 'DatasetFolder'},
'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'},
- 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'},
+ 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'},
}
def __init__(
@@ -24753,6 +25599,9 @@ class ParquetSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: Parquet store settings.
:type store_settings: ~data_factory_management_client.models.StoreWriteSettings
:param format_settings: Parquet format settings.
@@ -24771,6 +25620,7 @@ class ParquetSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'},
'format_settings': {'key': 'formatSettings', 'type': 'ParquetWriteSettings'},
}
@@ -24804,6 +25654,9 @@ class ParquetSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: Parquet store settings.
:type store_settings: ~data_factory_management_client.models.StoreReadSettings
:param additional_columns: Specifies the additional columns to be added to source data. Type:
@@ -24821,6 +25674,7 @@ class ParquetSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
}
@@ -25029,6 +25883,9 @@ class PaypalSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -25050,6 +25907,7 @@ class PaypalSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -25258,6 +26116,9 @@ class PhoenixSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -25279,6 +26140,7 @@ class PhoenixSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -25598,18 +26460,26 @@ class PipelineRunInvokedBy(msrest.serialization.Model):
:vartype id: str
:ivar invoked_by_type: The type of the entity that started the run.
:vartype invoked_by_type: str
+ :ivar pipeline_name: The name of the pipeline that triggered the run, if any.
+ :vartype pipeline_name: str
+ :ivar pipeline_run_id: The run id of the pipeline that triggered the run, if any.
+ :vartype pipeline_run_id: str
"""
_validation = {
'name': {'readonly': True},
'id': {'readonly': True},
'invoked_by_type': {'readonly': True},
+ 'pipeline_name': {'readonly': True},
+ 'pipeline_run_id': {'readonly': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'invoked_by_type': {'key': 'invokedByType', 'type': 'str'},
+ 'pipeline_name': {'key': 'pipelineName', 'type': 'str'},
+ 'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'},
}
def __init__(
@@ -25620,6 +26490,8 @@ def __init__(
self.name = None
self.id = None
self.invoked_by_type = None
+ self.pipeline_name = None
+ self.pipeline_run_id = None
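Both fields are read-only, so they only appear on responses, e.g. when inspecting a run fetched through the client (client construction elided; names are illustrative):

    run = client.pipeline_runs.get('my-rg', 'my-factory', run_id)
    if run.invoked_by.pipeline_name:  # set only when a parent pipeline triggered this run
        print(run.invoked_by.pipeline_name, run.invoked_by.pipeline_run_id)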
class PipelineRunsQueryResponse(msrest.serialization.Model):
@@ -25768,6 +26640,9 @@ class PostgreSqlSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -25788,6 +26663,7 @@ class PostgreSqlSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -26072,6 +26948,9 @@ class PrestoSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -26093,6 +26972,7 @@ class PrestoSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -26544,6 +27424,9 @@ class QuickBooksSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -26565,6 +27448,7 @@ class QuickBooksSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -26739,6 +27623,9 @@ class RelationalSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: Database query. Type: string (or Expression with resultType string).
:type query: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
@@ -26756,6 +27643,7 @@ class RelationalSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
}
@@ -27085,6 +27973,9 @@ class ResponsysSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -27106,6 +27997,7 @@ class ResponsysSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -27254,6 +28146,8 @@ class RestServiceLinkedService(LinkedService):
encrypted using the integration runtime credential manager. Type: string (or Expression with
resultType string).
:type encrypted_credential: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -27281,6 +28175,7 @@ class RestServiceLinkedService(LinkedService):
'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'},
'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -27301,6 +28196,7 @@ def __init__(
self.azure_cloud_type = kwargs.get('azure_cloud_type', None)
self.aad_resource_id = kwargs.get('aad_resource_id', None)
self.encrypted_credential = kwargs.get('encrypted_credential', None)
+ self.credential = kwargs.get('credential', None)
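A construction sketch for the new property, assuming a factory credential named 'my-credential' already exists (URL and names are placeholders):

    from data_factory_management_client.models import (
        CredentialReference, RestServiceLinkedService)

    ls = RestServiceLinkedService(
        url='https://example.com/api',
        authentication_type='ManagedServiceIdentity',
        # points at an existing factory credential rather than inlining secrets
        credential=CredentialReference(
            type='CredentialReference', reference_name='my-credential'),
    )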
class RestSink(CopySink):
@@ -27328,6 +28224,9 @@ class RestSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param request_method: The HTTP method used to call the RESTful API. The default is POST. Type:
string (or Expression with resultType string).
:type request_method: object
@@ -27358,6 +28257,7 @@ class RestSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'request_method': {'key': 'requestMethod', 'type': 'object'},
'additional_headers': {'key': 'additionalHeaders', 'type': 'object'},
'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'},
@@ -27397,6 +28297,9 @@ class RestSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param request_method: The HTTP method used to call the RESTful API. The default is GET. Type:
string (or Expression with resultType string).
:type request_method: object
@@ -27431,6 +28334,7 @@ class RestSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'request_method': {'key': 'requestMethod', 'type': 'object'},
'request_body': {'key': 'requestBody', 'type': 'object'},
'additional_headers': {'key': 'additionalHeaders', 'type': 'object'},
@@ -27832,6 +28736,9 @@ class SalesforceMarketingCloudSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -27853,6 +28760,7 @@ class SalesforceMarketingCloudSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -28087,6 +28995,9 @@ class SalesforceServiceCloudSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param write_behavior: The write behavior for the operation. Default is Insert. Possible values
include: "Insert", "Upsert".
:type write_behavior: str or ~data_factory_management_client.models.SalesforceSinkWriteBehavior
@@ -28114,6 +29025,7 @@ class SalesforceServiceCloudSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'},
'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
@@ -28149,6 +29061,9 @@ class SalesforceServiceCloudSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: Database query. Type: string (or Expression with resultType string).
:type query: object
:param read_behavior: The read behavior for the operation. Default is Query. Possible values
@@ -28169,6 +29084,7 @@ class SalesforceServiceCloudSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'read_behavior': {'key': 'readBehavior', 'type': 'str'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
@@ -28210,6 +29126,9 @@ class SalesforceSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param write_behavior: The write behavior for the operation. Default is Insert. Possible values
include: "Insert", "Upsert".
:type write_behavior: str or ~data_factory_management_client.models.SalesforceSinkWriteBehavior
@@ -28237,6 +29156,7 @@ class SalesforceSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'},
'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
@@ -28272,6 +29192,9 @@ class SalesforceSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -28295,6 +29218,7 @@ class SalesforceSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -28458,6 +29382,9 @@ class SapBwSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -28478,6 +29405,7 @@ class SapBwSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -28640,6 +29568,9 @@ class SapCloudForCustomerSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param write_behavior: The write behavior for the operation. Default is 'Insert'. Possible
values include: "Insert", "Update".
:type write_behavior: str or
@@ -28663,6 +29594,7 @@ class SapCloudForCustomerSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'},
}
@@ -28696,6 +29628,9 @@ class SapCloudForCustomerSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -28722,6 +29657,7 @@ class SapCloudForCustomerSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -28880,6 +29816,9 @@ class SapEccSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -28906,6 +29845,7 @@ class SapEccSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -29033,6 +29973,9 @@ class SapHanaSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -29062,6 +30005,7 @@ class SapHanaSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -29263,6 +30207,9 @@ class SapOpenHubSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -29295,6 +30242,7 @@ class SapOpenHubSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'exclude_last_request': {'key': 'excludeLastRequest', 'type': 'object'},
@@ -29625,6 +30573,9 @@ class SapTableSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -29672,6 +30623,7 @@ class SapTableSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'row_count': {'key': 'rowCount', 'type': 'object'},
@@ -29806,9 +30758,8 @@ class ScriptAction(msrest.serialization.Model):
:type name: str
:param uri: Required. The URI for the script action.
:type uri: str
- :param roles: Required. The node types on which the script action should be executed. Possible
- values include: "Headnode", "Workernode", "Zookeeper".
- :type roles: str or ~data_factory_management_client.models.HdiNodeTypes
+ :param roles: Required. The node types on which the script action should be executed.
+ :type roles: str
:param parameters: The parameters for the script action.
:type parameters: str
"""
@@ -30364,6 +31315,9 @@ class ServiceNowSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -30385,6 +31339,7 @@ class ServiceNowSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -30399,6 +31354,54 @@ def __init__(
self.query = kwargs.get('query', None)
+class ServicePrincipalCredential(Credential):
+ """Service principal credential.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :type additional_properties: dict[str, object]
+ :param type: Required. Type of credential. Constant filled by server.
+ :type type: str
+ :param description: Credential description.
+ :type description: str
+ :param annotations: List of tags that can be used for describing the Credential.
+ :type annotations: list[object]
+ :param service_principal_id: The app ID of the service principal used to authenticate.
+ :type service_principal_id: object
+ :param service_principal_key: The key of the service principal used to authenticate.
+ :type service_principal_key:
+ ~data_factory_management_client.models.AzureKeyVaultSecretReference
+ :param tenant: The ID of the tenant to which the service principal belongs.
+ :type tenant: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
+ 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'AzureKeyVaultSecretReference'},
+ 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ServicePrincipalCredential, self).__init__(**kwargs)
+ self.type = 'ServicePrincipal' # type: str
+ self.service_principal_id = kwargs.get('service_principal_id', None)
+ self.service_principal_key = kwargs.get('service_principal_key', None)
+ self.tenant = kwargs.get('tenant', None)
+
+
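A construction sketch for the new credential type; the key is referenced out of Key Vault via a secret reference rather than inlined (all names and IDs below are placeholders):

    from data_factory_management_client.models import (
        AzureKeyVaultSecretReference, LinkedServiceReference,
        ServicePrincipalCredential)

    cred = ServicePrincipalCredential(
        service_principal_id='<app-id>',
        tenant='<tenant-id>',
        service_principal_key=AzureKeyVaultSecretReference(
            store=LinkedServiceReference(
                type='LinkedServiceReference', reference_name='ls_keyvault'),
            secret_name='sp-key',
        ),
    )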
class SetVariableActivity(Activity):
"""Set value for a Variable.
@@ -30499,6 +31502,9 @@ class SftpReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -30536,6 +31542,7 @@ class SftpReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -30678,6 +31685,9 @@ class SftpWriteSettings(StoreWriteSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
:param operation_timeout: Specifies the timeout for writing each chunk to the SFTP server. Default
@@ -30697,6 +31707,7 @@ class SftpWriteSettings(StoreWriteSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
'operation_timeout': {'key': 'operationTimeout', 'type': 'object'},
'use_temp_file_rename': {'key': 'useTempFileRename', 'type': 'object'},
@@ -30864,6 +31875,9 @@ class SharePointOnlineListSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: The OData query to filter the data in SharePoint Online list. For example,
"$top=1". Type: string (or Expression with resultType string).
:type query: object
@@ -30883,6 +31897,7 @@ class SharePointOnlineListSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'},
}
@@ -31047,6 +32062,9 @@ class ShopifySource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -31068,6 +32086,7 @@ class ShopifySource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -31339,6 +32358,9 @@ class SnowflakeSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType
string).
:type pre_copy_script: object
@@ -31358,6 +32380,7 @@ class SnowflakeSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
'import_settings': {'key': 'importSettings', 'type': 'SnowflakeImportCopyCommand'},
}
@@ -31391,6 +32414,9 @@ class SnowflakeSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: Snowflake Sql query. Type: string (or Expression with resultType string).
:type query: object
:param export_settings: Snowflake export settings.
@@ -31407,6 +32433,7 @@ class SnowflakeSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'export_settings': {'key': 'exportSettings', 'type': 'SnowflakeExportCopyCommand'},
}
@@ -31624,6 +32651,9 @@ class SparkSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -31645,6 +32675,7 @@ class SparkSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -31722,6 +32753,9 @@ class SqlDwSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType
string).
:type pre_copy_script: object
@@ -31739,6 +32773,14 @@ class SqlDwSink(CopySink):
:param table_option: The option to handle sink table, such as autoCreate. For now only
'autoCreate' value is supported. Type: string (or Expression with resultType string).
:type table_option: object
+ :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or
+ Expression with resultType boolean).
+ :type sql_writer_use_table_lock: object
+ :param write_behavior: Write behavior when copying data into Azure SQL DW. Type:
+ SqlDWWriteBehaviorEnum (or Expression with resultType SqlDWWriteBehaviorEnum).
+ :type write_behavior: object
+ :param upsert_settings: SQL DW upsert settings.
+ :type upsert_settings: ~data_factory_management_client.models.SqlDwUpsertSettings
"""
_validation = {
@@ -31753,12 +32795,16 @@ class SqlDwSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'},
'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'},
'allow_copy_command': {'key': 'allowCopyCommand', 'type': 'object'},
'copy_command_settings': {'key': 'copyCommandSettings', 'type': 'DwCopyCommandSettings'},
'table_option': {'key': 'tableOption', 'type': 'object'},
+ 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+ 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlDwUpsertSettings'},
}
def __init__(
@@ -31773,6 +32819,9 @@ def __init__(
self.allow_copy_command = kwargs.get('allow_copy_command', None)
self.copy_command_settings = kwargs.get('copy_command_settings', None)
self.table_option = kwargs.get('table_option', None)
+ self.sql_writer_use_table_lock = kwargs.get('sql_writer_use_table_lock', None)
+ self.write_behavior = kwargs.get('write_behavior', None)
+ self.upsert_settings = kwargs.get('upsert_settings', None)
class SqlDwSource(TabularSource):
@@ -31794,6 +32843,9 @@ class SqlDwSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -31828,6 +32880,7 @@ class SqlDwSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'},
@@ -31850,6 +32903,31 @@ def __init__(
self.partition_settings = kwargs.get('partition_settings', None)
+class SqlDwUpsertSettings(msrest.serialization.Model):
+ """Sql DW upsert option settings.
+
+ :param interim_schema_name: Schema name for interim table. Type: string (or Expression with
+ resultType string).
+ :type interim_schema_name: object
+ :param keys: Key column names for unique row identification. Type: array of strings (or
+ Expression with resultType array of strings).
+ :type keys: object
+ """
+
+ _attribute_map = {
+ 'interim_schema_name': {'key': 'interimSchemaName', 'type': 'object'},
+ 'keys': {'key': 'keys', 'type': 'object'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SqlDwUpsertSettings, self).__init__(**kwargs)
+ self.interim_schema_name = kwargs.get('interim_schema_name', None)
+ self.keys = kwargs.get('keys', None)
+
+
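
Taken together, the new SqlDwSink fields allow an upsert-style copy into Synapse. A minimal sketch of wiring them up with the kwargs-based models above (schema and key names are illustrative):

    # Hypothetical upsert configuration for a SQL DW sink.
    upsert = SqlDwUpsertSettings(
        interim_schema_name='staging',   # schema hosting the interim table (illustrative)
        keys=['CustomerId'],             # key columns for unique row identification
    )
    sink = SqlDwSink(
        write_behavior='Upsert',         # a SqlDWWriteBehaviorEnum value, or an Expression
        upsert_settings=upsert,
        sql_writer_use_table_lock=True,  # hold a table lock during the bulk copy
    )
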
class SqlMiSink(CopySink):
"""A copy activity Azure SQL Managed Instance sink.
@@ -31875,6 +32953,9 @@ class SqlMiSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or
Expression with resultType string).
:type sql_writer_stored_procedure_name: object
@@ -31893,6 +32974,14 @@ class SqlMiSink(CopySink):
:param table_option: The option to handle sink table, such as autoCreate. For now only
'autoCreate' value is supported. Type: string (or Expression with resultType string).
:type table_option: object
+ :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or
+ Expression with resultType boolean).
+ :type sql_writer_use_table_lock: object
+ :param write_behavior: Write behavior when copying data into Azure SQL MI. Type:
+ SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum).
+ :type write_behavior: object
+ :param upsert_settings: SQL upsert settings.
+ :type upsert_settings: ~data_factory_management_client.models.SqlUpsertSettings
"""
_validation = {
@@ -31907,12 +32996,16 @@ class SqlMiSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'},
'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'},
'table_option': {'key': 'tableOption', 'type': 'object'},
+ 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+ 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlUpsertSettings'},
}
def __init__(
@@ -31927,6 +33020,9 @@ def __init__(
self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None)
self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None)
self.table_option = kwargs.get('table_option', None)
+ self.sql_writer_use_table_lock = kwargs.get('sql_writer_use_table_lock', None)
+ self.write_behavior = kwargs.get('write_behavior', None)
+ self.upsert_settings = kwargs.get('upsert_settings', None)
class SqlMiSource(TabularSource):
@@ -31948,6 +33044,9 @@ class SqlMiSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -31983,6 +33082,7 @@ class SqlMiSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'},
@@ -32134,6 +33234,9 @@ class SqlServerSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or
Expression with resultType string).
:type sql_writer_stored_procedure_name: object
@@ -32152,6 +33255,14 @@ class SqlServerSink(CopySink):
:param table_option: The option to handle sink table, such as autoCreate. For now only
'autoCreate' value is supported. Type: string (or Expression with resultType string).
:type table_option: object
+ :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or
+ Expression with resultType boolean).
+ :type sql_writer_use_table_lock: object
+ :param write_behavior: Write behavior when copying data into SQL Server. Type:
+ SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum).
+ :type write_behavior: object
+ :param upsert_settings: SQL upsert settings.
+ :type upsert_settings: ~data_factory_management_client.models.SqlUpsertSettings
"""
_validation = {
@@ -32166,12 +33277,16 @@ class SqlServerSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'},
'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'},
'table_option': {'key': 'tableOption', 'type': 'object'},
+ 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+ 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlUpsertSettings'},
}
def __init__(
@@ -32186,6 +33301,9 @@ def __init__(
self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None)
self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None)
self.table_option = kwargs.get('table_option', None)
+ self.sql_writer_use_table_lock = kwargs.get('sql_writer_use_table_lock', None)
+ self.write_behavior = kwargs.get('write_behavior', None)
+ self.upsert_settings = kwargs.get('upsert_settings', None)
class SqlServerSource(TabularSource):
@@ -32207,6 +33325,9 @@ class SqlServerSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -32242,6 +33363,7 @@ class SqlServerSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'},
@@ -32420,6 +33542,9 @@ class SqlSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or
Expression with resultType string).
:type sql_writer_stored_procedure_name: object
@@ -32438,6 +33563,14 @@ class SqlSink(CopySink):
:param table_option: The option to handle sink table, such as autoCreate. For now only
'autoCreate' value is supported. Type: string (or Expression with resultType string).
:type table_option: object
+ :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or
+ Expression with resultType boolean).
+ :type sql_writer_use_table_lock: object
+ :param write_behavior: Write behavior when copying data into SQL. Type: SqlWriteBehaviorEnum
+ (or Expression with resultType SqlWriteBehaviorEnum).
+ :type write_behavior: object
+ :param upsert_settings: SQL upsert settings.
+ :type upsert_settings: ~data_factory_management_client.models.SqlUpsertSettings
"""
_validation = {
@@ -32452,12 +33585,16 @@ class SqlSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'},
'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'},
'table_option': {'key': 'tableOption', 'type': 'object'},
+ 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+ 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlUpsertSettings'},
}
def __init__(
@@ -32472,6 +33609,9 @@ def __init__(
self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None)
self.stored_procedure_table_type_parameter_name = kwargs.get('stored_procedure_table_type_parameter_name', None)
self.table_option = kwargs.get('table_option', None)
+ self.sql_writer_use_table_lock = kwargs.get('sql_writer_use_table_lock', None)
+ self.write_behavior = kwargs.get('write_behavior', None)
+ self.upsert_settings = kwargs.get('upsert_settings', None)
class SqlSource(TabularSource):
@@ -32493,6 +33633,9 @@ class SqlSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -32530,6 +33673,7 @@ class SqlSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'},
@@ -32554,6 +33698,36 @@ def __init__(
self.partition_settings = kwargs.get('partition_settings', None)
+class SqlUpsertSettings(msrest.serialization.Model):
+ """Sql upsert option settings.
+
+ :param use_temp_db: Specifies whether to use temp db for upsert interim table. Type: boolean
+ (or Expression with resultType boolean).
+ :type use_temp_db: object
+ :param interim_schema_name: Schema name for interim table. Type: string (or Expression with
+ resultType string).
+ :type interim_schema_name: object
+ :param keys: Key column names for unique row identification. Type: array of strings (or
+ Expression with resultType array of strings).
+ :type keys: object
+ """
+
+ _attribute_map = {
+ 'use_temp_db': {'key': 'useTempDB', 'type': 'object'},
+ 'interim_schema_name': {'key': 'interimSchemaName', 'type': 'object'},
+ 'keys': {'key': 'keys', 'type': 'object'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SqlUpsertSettings, self).__init__(**kwargs)
+ self.use_temp_db = kwargs.get('use_temp_db', None)
+ self.interim_schema_name = kwargs.get('interim_schema_name', None)
+ self.keys = kwargs.get('keys', None)
+
+
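
SqlUpsertSettings plays the same role for the plain SQL sinks (SqlSink, SqlServerSink, SqlMiSink); a sketch under the same assumptions, with use_temp_db staging the interim table in tempdb:

    # Hypothetical upsert configuration for a generic SQL sink.
    settings = SqlUpsertSettings(
        use_temp_db=True,          # stage the interim table in tempdb
        keys=['OrderId'],          # key columns for unique row identification
    )
    sink = SqlSink(
        write_behavior='Upsert',   # a SqlWriteBehaviorEnum value, or an Expression
        upsert_settings=settings,
    )
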
class SquareLinkedService(LinkedService):
"""Square Service linked service.
@@ -32716,6 +33890,9 @@ class SquareSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -32737,6 +33914,7 @@ class SquareSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -33679,6 +34857,9 @@ class SybaseSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -33699,6 +34880,7 @@ class SybaseSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -34030,6 +35212,9 @@ class TeradataSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -34056,6 +35241,7 @@ class TeradataSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -35107,6 +36293,9 @@ class VerticaSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -35128,6 +36317,7 @@ class VerticaSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -35348,10 +36538,7 @@ def __init__(
class WebActivityAuthentication(msrest.serialization.Model):
"""Web activity authentication properties.
- All required parameters must be populated in order to send to Azure.
-
- :param type: Required. Web activity authentication
- (Basic/ClientCertificate/MSI/ServicePrincipal).
+ :param type: Web activity authentication (Basic/ClientCertificate/MSI/ServicePrincipal).
:type type: str
:param pfx: Base64-encoded contents of a PFX file or Certificate when used for
ServicePrincipal.
@@ -35368,12 +36555,10 @@ class WebActivityAuthentication(msrest.serialization.Model):
:param user_tenant: TenantId for which Azure Auth token will be requested when using
ServicePrincipal Authentication. Type: string (or Expression with resultType string).
:type user_tenant: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
- _validation = {
- 'type': {'required': True},
- }
-
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'pfx': {'key': 'pfx', 'type': 'SecretBase'},
@@ -35381,6 +36566,7 @@ class WebActivityAuthentication(msrest.serialization.Model):
'password': {'key': 'password', 'type': 'SecretBase'},
'resource': {'key': 'resource', 'type': 'object'},
'user_tenant': {'key': 'userTenant', 'type': 'object'},
+ 'credential': {'key': 'credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -35388,12 +36574,13 @@ def __init__(
**kwargs
):
super(WebActivityAuthentication, self).__init__(**kwargs)
- self.type = kwargs['type']
+ self.type = kwargs.get('type', None)
self.pfx = kwargs.get('pfx', None)
self.username = kwargs.get('username', None)
self.password = kwargs.get('password', None)
self.resource = kwargs.get('resource', None)
self.user_tenant = kwargs.get('user_tenant', None)
+ self.credential = kwargs.get('credential', None)
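
With type no longer required, authentication can hang entirely off the new credential reference. A sketch, assuming CredentialReference takes a type and reference_name like the other reference models in this SDK:

    # Hypothetical: MSI auth through a user-assigned credential reference.
    auth = WebActivityAuthentication(
        type='MSI',
        credential=CredentialReference(
            type='CredentialReference',                    # constructor shape assumed
            reference_name='my-user-assigned-credential',  # illustrative name
        ),
    )
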
class WebLinkedServiceTypeProperties(msrest.serialization.Model):
@@ -35699,6 +36886,9 @@ class WebSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
array of objects (or Expression with resultType array of objects).
:type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
@@ -35714,6 +36904,7 @@ class WebSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
}
@@ -35950,6 +37141,9 @@ class XeroSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -35971,6 +37165,7 @@ class XeroSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -36133,6 +37328,9 @@ class XmlSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: Xml store settings.
:type store_settings: ~data_factory_management_client.models.StoreReadSettings
:param format_settings: Xml format settings.
@@ -36152,6 +37350,7 @@ class XmlSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
'format_settings': {'key': 'formatSettings', 'type': 'XmlReadSettings'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
@@ -36355,6 +37554,9 @@ class ZohoSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -36376,6 +37578,7 @@ class ZohoSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py
index f6ebc8328ae..81e4a6fd76d 100644
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py
+++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py
@@ -711,6 +711,9 @@ class CopySource(msrest.serialization.Model):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
"""
_validation = {
@@ -723,6 +726,7 @@ class CopySource(msrest.serialization.Model):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
}
_subtype_map = {
@@ -736,6 +740,7 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
**kwargs
):
super(CopySource, self).__init__(**kwargs)
@@ -744,6 +749,7 @@ def __init__(
self.source_retry_count = source_retry_count
self.source_retry_wait = source_retry_wait
self.max_concurrent_connections = max_concurrent_connections
+ self.disable_metrics_collection = disable_metrics_collection
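
Every source, sink and read-settings model in this patch gains the same object-typed flag, so it can carry either a literal boolean or an ADF Expression. A minimal sketch using the keyword-only py3 models:

    # Hypothetical: opt a copy source out of data store metrics collection.
    src = AvroSource(disable_metrics_collection=True)

    # Because the field is object-typed, an Expression should pass through as-is.
    expr = {'value': '@pipeline().parameters.noMetrics', 'type': 'Expression'}
    src2 = AvroSource(disable_metrics_collection=expr)
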
class TabularSource(CopySource):
@@ -768,6 +774,9 @@ class TabularSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -786,6 +795,7 @@ class TabularSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
}
@@ -801,11 +811,12 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
**kwargs
):
- super(TabularSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(TabularSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'TabularSource' # type: str
self.query_timeout = query_timeout
self.additional_columns = additional_columns
@@ -830,6 +841,9 @@ class AmazonMwsSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -851,6 +865,7 @@ class AmazonMwsSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -863,12 +878,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
**kwargs
):
- super(AmazonMwsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(AmazonMwsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'AmazonMWSSource' # type: str
self.query = query
@@ -977,6 +993,9 @@ class AmazonRedshiftSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -1001,6 +1020,7 @@ class AmazonRedshiftSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -1014,13 +1034,14 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
redshift_unload_settings: Optional["RedshiftUnloadSettings"] = None,
**kwargs
):
- super(AmazonRedshiftSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(AmazonRedshiftSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'AmazonRedshiftSource' # type: str
self.query = query
self.redshift_unload_settings = redshift_unload_settings
@@ -1307,6 +1328,9 @@ class StoreReadSettings(msrest.serialization.Model):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
"""
_validation = {
@@ -1317,6 +1341,7 @@ class StoreReadSettings(msrest.serialization.Model):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
}
_subtype_map = {
@@ -1328,12 +1353,14 @@ def __init__(
*,
additional_properties: Optional[Dict[str, object]] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
**kwargs
):
super(StoreReadSettings, self).__init__(**kwargs)
self.additional_properties = additional_properties
self.type = 'StoreReadSettings' # type: str
self.max_concurrent_connections = max_concurrent_connections
+ self.disable_metrics_collection = disable_metrics_collection
class AmazonS3CompatibleReadSettings(StoreReadSettings):
@@ -1349,6 +1376,9 @@ class AmazonS3CompatibleReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -1389,6 +1419,7 @@ class AmazonS3CompatibleReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -1406,6 +1437,7 @@ def __init__(
*,
additional_properties: Optional[Dict[str, object]] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
recursive: Optional[object] = None,
wildcard_folder_path: Optional[object] = None,
wildcard_file_name: Optional[object] = None,
@@ -1418,7 +1450,7 @@ def __init__(
modified_datetime_end: Optional[object] = None,
**kwargs
):
- super(AmazonS3CompatibleReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AmazonS3CompatibleReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AmazonS3CompatibleReadSettings' # type: str
self.recursive = recursive
self.wildcard_folder_path = wildcard_folder_path
@@ -1692,6 +1724,9 @@ class AmazonS3ReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -1732,6 +1767,7 @@ class AmazonS3ReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -1749,6 +1785,7 @@ def __init__(
*,
additional_properties: Optional[Dict[str, object]] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
recursive: Optional[object] = None,
wildcard_folder_path: Optional[object] = None,
wildcard_file_name: Optional[object] = None,
@@ -1761,7 +1798,7 @@ def __init__(
modified_datetime_end: Optional[object] = None,
**kwargs
):
- super(AmazonS3ReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AmazonS3ReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AmazonS3ReadSettings' # type: str
self.recursive = recursive
self.wildcard_folder_path = wildcard_folder_path
@@ -1887,10 +1924,9 @@ class AvroDataset(Dataset):
:type folder: ~data_factory_management_client.models.DatasetFolder
:param location: The location of the avro storage.
:type location: ~data_factory_management_client.models.DatasetLocation
- :param avro_compression_codec: Possible values include: "none", "deflate", "snappy", "xz",
- "bzip2".
- :type avro_compression_codec: str or
- ~data_factory_management_client.models.AvroCompressionCodec
+ :param avro_compression_codec: The avro compression codec. Type: string (or Expression with
+ resultType string).
+ :type avro_compression_codec: object
:param avro_compression_level:
:type avro_compression_level: int
"""
@@ -1912,7 +1948,7 @@ class AvroDataset(Dataset):
'annotations': {'key': 'annotations', 'type': '[object]'},
'folder': {'key': 'folder', 'type': 'DatasetFolder'},
'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'},
- 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'str'},
+ 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'object'},
'avro_compression_level': {'key': 'typeProperties.avroCompressionLevel', 'type': 'int'},
}
@@ -1928,7 +1964,7 @@ def __init__(
annotations: Optional[List[object]] = None,
folder: Optional["DatasetFolder"] = None,
location: Optional["DatasetLocation"] = None,
- avro_compression_codec: Optional[Union[str, "AvroCompressionCodec"]] = None,
+ avro_compression_codec: Optional[object] = None,
avro_compression_level: Optional[int] = None,
**kwargs
):
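
Loosening avro_compression_codec from the AvroCompressionCodec enum to object means the codec can now be parameterized. A sketch (the linked service reference and parameter name are illustrative, and the reference constructor shape is assumed):

    # Hypothetical: compression codec driven by a dataset parameter.
    codec = {'value': '@dataset().codec', 'type': 'Expression'}
    ds = AvroDataset(
        linked_service_name=LinkedServiceReference(
            type='LinkedServiceReference', reference_name='ls_blob'),
        avro_compression_codec=codec,  # previously restricted to enum strings like 'snappy'
    )
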
@@ -2031,7 +2067,7 @@ class CopySink(msrest.serialization.Model):
"""A copy activity sink.
You probably want to use the sub-classes and not this class directly. Known
- sub-classes are: AvroSink, AzureBlobFsSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureDatabricksDeltaLakeSink, AzureMySqlSink, AzurePostgreSqlSink, AzureQueueSink, AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDbMongoDbApiSink, CosmosDbSqlApiSink, DelimitedTextSink, DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SqlDwSink, SqlMiSink, SqlServerSink, SqlSink.
+ sub-classes are: AvroSink, AzureBlobFsSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureDatabricksDeltaLakeSink, AzureMySqlSink, AzurePostgreSqlSink, AzureQueueSink, AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDbMongoDbApiSink, CosmosDbSqlApiSink, DelimitedTextSink, DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, MongoDbAtlasSink, MongoDbV2Sink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SqlDwSink, SqlMiSink, SqlServerSink, SqlSink.
All required parameters must be populated in order to send to Azure.
@@ -2055,6 +2091,9 @@ class CopySink(msrest.serialization.Model):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
"""
_validation = {
@@ -2069,10 +2108,11 @@ class CopySink(msrest.serialization.Model):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
}
_subtype_map = {
- 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFsSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureDatabricksDeltaLakeSink': 'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSqlSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'RestSink': 'RestSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDwSink', 'SqlMISink': 'SqlMiSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink'}
+ 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFsSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureDatabricksDeltaLakeSink': 'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSqlSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'MongoDbAtlasSink': 'MongoDbAtlasSink', 'MongoDbV2Sink': 'MongoDbV2Sink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'RestSink': 'RestSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDwSink', 'SqlMISink': 'SqlMiSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink'}
}
def __init__(
@@ -2084,6 +2124,7 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
**kwargs
):
super(CopySink, self).__init__(**kwargs)
@@ -2094,6 +2135,7 @@ def __init__(
self.sink_retry_count = sink_retry_count
self.sink_retry_wait = sink_retry_wait
self.max_concurrent_connections = max_concurrent_connections
+ self.disable_metrics_collection = disable_metrics_collection
class AvroSink(CopySink):
@@ -2121,6 +2163,9 @@ class AvroSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: Avro store settings.
:type store_settings: ~data_factory_management_client.models.StoreWriteSettings
:param format_settings: Avro format settings.
@@ -2139,6 +2184,7 @@ class AvroSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'},
'format_settings': {'key': 'formatSettings', 'type': 'AvroWriteSettings'},
}
@@ -2152,11 +2198,12 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
store_settings: Optional["StoreWriteSettings"] = None,
format_settings: Optional["AvroWriteSettings"] = None,
**kwargs
):
- super(AvroSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AvroSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AvroSink' # type: str
self.store_settings = store_settings
self.format_settings = format_settings
@@ -2181,6 +2228,9 @@ class AvroSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: Avro store settings.
:type store_settings: ~data_factory_management_client.models.StoreReadSettings
:param additional_columns: Specifies the additional columns to be added to source data. Type:
@@ -2198,6 +2248,7 @@ class AvroSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
}
@@ -2209,11 +2260,12 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
store_settings: Optional["StoreReadSettings"] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
**kwargs
):
- super(AvroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AvroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AvroSource' # type: str
self.store_settings = store_settings
self.additional_columns = additional_columns
@@ -2411,6 +2463,8 @@ class AzureBatchLinkedService(LinkedService):
encrypted using the integration runtime credential manager. Type: string (or Expression with
resultType string).
:type encrypted_credential: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -2434,6 +2488,7 @@ class AzureBatchLinkedService(LinkedService):
'pool_name': {'key': 'typeProperties.poolName', 'type': 'object'},
'linked_service_name': {'key': 'typeProperties.linkedServiceName', 'type': 'LinkedServiceReference'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -2450,6 +2505,7 @@ def __init__(
annotations: Optional[List[object]] = None,
access_key: Optional["SecretBase"] = None,
encrypted_credential: Optional[object] = None,
+ credential: Optional["CredentialReference"] = None,
**kwargs
):
super(AzureBatchLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
@@ -2460,6 +2516,7 @@ def __init__(
self.pool_name = pool_name
self.linked_service_name = linked_service_name
self.encrypted_credential = encrypted_credential
+ self.credential = credential
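
The same credential hook lands on the linked services; a sketch of an Azure Batch linked service using it (account values are placeholders, and constructor shapes for the reference models are assumed):

    # Hypothetical: Azure Batch linked service authenticating via a credential reference.
    ls = AzureBatchLinkedService(
        account_name='mybatchaccount',
        batch_uri='https://mybatchaccount.westus2.batch.azure.com',
        pool_name='pool01',
        linked_service_name=LinkedServiceReference(
            type='LinkedServiceReference', reference_name='ls_storage'),
        credential=CredentialReference(
            type='CredentialReference', reference_name='my-credential'),
    )
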
class AzureBlobDataset(Dataset):
@@ -2691,6 +2748,8 @@ class AzureBlobFsLinkedService(LinkedService):
encrypted using the integration runtime credential manager. Type: string (or Expression with
resultType string).
:type encrypted_credential: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -2712,6 +2771,7 @@ class AzureBlobFsLinkedService(LinkedService):
'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -2729,6 +2789,7 @@ def __init__(
tenant: Optional[object] = None,
azure_cloud_type: Optional[object] = None,
encrypted_credential: Optional[object] = None,
+ credential: Optional["CredentialReference"] = None,
**kwargs
):
super(AzureBlobFsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
@@ -2740,6 +2801,7 @@ def __init__(
self.tenant = tenant
self.azure_cloud_type = azure_cloud_type
self.encrypted_credential = encrypted_credential
+ self.credential = credential
class AzureBlobFsLocation(DatasetLocation):
@@ -2802,6 +2864,9 @@ class AzureBlobFsReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -2839,6 +2904,7 @@ class AzureBlobFsReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -2855,6 +2921,7 @@ def __init__(
*,
additional_properties: Optional[Dict[str, object]] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
recursive: Optional[object] = None,
wildcard_folder_path: Optional[object] = None,
wildcard_file_name: Optional[object] = None,
@@ -2866,7 +2933,7 @@ def __init__(
modified_datetime_end: Optional[object] = None,
**kwargs
):
- super(AzureBlobFsReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AzureBlobFsReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AzureBlobFSReadSettings' # type: str
self.recursive = recursive
self.wildcard_folder_path = wildcard_folder_path
@@ -2904,8 +2971,14 @@ class AzureBlobFsSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
+ :param metadata: Specify the custom metadata to be added to sink data. Type: array of objects
+ (or Expression with resultType array of objects).
+ :type metadata: list[~data_factory_management_client.models.MetadataItem]
"""
_validation = {
@@ -2920,7 +2993,9 @@ class AzureBlobFsSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
+ 'metadata': {'key': 'metadata', 'type': '[MetadataItem]'},
}
def __init__(
@@ -2932,12 +3007,15 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
copy_behavior: Optional[object] = None,
+ metadata: Optional[List["MetadataItem"]] = None,
**kwargs
):
- super(AzureBlobFsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AzureBlobFsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AzureBlobFSSink' # type: str
self.copy_behavior = copy_behavior
+ self.metadata = metadata
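The sink-side additions pair the metrics flag with a metadata list. A sketch, assuming MetadataItem (defined outside this hunk) carries simple name/value pairs:

    from azext_datafactory.vendored_sdks.datafactory.models import (
        AzureBlobFsSink,
        MetadataItem,  # assumed name/value shape; its definition is not in this hunk
    )

    sink = AzureBlobFsSink(
        copy_behavior="PreserveHierarchy",  # illustrative; the field is untyped
        disable_metrics_collection=False,
        metadata=[MetadataItem(name="ingestedBy", value="adf-copy")],
    )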
class AzureBlobFsSource(CopySource):
@@ -2959,6 +3037,9 @@ class AzureBlobFsSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType
boolean).
:type treat_empty_as_null: object
@@ -2980,6 +3061,7 @@ class AzureBlobFsSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'},
'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
@@ -2992,12 +3074,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
treat_empty_as_null: Optional[object] = None,
skip_header_line_count: Optional[object] = None,
recursive: Optional[object] = None,
**kwargs
):
- super(AzureBlobFsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AzureBlobFsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AzureBlobFSSource' # type: str
self.treat_empty_as_null = treat_empty_as_null
self.skip_header_line_count = skip_header_line_count
@@ -3020,6 +3103,9 @@ class StoreWriteSettings(msrest.serialization.Model):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
"""
@@ -3032,6 +3118,7 @@ class StoreWriteSettings(msrest.serialization.Model):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
}
@@ -3044,6 +3131,7 @@ def __init__(
*,
additional_properties: Optional[Dict[str, object]] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
copy_behavior: Optional[object] = None,
**kwargs
):
@@ -3051,6 +3139,7 @@ def __init__(
self.additional_properties = additional_properties
self.type = 'StoreWriteSettings' # type: str
self.max_concurrent_connections = max_concurrent_connections
+ self.disable_metrics_collection = disable_metrics_collection
self.copy_behavior = copy_behavior
@@ -3067,6 +3156,9 @@ class AzureBlobFsWriteSettings(StoreWriteSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
:param block_size_in_mb: Indicates the block size (MB) when writing data to blob. Type: integer
@@ -3082,6 +3174,7 @@ class AzureBlobFsWriteSettings(StoreWriteSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'},
}
@@ -3091,11 +3184,12 @@ def __init__(
*,
additional_properties: Optional[Dict[str, object]] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
copy_behavior: Optional[object] = None,
block_size_in_mb: Optional[object] = None,
**kwargs
):
- super(AzureBlobFsWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs)
+ super(AzureBlobFsWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs)
self.type = 'AzureBlobFSWriteSettings' # type: str
self.block_size_in_mb = block_size_in_mb
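Write settings follow the same pattern: the subclass accepts the flag and hands it to StoreWriteSettings via the super() call. A minimal sketch (import path assumed, as above):

    from azext_datafactory.vendored_sdks.datafactory.models import AzureBlobFsWriteSettings

    # Both keywords ride the keyword-only chain into StoreWriteSettings above.
    write_settings = AzureBlobFsWriteSettings(
        block_size_in_mb=8,
        disable_metrics_collection=True,
    )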
@@ -3153,6 +3247,8 @@ class AzureBlobStorageLinkedService(LinkedService):
encrypted using the integration runtime credential manager. Type: string (or Expression with
resultType string).
:type encrypted_credential: str
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -3177,6 +3273,7 @@ class AzureBlobStorageLinkedService(LinkedService):
'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'},
'account_kind': {'key': 'typeProperties.accountKind', 'type': 'str'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -3198,6 +3295,7 @@ def __init__(
azure_cloud_type: Optional[object] = None,
account_kind: Optional[str] = None,
encrypted_credential: Optional[str] = None,
+ credential: Optional["CredentialReference"] = None,
**kwargs
):
super(AzureBlobStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
@@ -3213,6 +3311,7 @@ def __init__(
self.azure_cloud_type = azure_cloud_type
self.account_kind = account_kind
self.encrypted_credential = encrypted_credential
+ self.credential = credential
class AzureBlobStorageLocation(DatasetLocation):
@@ -3275,6 +3374,9 @@ class AzureBlobStorageReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -3315,6 +3417,7 @@ class AzureBlobStorageReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -3332,6 +3435,7 @@ def __init__(
*,
additional_properties: Optional[Dict[str, object]] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
recursive: Optional[object] = None,
wildcard_folder_path: Optional[object] = None,
wildcard_file_name: Optional[object] = None,
@@ -3344,7 +3448,7 @@ def __init__(
modified_datetime_end: Optional[object] = None,
**kwargs
):
- super(AzureBlobStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AzureBlobStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AzureBlobStorageReadSettings' # type: str
self.recursive = recursive
self.wildcard_folder_path = wildcard_folder_path
@@ -3371,6 +3475,9 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
:param block_size_in_mb: Indicates the block size (MB) when writing data to blob. Type: integer
@@ -3386,6 +3493,7 @@ class AzureBlobStorageWriteSettings(StoreWriteSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
'block_size_in_mb': {'key': 'blockSizeInMB', 'type': 'object'},
}
@@ -3395,11 +3503,12 @@ def __init__(
*,
additional_properties: Optional[Dict[str, object]] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
copy_behavior: Optional[object] = None,
block_size_in_mb: Optional[object] = None,
**kwargs
):
- super(AzureBlobStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs)
+ super(AzureBlobStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs)
self.type = 'AzureBlobStorageWriteSettings' # type: str
self.block_size_in_mb = block_size_in_mb
@@ -3741,6 +3850,9 @@ class AzureDatabricksDeltaLakeSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType
string).
:type pre_copy_script: object
@@ -3761,6 +3873,7 @@ class AzureDatabricksDeltaLakeSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
'import_settings': {'key': 'importSettings', 'type': 'AzureDatabricksDeltaLakeImportCommand'},
}
@@ -3774,11 +3887,12 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
pre_copy_script: Optional[object] = None,
import_settings: Optional["AzureDatabricksDeltaLakeImportCommand"] = None,
**kwargs
):
- super(AzureDatabricksDeltaLakeSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AzureDatabricksDeltaLakeSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AzureDatabricksDeltaLakeSink' # type: str
self.pre_copy_script = pre_copy_script
self.import_settings = import_settings
@@ -3803,6 +3917,9 @@ class AzureDatabricksDeltaLakeSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: Azure Databricks Delta Lake SQL query. Type: string (or Expression with
resultType string).
:type query: object
@@ -3821,6 +3938,7 @@ class AzureDatabricksDeltaLakeSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'export_settings': {'key': 'exportSettings', 'type': 'AzureDatabricksDeltaLakeExportCommand'},
}
@@ -3832,11 +3950,12 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query: Optional[object] = None,
export_settings: Optional["AzureDatabricksDeltaLakeExportCommand"] = None,
**kwargs
):
- super(AzureDatabricksDeltaLakeSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AzureDatabricksDeltaLakeSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AzureDatabricksDeltaLakeSource' # type: str
self.query = query
self.export_settings = export_settings
@@ -3925,6 +4044,8 @@ class AzureDatabricksLinkedService(LinkedService):
:param policy_id: The policy id for limiting the ability to configure clusters based on a user
defined set of rules. Type: string (or Expression with resultType string).
:type policy_id: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -3957,6 +4078,7 @@ class AzureDatabricksLinkedService(LinkedService):
'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
'policy_id': {'key': 'typeProperties.policyId', 'type': 'object'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -3985,6 +4107,7 @@ def __init__(
new_cluster_enable_elastic_disk: Optional[object] = None,
encrypted_credential: Optional[object] = None,
policy_id: Optional[object] = None,
+ credential: Optional["CredentialReference"] = None,
**kwargs
):
super(AzureDatabricksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
@@ -4007,6 +4130,7 @@ def __init__(
self.new_cluster_enable_elastic_disk = new_cluster_enable_elastic_disk
self.encrypted_credential = encrypted_credential
self.policy_id = policy_id
+ self.credential = credential
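The dotted _attribute_map keys ('typeProperties.credential') are what nest the new field correctly on the wire; msrest's Model.serialize(), which these generated classes inherit, flattens them back out. A sketch of the round trip, with illustrative values:

    from azext_datafactory.vendored_sdks.datafactory.models import (
        AzureDatabricksLinkedService,
        CredentialReference,
    )

    ls = AzureDatabricksLinkedService(
        domain="https://adb-1234.azuredatabricks.net",  # illustrative workspace URL
        credential=CredentialReference(reference_name="my-user-assigned-identity"),
    )
    body = ls.serialize()
    # The reference lands under body["typeProperties"]["credential"], keyed by
    # the dotted path in _attribute_map rather than by the Python attribute name.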
class ExecutionActivity(Activity):
@@ -4177,6 +4301,8 @@ class AzureDataExplorerLinkedService(LinkedService):
:param tenant: The name or ID of the tenant to which the service principal belongs. Type:
string (or Expression with resultType string).
:type tenant: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -4197,6 +4323,7 @@ class AzureDataExplorerLinkedService(LinkedService):
'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
'database': {'key': 'typeProperties.database', 'type': 'object'},
'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -4212,6 +4339,7 @@ def __init__(
service_principal_id: Optional[object] = None,
service_principal_key: Optional["SecretBase"] = None,
tenant: Optional[object] = None,
+ credential: Optional["CredentialReference"] = None,
**kwargs
):
super(AzureDataExplorerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
@@ -4221,6 +4349,7 @@ def __init__(
self.service_principal_key = service_principal_key
self.database = database
self.tenant = tenant
+ self.credential = credential
class AzureDataExplorerSink(CopySink):
@@ -4248,6 +4377,9 @@ class AzureDataExplorerSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param ingestion_mapping_name: The name of a pre-created CSV mapping defined on the target
Kusto table. Type: string.
:type ingestion_mapping_name: object
@@ -4271,6 +4403,7 @@ class AzureDataExplorerSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'ingestion_mapping_name': {'key': 'ingestionMappingName', 'type': 'object'},
'ingestion_mapping_as_json': {'key': 'ingestionMappingAsJson', 'type': 'object'},
'flush_immediately': {'key': 'flushImmediately', 'type': 'object'},
@@ -4285,12 +4418,13 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
ingestion_mapping_name: Optional[object] = None,
ingestion_mapping_as_json: Optional[object] = None,
flush_immediately: Optional[object] = None,
**kwargs
):
- super(AzureDataExplorerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AzureDataExplorerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AzureDataExplorerSink' # type: str
self.ingestion_mapping_name = ingestion_mapping_name
self.ingestion_mapping_as_json = ingestion_mapping_as_json
@@ -4316,6 +4450,9 @@ class AzureDataExplorerSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: Required. Database query. Should be a Kusto Query Language (KQL) query. Type:
string (or Expression with resultType string).
:type query: object
@@ -4341,6 +4478,7 @@ class AzureDataExplorerSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'no_truncation': {'key': 'noTruncation', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
@@ -4355,12 +4493,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
no_truncation: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
**kwargs
):
- super(AzureDataExplorerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AzureDataExplorerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AzureDataExplorerSource' # type: str
self.query = query
self.no_truncation = no_truncation
@@ -4668,6 +4807,8 @@ class AzureDataLakeStoreLinkedService(LinkedService):
encrypted using the integration runtime credential manager. Type: string (or Expression with
resultType string).
:type encrypted_credential: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -4691,6 +4832,7 @@ class AzureDataLakeStoreLinkedService(LinkedService):
'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'},
'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -4710,6 +4852,7 @@ def __init__(
subscription_id: Optional[object] = None,
resource_group_name: Optional[object] = None,
encrypted_credential: Optional[object] = None,
+ credential: Optional["CredentialReference"] = None,
**kwargs
):
super(AzureDataLakeStoreLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
@@ -4723,6 +4866,7 @@ def __init__(
self.subscription_id = subscription_id
self.resource_group_name = resource_group_name
self.encrypted_credential = encrypted_credential
+ self.credential = credential
class AzureDataLakeStoreLocation(DatasetLocation):
@@ -4779,6 +4923,9 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -4824,6 +4971,7 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -4842,6 +4990,7 @@ def __init__(
*,
additional_properties: Optional[Dict[str, object]] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
recursive: Optional[object] = None,
wildcard_folder_path: Optional[object] = None,
wildcard_file_name: Optional[object] = None,
@@ -4855,7 +5004,7 @@ def __init__(
modified_datetime_end: Optional[object] = None,
**kwargs
):
- super(AzureDataLakeStoreReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AzureDataLakeStoreReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AzureDataLakeStoreReadSettings' # type: str
self.recursive = recursive
self.wildcard_folder_path = wildcard_folder_path
@@ -4895,6 +5044,9 @@ class AzureDataLakeStoreSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
:param enable_adls_single_file_parallel: Single File Parallel.
@@ -4913,6 +5065,7 @@ class AzureDataLakeStoreSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
'enable_adls_single_file_parallel': {'key': 'enableAdlsSingleFileParallel', 'type': 'object'},
}
@@ -4926,11 +5079,12 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
copy_behavior: Optional[object] = None,
enable_adls_single_file_parallel: Optional[object] = None,
**kwargs
):
- super(AzureDataLakeStoreSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AzureDataLakeStoreSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AzureDataLakeStoreSink' # type: str
self.copy_behavior = copy_behavior
self.enable_adls_single_file_parallel = enable_adls_single_file_parallel
@@ -4955,6 +5109,9 @@ class AzureDataLakeStoreSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -4970,6 +5127,7 @@ class AzureDataLakeStoreSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
}
@@ -4980,10 +5138,11 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
recursive: Optional[object] = None,
**kwargs
):
- super(AzureDataLakeStoreSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AzureDataLakeStoreSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AzureDataLakeStoreSource' # type: str
self.recursive = recursive
@@ -5001,6 +5160,9 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
:param expiry_date_time: Specifies the expiry time of the written files. The time is applied to
@@ -5017,6 +5179,7 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
'expiry_date_time': {'key': 'expiryDateTime', 'type': 'object'},
}
@@ -5026,11 +5189,12 @@ def __init__(
*,
additional_properties: Optional[Dict[str, object]] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
copy_behavior: Optional[object] = None,
expiry_date_time: Optional[object] = None,
**kwargs
):
- super(AzureDataLakeStoreWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs)
+ super(AzureDataLakeStoreWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs)
self.type = 'AzureDataLakeStoreWriteSettings' # type: str
self.expiry_date_time = expiry_date_time
@@ -5193,6 +5357,9 @@ class AzureFileStorageReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -5233,6 +5400,7 @@ class AzureFileStorageReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -5250,6 +5418,7 @@ def __init__(
*,
additional_properties: Optional[Dict[str, object]] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
recursive: Optional[object] = None,
wildcard_folder_path: Optional[object] = None,
wildcard_file_name: Optional[object] = None,
@@ -5262,7 +5431,7 @@ def __init__(
modified_datetime_end: Optional[object] = None,
**kwargs
):
- super(AzureFileStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AzureFileStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AzureFileStorageReadSettings' # type: str
self.recursive = recursive
self.wildcard_folder_path = wildcard_folder_path
@@ -5289,6 +5458,9 @@ class AzureFileStorageWriteSettings(StoreWriteSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
"""
@@ -5301,6 +5473,7 @@ class AzureFileStorageWriteSettings(StoreWriteSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
}
@@ -5309,10 +5482,11 @@ def __init__(
*,
additional_properties: Optional[Dict[str, object]] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
copy_behavior: Optional[object] = None,
**kwargs
):
- super(AzureFileStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs)
+ super(AzureFileStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs)
self.type = 'AzureFileStorageWriteSettings' # type: str
@@ -5426,6 +5600,13 @@ class AzureFunctionLinkedService(LinkedService):
encrypted using the integration runtime credential manager. Type: string (or Expression with
resultType string).
:type encrypted_credential: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
+ :param resource_id: Allowed token audiences for the Azure Function.
+ :type resource_id: object
+ :param authentication: Type of authentication used to connect to the Azure Function
+ (required when specifying MSI). Type: string (or Expression with resultType string).
+ :type authentication: object
"""
_validation = {
@@ -5443,6 +5624,9 @@ class AzureFunctionLinkedService(LinkedService):
'function_app_url': {'key': 'typeProperties.functionAppUrl', 'type': 'object'},
'function_key': {'key': 'typeProperties.functionKey', 'type': 'SecretBase'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
+ 'resource_id': {'key': 'typeProperties.resourceId', 'type': 'object'},
+ 'authentication': {'key': 'typeProperties.authentication', 'type': 'object'},
}
def __init__(
@@ -5456,6 +5640,9 @@ def __init__(
annotations: Optional[List[object]] = None,
function_key: Optional["SecretBase"] = None,
encrypted_credential: Optional[object] = None,
+ credential: Optional["CredentialReference"] = None,
+ resource_id: Optional[object] = None,
+ authentication: Optional[object] = None,
**kwargs
):
super(AzureFunctionLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
@@ -5463,6 +5650,9 @@ def __init__(
self.function_app_url = function_app_url
self.function_key = function_key
self.encrypted_credential = encrypted_credential
+ self.credential = credential
+ self.resource_id = resource_id
+ self.authentication = authentication
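Besides the credential reference, the Azure Function linked service can now authenticate with MSI directly: authentication selects the mode and resource_id supplies the token audience. A hedged sketch; the "MSI" literal and the audience value are assumptions, not confirmed by this hunk:

    from azext_datafactory.vendored_sdks.datafactory.models import AzureFunctionLinkedService

    fn = AzureFunctionLinkedService(
        function_app_url="https://myfunctionapp.azurewebsites.net",
        authentication="MSI",                       # assumed literal for managed identity
        resource_id="api://my-function-audience",   # illustrative token audience
    )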
class AzureKeyVaultLinkedService(LinkedService):
@@ -5486,6 +5676,8 @@ class AzureKeyVaultLinkedService(LinkedService):
:param base_url: Required. The base URL of the Azure Key Vault, e.g.
https://myakv.vault.azure.net. Type: string (or Expression with resultType string).
:type base_url: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -5501,6 +5693,7 @@ class AzureKeyVaultLinkedService(LinkedService):
'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
'annotations': {'key': 'annotations', 'type': '[object]'},
'base_url': {'key': 'typeProperties.baseUrl', 'type': 'object'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -5512,11 +5705,13 @@ def __init__(
description: Optional[str] = None,
parameters: Optional[Dict[str, "ParameterSpecification"]] = None,
annotations: Optional[List[object]] = None,
+ credential: Optional["CredentialReference"] = None,
**kwargs
):
super(AzureKeyVaultLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
self.type = 'AzureKeyVault' # type: str
self.base_url = base_url
+ self.credential = credential
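Key Vault gets the same hook, which is what lets a user-assigned identity stand in for the factory's system identity when reading secrets. A sketch grounded in the base_url requirement above (import path assumed):

    from azext_datafactory.vendored_sdks.datafactory.models import (
        AzureKeyVaultLinkedService,
        CredentialReference,
    )

    akv = AzureKeyVaultLinkedService(
        base_url="https://myakv.vault.azure.net",  # the one required type property
        credential=CredentialReference(reference_name="my-user-assigned-identity"),
    )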
class SecretBase(msrest.serialization.Model):
@@ -5680,6 +5875,9 @@ class AzureMariaDbSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -5701,6 +5899,7 @@ class AzureMariaDbSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -5713,12 +5912,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
**kwargs
):
- super(AzureMariaDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(AzureMariaDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'AzureMariaDBSource' # type: str
self.query = query
@@ -6024,6 +6224,9 @@ class AzureMlLinkedService(LinkedService):
encrypted using the integration runtime credential manager. Type: string (or Expression with
resultType string).
:type encrypted_credential: object
+ :param authentication: Type of authentication used to connect to AzureML (required when
+ specifying MSI). Type: string (or Expression with resultType string).
+ :type authentication: object
"""
_validation = {
@@ -6046,6 +6249,7 @@ class AzureMlLinkedService(LinkedService):
'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'},
'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ 'authentication': {'key': 'typeProperties.authentication', 'type': 'object'},
}
def __init__(
@@ -6063,6 +6267,7 @@ def __init__(
service_principal_key: Optional["SecretBase"] = None,
tenant: Optional[object] = None,
encrypted_credential: Optional[object] = None,
+ authentication: Optional[object] = None,
**kwargs
):
super(AzureMlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
@@ -6074,6 +6279,7 @@ def __init__(
self.service_principal_key = service_principal_key
self.tenant = tenant
self.encrypted_credential = encrypted_credential
+ self.authentication = authentication
class AzureMlServiceLinkedService(LinkedService):
@@ -6375,6 +6581,9 @@ class AzureMySqlSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param pre_copy_script: A query to execute before starting the copy. Type: string (or
Expression with resultType string).
:type pre_copy_script: object
@@ -6392,6 +6601,7 @@ class AzureMySqlSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
}
@@ -6404,10 +6614,11 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
pre_copy_script: Optional[object] = None,
**kwargs
):
- super(AzureMySqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AzureMySqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AzureMySqlSink' # type: str
self.pre_copy_script = pre_copy_script
@@ -6431,6 +6642,9 @@ class AzureMySqlSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -6451,6 +6665,7 @@ class AzureMySqlSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -6463,12 +6678,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
**kwargs
):
- super(AzureMySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(AzureMySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'AzureMySqlSource' # type: str
self.query = query
@@ -6638,6 +6854,9 @@ class AzurePostgreSqlSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param pre_copy_script: A query to execute before starting the copy. Type: string (or
Expression with resultType string).
:type pre_copy_script: object
@@ -6655,6 +6874,7 @@ class AzurePostgreSqlSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
}
@@ -6667,10 +6887,11 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
pre_copy_script: Optional[object] = None,
**kwargs
):
- super(AzurePostgreSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AzurePostgreSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AzurePostgreSqlSink' # type: str
self.pre_copy_script = pre_copy_script
@@ -6694,6 +6915,9 @@ class AzurePostgreSqlSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -6715,6 +6939,7 @@ class AzurePostgreSqlSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -6727,12 +6952,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
**kwargs
):
- super(AzurePostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(AzurePostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'AzurePostgreSqlSource' # type: str
self.query = query
@@ -6843,6 +7069,9 @@ class AzureQueueSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
"""
_validation = {
@@ -6857,6 +7086,7 @@ class AzureQueueSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
}
def __init__(
@@ -6868,9 +7098,10 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
**kwargs
):
- super(AzureQueueSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AzureQueueSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AzureQueueSink' # type: str
@@ -6969,6 +7200,9 @@ class AzureSearchIndexSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param write_behavior: Specify the write behavior when upserting documents into Azure Search
Index. Possible values include: "Merge", "Upload".
:type write_behavior: str or
@@ -6987,6 +7221,7 @@ class AzureSearchIndexSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
}
@@ -6999,10 +7234,11 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
write_behavior: Optional[Union[str, "AzureSearchIndexWriteBehaviorType"]] = None,
**kwargs
):
- super(AzureSearchIndexSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AzureSearchIndexSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AzureSearchIndexSink' # type: str
self.write_behavior = write_behavior
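
A sketch of the sink's behavior (import path assumed): unlike the fields relaxed to `object` elsewhere in this diff, write_behavior here stays enum-typed; in Azure Search terms, "Merge" updates only the supplied fields while "Upload" replaces the whole document.

    from azext_datafactory.vendored_sdks.datafactory.models import AzureSearchIndexSink

    sink = AzureSearchIndexSink(write_behavior="Merge", disable_metrics_collection=True)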
@@ -7116,6 +7352,8 @@ class AzureSqlDatabaseLinkedService(LinkedService):
:param always_encrypted_settings: Sql always encrypted properties.
:type always_encrypted_settings:
~data_factory_management_client.models.SqlAlwaysEncryptedProperties
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -7138,6 +7376,7 @@ class AzureSqlDatabaseLinkedService(LinkedService):
'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
'always_encrypted_settings': {'key': 'typeProperties.alwaysEncryptedSettings', 'type': 'SqlAlwaysEncryptedProperties'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -7156,6 +7395,7 @@ def __init__(
azure_cloud_type: Optional[object] = None,
encrypted_credential: Optional[object] = None,
always_encrypted_settings: Optional["SqlAlwaysEncryptedProperties"] = None,
+ credential: Optional["CredentialReference"] = None,
**kwargs
):
super(AzureSqlDatabaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
@@ -7168,6 +7408,7 @@ def __init__(
self.azure_cloud_type = azure_cloud_type
self.encrypted_credential = encrypted_credential
self.always_encrypted_settings = always_encrypted_settings
+ self.credential = credential
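
A hedged sketch of the new property (import path assumed): the linked service references a named Credential resource in the factory instead of embedding identity details inline.

    from azext_datafactory.vendored_sdks.datafactory.models import (
        AzureSqlDatabaseLinkedService,
        CredentialReference,
    )

    linked_service = AzureSqlDatabaseLinkedService(
        connection_string="Server=tcp:myserver.database.windows.net;Database=mydb;",
        credential=CredentialReference(reference_name="myManagedIdentityCredential"),
    )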
class AzureSqlDwLinkedService(LinkedService):
@@ -7210,6 +7451,8 @@ class AzureSqlDwLinkedService(LinkedService):
encrypted using the integration runtime credential manager. Type: string (or Expression with
resultType string).
:type encrypted_credential: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -7231,6 +7474,7 @@ class AzureSqlDwLinkedService(LinkedService):
'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -7248,6 +7492,7 @@ def __init__(
tenant: Optional[object] = None,
azure_cloud_type: Optional[object] = None,
encrypted_credential: Optional[object] = None,
+ credential: Optional["CredentialReference"] = None,
**kwargs
):
super(AzureSqlDwLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
@@ -7259,6 +7504,7 @@ def __init__(
self.tenant = tenant
self.azure_cloud_type = azure_cloud_type
self.encrypted_credential = encrypted_credential
+ self.credential = credential
class AzureSqlDwTableDataset(Dataset):
@@ -7385,6 +7631,8 @@ class AzureSqlMiLinkedService(LinkedService):
:param always_encrypted_settings: Sql always encrypted properties.
:type always_encrypted_settings:
~data_factory_management_client.models.SqlAlwaysEncryptedProperties
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -7407,6 +7655,7 @@ class AzureSqlMiLinkedService(LinkedService):
'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
'always_encrypted_settings': {'key': 'typeProperties.alwaysEncryptedSettings', 'type': 'SqlAlwaysEncryptedProperties'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -7425,6 +7674,7 @@ def __init__(
azure_cloud_type: Optional[object] = None,
encrypted_credential: Optional[object] = None,
always_encrypted_settings: Optional["SqlAlwaysEncryptedProperties"] = None,
+ credential: Optional["CredentialReference"] = None,
**kwargs
):
super(AzureSqlMiLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
@@ -7437,6 +7687,7 @@ def __init__(
self.azure_cloud_type = azure_cloud_type
self.encrypted_credential = encrypted_credential
self.always_encrypted_settings = always_encrypted_settings
+ self.credential = credential
class AzureSqlMiTableDataset(Dataset):
@@ -7545,6 +7796,9 @@ class AzureSqlSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or
Expression with resultType string).
:type sql_writer_stored_procedure_name: object
@@ -7563,6 +7817,14 @@ class AzureSqlSink(CopySink):
:param table_option: The option to handle the sink table, such as autoCreate. Currently only the
'autoCreate' value is supported. Type: string (or Expression with resultType string).
:type table_option: object
+ :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or
+ Expression with resultType boolean).
+ :type sql_writer_use_table_lock: object
+ :param write_behavior: Write behavior when copying data into Azure SQL. Type:
+ SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum).
+ :type write_behavior: object
+ :param upsert_settings: SQL upsert settings.
+ :type upsert_settings: ~data_factory_management_client.models.SqlUpsertSettings
"""
_validation = {
@@ -7577,12 +7839,16 @@ class AzureSqlSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'},
'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'},
'table_option': {'key': 'tableOption', 'type': 'object'},
+ 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+ 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlUpsertSettings'},
}
def __init__(
@@ -7594,15 +7860,19 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
sql_writer_stored_procedure_name: Optional[object] = None,
sql_writer_table_type: Optional[object] = None,
pre_copy_script: Optional[object] = None,
stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None,
stored_procedure_table_type_parameter_name: Optional[object] = None,
table_option: Optional[object] = None,
+ sql_writer_use_table_lock: Optional[object] = None,
+ write_behavior: Optional[object] = None,
+ upsert_settings: Optional["SqlUpsertSettings"] = None,
**kwargs
):
- super(AzureSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AzureSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AzureSqlSink' # type: str
self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name
self.sql_writer_table_type = sql_writer_table_type
@@ -7610,6 +7880,9 @@ def __init__(
self.stored_procedure_parameters = stored_procedure_parameters
self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name
self.table_option = table_option
+ self.sql_writer_use_table_lock = sql_writer_use_table_lock
+ self.write_behavior = write_behavior
+ self.upsert_settings = upsert_settings
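
A sketch of the new upsert path (import path assumed; SqlUpsertSettings field names follow the service's upsertSettings contract):

    from azext_datafactory.vendored_sdks.datafactory.models import AzureSqlSink, SqlUpsertSettings

    sink = AzureSqlSink(
        write_behavior="Upsert",          # `object`-typed: plain string or Expression dict
        sql_writer_use_table_lock=False,  # table lock applies during bulk copy
        upsert_settings=SqlUpsertSettings(use_temp_db=True, keys=["Id"]),
    )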
class AzureSqlSource(TabularSource):
@@ -7631,6 +7904,9 @@ class AzureSqlSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -7666,6 +7942,7 @@ class AzureSqlSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'},
@@ -7683,6 +7960,7 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
sql_reader_query: Optional[object] = None,
@@ -7693,7 +7971,7 @@ def __init__(
partition_settings: Optional["SqlPartitionSettings"] = None,
**kwargs
):
- super(AzureSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(AzureSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'AzureSqlSource' # type: str
self.sql_reader_query = sql_reader_query
self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name
@@ -7955,6 +8233,9 @@ class AzureTableSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param azure_table_default_partition_key_value: Azure Table default partition key value. Type:
string (or Expression with resultType string).
:type azure_table_default_partition_key_value: object
@@ -7981,6 +8262,7 @@ class AzureTableSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'azure_table_default_partition_key_value': {'key': 'azureTableDefaultPartitionKeyValue', 'type': 'object'},
'azure_table_partition_key_name': {'key': 'azureTablePartitionKeyName', 'type': 'object'},
'azure_table_row_key_name': {'key': 'azureTableRowKeyName', 'type': 'object'},
@@ -7996,13 +8278,14 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
azure_table_default_partition_key_value: Optional[object] = None,
azure_table_partition_key_name: Optional[object] = None,
azure_table_row_key_name: Optional[object] = None,
azure_table_insert_type: Optional[object] = None,
**kwargs
):
- super(AzureTableSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(AzureTableSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'AzureTableSink' # type: str
self.azure_table_default_partition_key_value = azure_table_default_partition_key_value
self.azure_table_partition_key_name = azure_table_partition_key_name
@@ -8029,6 +8312,9 @@ class AzureTableSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -8053,6 +8339,7 @@ class AzureTableSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'},
@@ -8066,13 +8353,14 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
azure_table_source_query: Optional[object] = None,
azure_table_source_ignore_table_not_found: Optional[object] = None,
**kwargs
):
- super(AzureTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(AzureTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'AzureTableSource' # type: str
self.azure_table_source_query = azure_table_source_query
self.azure_table_source_ignore_table_not_found = azure_table_source_ignore_table_not_found
@@ -8327,6 +8615,9 @@ class BinarySink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: Binary store settings.
:type store_settings: ~data_factory_management_client.models.StoreWriteSettings
"""
@@ -8343,6 +8634,7 @@ class BinarySink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'},
}
@@ -8355,10 +8647,11 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
store_settings: Optional["StoreWriteSettings"] = None,
**kwargs
):
- super(BinarySink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(BinarySink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'BinarySink' # type: str
self.store_settings = store_settings
@@ -8382,6 +8675,9 @@ class BinarySource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: Binary store settings.
:type store_settings: ~data_factory_management_client.models.StoreReadSettings
:param format_settings: Binary format settings.
@@ -8398,6 +8694,7 @@ class BinarySource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
'format_settings': {'key': 'formatSettings', 'type': 'BinaryReadSettings'},
}
@@ -8409,11 +8706,12 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
store_settings: Optional["StoreReadSettings"] = None,
format_settings: Optional["BinaryReadSettings"] = None,
**kwargs
):
- super(BinarySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(BinarySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'BinarySource' # type: str
self.store_settings = store_settings
self.format_settings = format_settings
@@ -8641,6 +8939,9 @@ class BlobSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param blob_writer_overwrite_files: Blob writer overwrite files. Type: boolean (or Expression
with resultType boolean).
:type blob_writer_overwrite_files: object
@@ -8652,6 +8953,9 @@ class BlobSink(CopySink):
:type blob_writer_add_header: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
+ :param metadata: Specify the custom metadata to be added to sink data. Type: array of objects
+ (or Expression with resultType array of objects).
+ :type metadata: list[~data_factory_management_client.models.MetadataItem]
"""
_validation = {
@@ -8666,10 +8970,12 @@ class BlobSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'blob_writer_overwrite_files': {'key': 'blobWriterOverwriteFiles', 'type': 'object'},
'blob_writer_date_time_format': {'key': 'blobWriterDateTimeFormat', 'type': 'object'},
'blob_writer_add_header': {'key': 'blobWriterAddHeader', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
+ 'metadata': {'key': 'metadata', 'type': '[MetadataItem]'},
}
def __init__(
@@ -8681,18 +8987,21 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
blob_writer_overwrite_files: Optional[object] = None,
blob_writer_date_time_format: Optional[object] = None,
blob_writer_add_header: Optional[object] = None,
copy_behavior: Optional[object] = None,
+ metadata: Optional[List["MetadataItem"]] = None,
**kwargs
):
- super(BlobSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(BlobSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'BlobSink' # type: str
self.blob_writer_overwrite_files = blob_writer_overwrite_files
self.blob_writer_date_time_format = blob_writer_date_time_format
self.blob_writer_add_header = blob_writer_add_header
self.copy_behavior = copy_behavior
+ self.metadata = metadata
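
A sketch of attaching custom metadata (import path assumed; MetadataItem carries name/value pairs, both `object`-typed so expressions are allowed):

    from azext_datafactory.vendored_sdks.datafactory.models import BlobSink, MetadataItem

    sink = BlobSink(
        metadata=[
            MetadataItem(name="ingestedBy", value="adf-copy"),
            MetadataItem(name="runId", value={"value": "@pipeline().RunId", "type": "Expression"}),
        ],
    )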
class BlobSource(CopySource):
@@ -8714,6 +9023,9 @@ class BlobSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param treat_empty_as_null: Treat empty as null. Type: boolean (or Expression with resultType
boolean).
:type treat_empty_as_null: object
@@ -8735,6 +9047,7 @@ class BlobSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'treat_empty_as_null': {'key': 'treatEmptyAsNull', 'type': 'object'},
'skip_header_line_count': {'key': 'skipHeaderLineCount', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
@@ -8747,12 +9060,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
treat_empty_as_null: Optional[object] = None,
skip_header_line_count: Optional[object] = None,
recursive: Optional[object] = None,
**kwargs
):
- super(BlobSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(BlobSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'BlobSource' # type: str
self.treat_empty_as_null = treat_empty_as_null
self.skip_header_line_count = skip_header_line_count
@@ -8931,6 +9245,9 @@ class CassandraSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -8960,6 +9277,7 @@ class CassandraSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -8973,13 +9291,14 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
consistency_level: Optional[Union[str, "CassandraSourceReadConsistencyLevels"]] = None,
**kwargs
):
- super(CassandraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(CassandraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'CassandraSource' # type: str
self.query = query
self.consistency_level = consistency_level
@@ -9325,8 +9644,8 @@ class CommonDataServiceForAppsLinkedService(LinkedService):
:param deployment_type: Required. The deployment type of the Common Data Service for Apps
instance. 'Online' for Common Data Service for Apps Online and 'OnPremisesWithIfd' for Common
Data Service for Apps on-premises with Ifd. Type: string (or Expression with resultType
- string). Possible values include: "Online", "OnPremisesWithIfd".
- :type deployment_type: str or ~data_factory_management_client.models.DynamicsDeploymentType
+ string).
+ :type deployment_type: object
:param host_name: The host name of the on-premises Common Data Service for Apps server. The
property is required for on-prem and not allowed for online. Type: string (or Expression with
resultType string).
@@ -9347,10 +9666,8 @@ class CommonDataServiceForAppsLinkedService(LinkedService):
:param authentication_type: Required. The authentication type to connect to Common Data Service
for Apps server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario.
'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or
- Expression with resultType string). Possible values include: "Office365", "Ifd",
- "AADServicePrincipal".
- :type authentication_type: str or
- ~data_factory_management_client.models.DynamicsAuthenticationType
+ Expression with resultType string).
+ :type authentication_type: object
:param username: User name to access the Common Data Service for Apps instance. Type: string
(or Expression with resultType string).
:type username: object
@@ -9361,10 +9678,8 @@ class CommonDataServiceForAppsLinkedService(LinkedService):
:type service_principal_id: object
:param service_principal_credential_type: The service principal credential type to use in
Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert'
- for certificate. Type: string (or Expression with resultType string). Possible values include:
- "ServicePrincipalKey", "ServicePrincipalCert".
- :type service_principal_credential_type: str or
- ~data_factory_management_client.models.DynamicsServicePrincipalCredentialType
+ for certificate. Type: string (or Expression with resultType string).
+ :type service_principal_credential_type: object
:param service_principal_credential: The credential of the service principal object in Azure
Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey',
servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If
@@ -9390,16 +9705,16 @@ class CommonDataServiceForAppsLinkedService(LinkedService):
'description': {'key': 'description', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
'annotations': {'key': 'annotations', 'type': '[object]'},
- 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'},
+ 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'},
'host_name': {'key': 'typeProperties.hostName', 'type': 'object'},
'port': {'key': 'typeProperties.port', 'type': 'object'},
'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'},
'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'},
- 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
+ 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'},
'username': {'key': 'typeProperties.username', 'type': 'object'},
'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
- 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'},
+ 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'object'},
'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
}
@@ -9407,8 +9722,8 @@ class CommonDataServiceForAppsLinkedService(LinkedService):
def __init__(
self,
*,
- deployment_type: Union[str, "DynamicsDeploymentType"],
- authentication_type: Union[str, "DynamicsAuthenticationType"],
+ deployment_type: object,
+ authentication_type: object,
additional_properties: Optional[Dict[str, object]] = None,
connect_via: Optional["IntegrationRuntimeReference"] = None,
description: Optional[str] = None,
@@ -9421,7 +9736,7 @@ def __init__(
username: Optional[object] = None,
password: Optional["SecretBase"] = None,
service_principal_id: Optional[object] = None,
- service_principal_credential_type: Optional[Union[str, "DynamicsServicePrincipalCredentialType"]] = None,
+ service_principal_credential_type: Optional[object] = None,
service_principal_credential: Optional["SecretBase"] = None,
encrypted_credential: Optional[object] = None,
**kwargs
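
With the enum constraints relaxed to `object`, these properties can now carry expressions as well as plain strings; a sketch (import path assumed):

    from azext_datafactory.vendored_sdks.datafactory.models import CommonDataServiceForAppsLinkedService

    linked_service = CommonDataServiceForAppsLinkedService(
        deployment_type={"value": "@linkedService().deploymentType", "type": "Expression"},
        authentication_type="Office365",  # plain strings remain valid
        service_uri="https://contoso.crm.dynamics.com",
    )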
@@ -9467,6 +9782,9 @@ class CommonDataServiceForAppsSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param write_behavior: Required. The write behavior for the operation. Possible values include:
"Upsert".
:type write_behavior: str or ~data_factory_management_client.models.DynamicsSinkWriteBehavior
@@ -9492,6 +9810,7 @@ class CommonDataServiceForAppsSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'},
@@ -9507,11 +9826,12 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
ignore_null_values: Optional[object] = None,
alternate_key_name: Optional[object] = None,
**kwargs
):
- super(CommonDataServiceForAppsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(CommonDataServiceForAppsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'CommonDataServiceForAppsSink' # type: str
self.write_behavior = write_behavior
self.ignore_null_values = ignore_null_values
@@ -9537,6 +9857,9 @@ class CommonDataServiceForAppsSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: FetchXML is a proprietary query language that is used in Microsoft Common Data
Service for Apps (online & on-premises). Type: string (or Expression with resultType string).
:type query: object
@@ -9555,6 +9878,7 @@ class CommonDataServiceForAppsSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
}
@@ -9566,11 +9890,12 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
**kwargs
):
- super(CommonDataServiceForAppsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(CommonDataServiceForAppsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'CommonDataServiceForAppsSource' # type: str
self.query = query
self.additional_columns = additional_columns
@@ -9836,6 +10161,9 @@ class ConcurSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -9857,6 +10185,7 @@ class ConcurSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -9869,12 +10198,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
**kwargs
):
- super(ConcurSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(ConcurSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'ConcurSource' # type: str
self.query = query
@@ -10389,6 +10719,9 @@ class CosmosDbMongoDbApiLinkedService(LinkedService):
:type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification]
:param annotations: List of tags that can be used for describing the linked service.
:type annotations: list[object]
+ :param is_server_version_above32: Whether the CosmosDB (MongoDB API) server version is higher
+ than 3.2. The default value is false. Type: boolean (or Expression with resultType boolean).
+ :type is_server_version_above32: object
:param connection_string: Required. The CosmosDB (MongoDB API) connection string. Type: string,
SecureString or AzureKeyVaultSecretReference.
@@ -10411,6 +10744,7 @@ class CosmosDbMongoDbApiLinkedService(LinkedService):
'description': {'key': 'description', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'is_server_version_above32': {'key': 'typeProperties.isServerVersionAbove32', 'type': 'object'},
'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
'database': {'key': 'typeProperties.database', 'type': 'object'},
}
@@ -10425,10 +10759,12 @@ def __init__(
description: Optional[str] = None,
parameters: Optional[Dict[str, "ParameterSpecification"]] = None,
annotations: Optional[List[object]] = None,
+ is_server_version_above32: Optional[object] = None,
**kwargs
):
super(CosmosDbMongoDbApiLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
self.type = 'CosmosDbMongoDbApi' # type: str
+ self.is_server_version_above32 = is_server_version_above32
self.connection_string = connection_string
self.database = database
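
A sketch of the new version hint (import path assumed); the flag is `object`-typed, so booleans and expressions are both accepted:

    from azext_datafactory.vendored_sdks.datafactory.models import CosmosDbMongoDbApiLinkedService

    linked_service = CosmosDbMongoDbApiLinkedService(
        connection_string="mongodb://account:key@account.mongo.cosmos.azure.com:10255/?ssl=true",
        database="appdb",
        is_server_version_above32=True,
    )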
@@ -10458,6 +10794,9 @@ class CosmosDbMongoDbApiSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param write_behavior: Specifies whether a document with the same key is overwritten (upsert)
rather than raising an exception (insert). The default value is "insert". Type: string (or
Expression with resultType string).
@@ -10476,6 +10815,7 @@ class CosmosDbMongoDbApiSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
}
@@ -10488,10 +10828,11 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
write_behavior: Optional[object] = None,
**kwargs
):
- super(CosmosDbMongoDbApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(CosmosDbMongoDbApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'CosmosDbMongoDbApiSink' # type: str
self.write_behavior = write_behavior
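
A one-line sketch (import path assumed) of opting into overwrite-on-key semantics instead of the default insert:

    from azext_datafactory.vendored_sdks.datafactory.models import CosmosDbMongoDbApiSink

    sink = CosmosDbMongoDbApiSink(write_behavior="upsert")  # default is "insert"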
@@ -10515,6 +10856,9 @@ class CosmosDbMongoDbApiSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param filter: Specifies the selection filter using query operators. To return all documents in a
collection, omit this parameter or pass an empty document ({}). Type: string (or Expression
with resultType string).
@@ -10544,6 +10888,7 @@ class CosmosDbMongoDbApiSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'filter': {'key': 'filter', 'type': 'object'},
'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'},
'batch_size': {'key': 'batchSize', 'type': 'object'},
@@ -10558,6 +10903,7 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
filter: Optional[object] = None,
cursor_methods: Optional["MongoDbCursorMethodsProperties"] = None,
batch_size: Optional[object] = None,
@@ -10565,7 +10911,7 @@ def __init__(
additional_columns: Optional[List["AdditionalColumns"]] = None,
**kwargs
):
- super(CosmosDbMongoDbApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(CosmosDbMongoDbApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'CosmosDbMongoDbApiSource' # type: str
self.filter = filter
self.cursor_methods = cursor_methods
@@ -10669,6 +11015,9 @@ class CosmosDbSqlApiSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param write_behavior: Describes how to write data to Azure Cosmos DB. Type: string (or
Expression with resultType string). Allowed values: insert and upsert.
:type write_behavior: object
@@ -10686,6 +11035,7 @@ class CosmosDbSqlApiSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
}
@@ -10698,10 +11048,11 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
write_behavior: Optional[object] = None,
**kwargs
):
- super(CosmosDbSqlApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(CosmosDbSqlApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'CosmosDbSqlApiSink' # type: str
self.write_behavior = write_behavior
@@ -10725,6 +11076,9 @@ class CosmosDbSqlApiSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: SQL API query. Type: string (or Expression with resultType string).
:type query: object
:param page_size: Page size of the result. Type: integer (or Expression with resultType
@@ -10751,6 +11105,7 @@ class CosmosDbSqlApiSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'page_size': {'key': 'pageSize', 'type': 'object'},
'preferred_regions': {'key': 'preferredRegions', 'type': 'object'},
@@ -10765,6 +11120,7 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query: Optional[object] = None,
page_size: Optional[object] = None,
preferred_regions: Optional[object] = None,
@@ -10772,7 +11128,7 @@ def __init__(
additional_columns: Optional[List["AdditionalColumns"]] = None,
**kwargs
):
- super(CosmosDbSqlApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(CosmosDbSqlApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'CosmosDbSqlApiSource' # type: str
self.query = query
self.page_size = page_size
@@ -10865,6 +11221,9 @@ class CouchbaseSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -10886,6 +11245,7 @@ class CouchbaseSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -10898,12 +11258,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
**kwargs
):
- super(CouchbaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(CouchbaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'CouchbaseSource' # type: str
self.query = query
@@ -11107,6 +11468,181 @@ def __init__(
self.run_id = run_id
+class Credential(msrest.serialization.Model):
+ """The Azure Data Factory nested object which contains the information and credential which can be used to connect with related store or compute resource.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: ManagedIdentityCredential, ServicePrincipalCredential.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :type additional_properties: dict[str, object]
+ :param type: Required. Type of credential. Constant filled by server.
+ :type type: str
+ :param description: Credential description.
+ :type description: str
+ :param annotations: List of tags that can be used for describing the Credential.
+ :type annotations: list[object]
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ }
+
+ _subtype_map = {
+ 'type': {'ManagedIdentity': 'ManagedIdentityCredential', 'ServicePrincipal': 'ServicePrincipalCredential'}
+ }
+
+ def __init__(
+ self,
+ *,
+ additional_properties: Optional[Dict[str, object]] = None,
+ description: Optional[str] = None,
+ annotations: Optional[List[object]] = None,
+ **kwargs
+ ):
+ super(Credential, self).__init__(**kwargs)
+ self.additional_properties = additional_properties
+ self.type = 'Credential' # type: str
+ self.description = description
+ self.annotations = annotations
+
+
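
Because Credential is polymorphic, msrest uses _subtype_map to pick the concrete class from the wire value of type. A hedged sketch of that round trip; it assumes, as is usual for these generated clients, that the known subclasses live in the same models module so the deserializer can locate them:

from data_factory_management_client.models import Credential, ManagedIdentityCredential

raw = {"type": "ManagedIdentity", "description": "factory identity"}
# Model.deserialize consults _subtype_map on the base class, so this yields
# a ManagedIdentityCredential instance rather than a bare Credential.
cred = Credential.deserialize(raw)
assert isinstance(cred, ManagedIdentityCredential)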
+class CredentialReference(msrest.serialization.Model):
+ """Credential reference type.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :type additional_properties: dict[str, object]
+ :ivar type: Required. Credential reference type. Default value: "CredentialReference".
+ :vartype type: str
+ :param reference_name: Required. Reference credential name.
+ :type reference_name: str
+ """
+
+ _validation = {
+ 'type': {'required': True, 'constant': True},
+ 'reference_name': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'reference_name': {'key': 'referenceName', 'type': 'str'},
+ }
+
+ type = "CredentialReference"
+
+ def __init__(
+ self,
+ *,
+ reference_name: str,
+ additional_properties: Optional[Dict[str, object]] = None,
+ **kwargs
+ ):
+ super(CredentialReference, self).__init__(**kwargs)
+ self.additional_properties = additional_properties
+ self.reference_name = reference_name
+
+
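
CredentialReference pins type as a class-level constant and validates it as both required and constant, so callers only supply reference_name. A minimal sketch (the reference name is illustrative):

from data_factory_management_client.models import CredentialReference

ref = CredentialReference(reference_name="myCredential")
assert ref.type == "CredentialReference"  # constant; not an __init__ argument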
+class SubResource(msrest.serialization.Model):
+ """Azure Data Factory nested resource, which belongs to a factory.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: The resource identifier.
+ :vartype id: str
+ :ivar name: The resource name.
+ :vartype name: str
+ :ivar type: The resource type.
+ :vartype type: str
+ :ivar etag: Etag identifies change in the resource.
+ :vartype etag: str
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'etag': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'etag': {'key': 'etag', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SubResource, self).__init__(**kwargs)
+ self.id = None
+ self.name = None
+ self.type = None
+ self.etag = None
+
+
+class CredentialResource(SubResource):
+ """Credential resource type.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar id: The resource identifier.
+ :vartype id: str
+ :ivar name: The resource name.
+ :vartype name: str
+ :ivar type: The resource type.
+ :vartype type: str
+ :ivar etag: Etag identifies change in the resource.
+ :vartype etag: str
+ :param properties: Required. Properties of credentials.
+ :type properties: ~data_factory_management_client.models.Credential
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'etag': {'readonly': True},
+ 'properties': {'required': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'etag': {'key': 'etag', 'type': 'str'},
+ 'properties': {'key': 'properties', 'type': 'Credential'},
+ }
+
+ def __init__(
+ self,
+ *,
+ properties: "Credential",
+ **kwargs
+ ):
+ super(CredentialResource, self).__init__(**kwargs)
+ self.properties = properties
+
+
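CredentialResource wraps a Credential in the SubResource envelope; id, name, type, and etag are marked readonly, so msrest drops them when serializing a request and only fills them in from responses. A sketch, assuming ManagedIdentityCredential accepts the base Credential keywords:

from data_factory_management_client.models import (
    CredentialResource,
    ManagedIdentityCredential,
)

resource = CredentialResource(
    properties=ManagedIdentityCredential(description="factory identity"),
)
body = resource.serialize()  # readonly id/name/type/etag are omitted
assert "properties" in body and "id" not in body
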
class CustomActivity(ExecutionActivity):
"""Custom activity type.
@@ -12092,46 +12628,6 @@ def __init__(
self.dataset_parameters = dataset_parameters
-class SubResource(msrest.serialization.Model):
- """Azure Data Factory nested resource, which belongs to a factory.
-
- Variables are only populated by the server, and will be ignored when sending a request.
-
- :ivar id: The resource identifier.
- :vartype id: str
- :ivar name: The resource name.
- :vartype name: str
- :ivar type: The resource type.
- :vartype type: str
- :ivar etag: Etag identifies change in the resource.
- :vartype etag: str
- """
-
- _validation = {
- 'id': {'readonly': True},
- 'name': {'readonly': True},
- 'type': {'readonly': True},
- 'etag': {'readonly': True},
- }
-
- _attribute_map = {
- 'id': {'key': 'id', 'type': 'str'},
- 'name': {'key': 'name', 'type': 'str'},
- 'type': {'key': 'type', 'type': 'str'},
- 'etag': {'key': 'etag', 'type': 'str'},
- }
-
- def __init__(
- self,
- **kwargs
- ):
- super(SubResource, self).__init__(**kwargs)
- self.id = None
- self.name = None
- self.type = None
- self.etag = None
-
-
class DataFlowResource(SubResource):
"""Data flow resource type.
@@ -12594,8 +13090,8 @@ class DatasetDeflateCompression(DatasetCompression):
:type additional_properties: dict[str, object]
:param type: Required. Type of dataset compression. Constant filled by server.
:type type: str
- :param level: The Deflate compression level. Possible values include: "Optimal", "Fastest".
- :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel
+ :param level: The Deflate compression level.
+ :type level: object
"""
_validation = {
@@ -12605,14 +13101,14 @@ class DatasetDeflateCompression(DatasetCompression):
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
- 'level': {'key': 'level', 'type': 'str'},
+ 'level': {'key': 'level', 'type': 'object'},
}
def __init__(
self,
*,
additional_properties: Optional[Dict[str, object]] = None,
- level: Optional[Union[str, "DatasetCompressionLevel"]] = None,
+ level: Optional[object] = None,
**kwargs
):
super(DatasetDeflateCompression, self).__init__(additional_properties=additional_properties, **kwargs)
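
Relaxing level from the DatasetCompressionLevel enum to object, here and in the GZip, TarGZip, and ZipDeflate variants below, is what makes the compression level parameterizable: plain strings keep working, and an Expression payload is now accepted as well. A sketch (the parameter name is illustrative):

from data_factory_management_client.models import DatasetDeflateCompression

deflate = DatasetDeflateCompression(level="Fastest")  # literal, as before

deflate = DatasetDeflateCompression(
    level={"value": "@pipeline().parameters.level", "type": "Expression"},
)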
@@ -12651,8 +13147,8 @@ class DatasetGZipCompression(DatasetCompression):
:type additional_properties: dict[str, object]
:param type: Required. Type of dataset compression. Constant filled by server.
:type type: str
- :param level: The GZip compression level. Possible values include: "Optimal", "Fastest".
- :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel
+ :param level: The GZip compression level.
+ :type level: object
"""
_validation = {
@@ -12662,14 +13158,14 @@ class DatasetGZipCompression(DatasetCompression):
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
- 'level': {'key': 'level', 'type': 'str'},
+ 'level': {'key': 'level', 'type': 'object'},
}
def __init__(
self,
*,
additional_properties: Optional[Dict[str, object]] = None,
- level: Optional[Union[str, "DatasetCompressionLevel"]] = None,
+ level: Optional[object] = None,
**kwargs
):
super(DatasetGZipCompression, self).__init__(additional_properties=additional_properties, **kwargs)
@@ -12867,8 +13363,8 @@ class DatasetTarGZipCompression(DatasetCompression):
:type additional_properties: dict[str, object]
:param type: Required. Type of dataset compression. Constant filled by server.
:type type: str
- :param level: The TarGZip compression level. Possible values include: "Optimal", "Fastest".
- :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel
+ :param level: The TarGZip compression level.
+ :type level: object
"""
_validation = {
@@ -12878,14 +13374,14 @@ class DatasetTarGZipCompression(DatasetCompression):
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
- 'level': {'key': 'level', 'type': 'str'},
+ 'level': {'key': 'level', 'type': 'object'},
}
def __init__(
self,
*,
additional_properties: Optional[Dict[str, object]] = None,
- level: Optional[Union[str, "DatasetCompressionLevel"]] = None,
+ level: Optional[object] = None,
**kwargs
):
super(DatasetTarGZipCompression, self).__init__(additional_properties=additional_properties, **kwargs)
@@ -12903,8 +13399,8 @@ class DatasetZipDeflateCompression(DatasetCompression):
:type additional_properties: dict[str, object]
:param type: Required. Type of dataset compression. Constant filled by server.
:type type: str
- :param level: The ZipDeflate compression level. Possible values include: "Optimal", "Fastest".
- :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel
+ :param level: The ZipDeflate compression level.
+ :type level: object
"""
_validation = {
@@ -12914,14 +13410,14 @@ class DatasetZipDeflateCompression(DatasetCompression):
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
- 'level': {'key': 'level', 'type': 'str'},
+ 'level': {'key': 'level', 'type': 'object'},
}
def __init__(
self,
*,
additional_properties: Optional[Dict[str, object]] = None,
- level: Optional[Union[str, "DatasetCompressionLevel"]] = None,
+ level: Optional[object] = None,
**kwargs
):
super(DatasetZipDeflateCompression, self).__init__(additional_properties=additional_properties, **kwargs)
@@ -13051,6 +13547,9 @@ class Db2Source(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -13071,6 +13570,7 @@ class Db2Source(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -13083,12 +13583,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
**kwargs
):
- super(Db2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(Db2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'Db2Source' # type: str
self.query = query
@@ -13326,12 +13827,11 @@ class DelimitedTextDataset(Dataset):
https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with
resultType string).
:type encoding_name: object
- :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo", "bzip2",
- "deflate", "zipDeflate", "lz4", "tar", "tarGZip".
- :type compression_codec: str or ~data_factory_management_client.models.CompressionCodec
- :param compression_level: The data compression method used for DelimitedText. Possible values
- include: "Optimal", "Fastest".
- :type compression_level: str or ~data_factory_management_client.models.DatasetCompressionLevel
+ :param compression_codec: The data compression codec. Type: string (or Expression with
+ resultType string).
+ :type compression_codec: object
+ :param compression_level: The data compression method used for DelimitedText.
+ :type compression_level: object
:param quote_char: The quote character. Type: string (or Expression with resultType string).
:type quote_char: object
:param escape_char: The escape character. Type: string (or Expression with resultType string).
@@ -13363,8 +13863,8 @@ class DelimitedTextDataset(Dataset):
'column_delimiter': {'key': 'typeProperties.columnDelimiter', 'type': 'object'},
'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'},
'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'},
- 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'},
- 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'str'},
+ 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'},
+ 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'object'},
'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'},
'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'},
'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'},
@@ -13386,8 +13886,8 @@ def __init__(
column_delimiter: Optional[object] = None,
row_delimiter: Optional[object] = None,
encoding_name: Optional[object] = None,
- compression_codec: Optional[Union[str, "CompressionCodec"]] = None,
- compression_level: Optional[Union[str, "DatasetCompressionLevel"]] = None,
+ compression_codec: Optional[object] = None,
+ compression_level: Optional[object] = None,
quote_char: Optional[object] = None,
escape_char: Optional[object] = None,
first_row_as_header: Optional[object] = None,
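
The same enum-to-object relaxation applies here, so compression_codec and compression_level each take a literal or an Expression. A sketch; the LinkedServiceReference kwargs are an assumption made only so the example is complete:

from data_factory_management_client.models import (
    DelimitedTextDataset,
    LinkedServiceReference,
)

ls_ref = LinkedServiceReference(reference_name="myStorage")  # kwargs assumed

ds = DelimitedTextDataset(
    linked_service_name=ls_ref,
    compression_codec="gzip",  # plain literal, as before
    compression_level={"value": "@dataset().level", "type": "Expression"},
)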
@@ -13475,6 +13975,9 @@ class DelimitedTextSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: DelimitedText store settings.
:type store_settings: ~data_factory_management_client.models.StoreWriteSettings
:param format_settings: DelimitedText format settings.
@@ -13493,6 +13996,7 @@ class DelimitedTextSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'},
'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextWriteSettings'},
}
@@ -13506,11 +14010,12 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
store_settings: Optional["StoreWriteSettings"] = None,
format_settings: Optional["DelimitedTextWriteSettings"] = None,
**kwargs
):
- super(DelimitedTextSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(DelimitedTextSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'DelimitedTextSink' # type: str
self.store_settings = store_settings
self.format_settings = format_settings
@@ -13535,6 +14040,9 @@ class DelimitedTextSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: DelimitedText store settings.
:type store_settings: ~data_factory_management_client.models.StoreReadSettings
:param format_settings: DelimitedText format settings.
@@ -13554,6 +14062,7 @@ class DelimitedTextSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSettings'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
@@ -13566,12 +14075,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
store_settings: Optional["StoreReadSettings"] = None,
format_settings: Optional["DelimitedTextReadSettings"] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
**kwargs
):
- super(DelimitedTextSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(DelimitedTextSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'DelimitedTextSource' # type: str
self.store_settings = store_settings
self.format_settings = format_settings
@@ -13804,6 +14314,9 @@ class DocumentDbCollectionSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param nesting_separator: Nested properties separator. Default is . (dot). Type: string (or
Expression with resultType string).
:type nesting_separator: object
@@ -13824,6 +14337,7 @@ class DocumentDbCollectionSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'},
'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
}
@@ -13837,11 +14351,12 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
nesting_separator: Optional[object] = None,
write_behavior: Optional[object] = None,
**kwargs
):
- super(DocumentDbCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(DocumentDbCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'DocumentDbCollectionSink' # type: str
self.nesting_separator = nesting_separator
self.write_behavior = write_behavior
@@ -13866,6 +14381,9 @@ class DocumentDbCollectionSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: Documents query. Type: string (or Expression with resultType string).
:type query: object
:param nesting_separator: Nested properties separator. Type: string (or Expression with
@@ -13889,6 +14407,7 @@ class DocumentDbCollectionSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
@@ -13902,13 +14421,14 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query: Optional[object] = None,
nesting_separator: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
**kwargs
):
- super(DocumentDbCollectionSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(DocumentDbCollectionSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'DocumentDbCollectionSource' # type: str
self.query = query
self.nesting_separator = nesting_separator
@@ -14000,6 +14520,9 @@ class DrillSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -14021,6 +14544,7 @@ class DrillSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -14033,12 +14557,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
**kwargs
):
- super(DrillSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(DrillSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'DrillSource' # type: str
self.query = query
@@ -14361,6 +14886,9 @@ class DynamicsAxSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -14387,6 +14915,7 @@ class DynamicsAxSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -14400,13 +14929,14 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
http_request_timeout: Optional[object] = None,
**kwargs
):
- super(DynamicsAxSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(DynamicsAxSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'DynamicsAXSource' # type: str
self.query = query
self.http_request_timeout = http_request_timeout
@@ -14501,9 +15031,8 @@ class DynamicsCrmLinkedService(LinkedService):
:type annotations: list[object]
:param deployment_type: Required. The deployment type of the Dynamics CRM instance. 'Online'
for Dynamics CRM Online and 'OnPremisesWithIfd' for Dynamics CRM on-premises with Ifd. Type:
- string (or Expression with resultType string). Possible values include: "Online",
- "OnPremisesWithIfd".
- :type deployment_type: str or ~data_factory_management_client.models.DynamicsDeploymentType
+ string (or Expression with resultType string).
+ :type deployment_type: object
:param host_name: The host name of the on-premises Dynamics CRM server. The property is
required for on-prem and not allowed for online. Type: string (or Expression with resultType
string).
@@ -14522,10 +15051,8 @@ class DynamicsCrmLinkedService(LinkedService):
:param authentication_type: Required. The authentication type to connect to Dynamics CRM
server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario,
'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or
- Expression with resultType string). Possible values include: "Office365", "Ifd",
- "AADServicePrincipal".
- :type authentication_type: str or
- ~data_factory_management_client.models.DynamicsAuthenticationType
+ Expression with resultType string).
+ :type authentication_type: object
:param username: User name to access the Dynamics CRM instance. Type: string (or Expression
with resultType string).
:type username: object
@@ -14536,10 +15063,8 @@ class DynamicsCrmLinkedService(LinkedService):
:type service_principal_id: object
:param service_principal_credential_type: The service principal credential type to use in
Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert'
- for certificate. Type: string (or Expression with resultType string). Possible values include:
- "ServicePrincipalKey", "ServicePrincipalCert".
- :type service_principal_credential_type: str or
- ~data_factory_management_client.models.DynamicsServicePrincipalCredentialType
+ for certificate. Type: string (or Expression with resultType string).
+ :type service_principal_credential_type: object
:param service_principal_credential: The credential of the service principal object in Azure
Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey',
servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If
@@ -14565,16 +15090,16 @@ class DynamicsCrmLinkedService(LinkedService):
'description': {'key': 'description', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
'annotations': {'key': 'annotations', 'type': '[object]'},
- 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'},
+ 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'},
'host_name': {'key': 'typeProperties.hostName', 'type': 'object'},
'port': {'key': 'typeProperties.port', 'type': 'object'},
'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'},
'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'},
- 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
+ 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'},
'username': {'key': 'typeProperties.username', 'type': 'object'},
'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
- 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'},
+ 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'object'},
'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
}
@@ -14582,8 +15107,8 @@ class DynamicsCrmLinkedService(LinkedService):
def __init__(
self,
*,
- deployment_type: Union[str, "DynamicsDeploymentType"],
- authentication_type: Union[str, "DynamicsAuthenticationType"],
+ deployment_type: object,
+ authentication_type: object,
additional_properties: Optional[Dict[str, object]] = None,
connect_via: Optional["IntegrationRuntimeReference"] = None,
description: Optional[str] = None,
@@ -14596,7 +15121,7 @@ def __init__(
username: Optional[object] = None,
password: Optional["SecretBase"] = None,
service_principal_id: Optional[object] = None,
- service_principal_credential_type: Optional[Union[str, "DynamicsServicePrincipalCredentialType"]] = None,
+ service_principal_credential_type: Optional[object] = None,
service_principal_credential: Optional["SecretBase"] = None,
encrypted_credential: Optional[object] = None,
**kwargs
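
With deployment_type and authentication_type widened to object, the old literal strings stay valid and the required arguments can now also be Expressions. A sketch; the URI and user name are placeholders:

from data_factory_management_client.models import DynamicsCrmLinkedService

ls = DynamicsCrmLinkedService(
    deployment_type="Online",          # or an Expression dict
    authentication_type="Office365",
    service_uri="https://contoso.crm.dynamics.com",  # placeholder
    username="user@contoso.com",                     # placeholder
)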
@@ -14642,6 +15167,9 @@ class DynamicsCrmSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param write_behavior: Required. The write behavior for the operation. Possible values include:
"Upsert".
:type write_behavior: str or ~data_factory_management_client.models.DynamicsSinkWriteBehavior
@@ -14667,6 +15195,7 @@ class DynamicsCrmSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'},
@@ -14682,11 +15211,12 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
ignore_null_values: Optional[object] = None,
alternate_key_name: Optional[object] = None,
**kwargs
):
- super(DynamicsCrmSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(DynamicsCrmSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'DynamicsCrmSink' # type: str
self.write_behavior = write_behavior
self.ignore_null_values = ignore_null_values
@@ -14712,6 +15242,9 @@ class DynamicsCrmSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics CRM
(online & on-premises). Type: string (or Expression with resultType string).
:type query: object
@@ -14730,6 +15263,7 @@ class DynamicsCrmSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
}
@@ -14741,11 +15275,12 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
**kwargs
):
- super(DynamicsCrmSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(DynamicsCrmSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'DynamicsCrmSource' # type: str
self.query = query
self.additional_columns = additional_columns
@@ -14840,8 +15375,8 @@ class DynamicsLinkedService(LinkedService):
:type annotations: list[object]
:param deployment_type: Required. The deployment type of the Dynamics instance. 'Online' for
Dynamics Online and 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or
- Expression with resultType string). Possible values include: "Online", "OnPremisesWithIfd".
- :type deployment_type: str or ~data_factory_management_client.models.DynamicsDeploymentType
+ Expression with resultType string).
+ :type deployment_type: object
:param host_name: The host name of the on-premises Dynamics server. The property is required
for on-prem and not allowed for online. Type: string (or Expression with resultType string).
:type host_name: object
@@ -14859,9 +15394,8 @@ class DynamicsLinkedService(LinkedService):
:param authentication_type: Required. The authentication type to connect to Dynamics server.
'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal'
for Server-To-Server authentication in online scenario. Type: string (or Expression with
- resultType string). Possible values include: "Office365", "Ifd", "AADServicePrincipal".
- :type authentication_type: str or
- ~data_factory_management_client.models.DynamicsAuthenticationType
+ resultType string).
+ :type authentication_type: object
:param username: User name to access the Dynamics instance. Type: string (or Expression with
resultType string).
:type username: object
@@ -14872,10 +15406,8 @@ class DynamicsLinkedService(LinkedService):
:type service_principal_id: object
:param service_principal_credential_type: The service principal credential type to use in
Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert'
- for certificate. Type: string (or Expression with resultType string). Possible values include:
- "ServicePrincipalKey", "ServicePrincipalCert".
- :type service_principal_credential_type: str or
- ~data_factory_management_client.models.DynamicsServicePrincipalCredentialType
+ for certificate. Type: string (or Expression with resultType string).
+ :type service_principal_credential_type: str
:param service_principal_credential: The credential of the service principal object in Azure
Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey',
servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If
@@ -14901,12 +15433,12 @@ class DynamicsLinkedService(LinkedService):
'description': {'key': 'description', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
'annotations': {'key': 'annotations', 'type': '[object]'},
- 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'},
+ 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'object'},
'host_name': {'key': 'typeProperties.hostName', 'type': 'object'},
'port': {'key': 'typeProperties.port', 'type': 'object'},
'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'},
'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'},
- 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
+ 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'},
'username': {'key': 'typeProperties.username', 'type': 'object'},
'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
@@ -14918,8 +15450,8 @@ class DynamicsLinkedService(LinkedService):
def __init__(
self,
*,
- deployment_type: Union[str, "DynamicsDeploymentType"],
- authentication_type: Union[str, "DynamicsAuthenticationType"],
+ deployment_type: object,
+ authentication_type: object,
additional_properties: Optional[Dict[str, object]] = None,
connect_via: Optional["IntegrationRuntimeReference"] = None,
description: Optional[str] = None,
@@ -14932,7 +15464,7 @@ def __init__(
username: Optional[object] = None,
password: Optional["SecretBase"] = None,
service_principal_id: Optional[object] = None,
- service_principal_credential_type: Optional[Union[str, "DynamicsServicePrincipalCredentialType"]] = None,
+ service_principal_credential_type: Optional[str] = None,
service_principal_credential: Optional["SecretBase"] = None,
encrypted_credential: Optional[object] = None,
**kwargs
@@ -14978,6 +15510,9 @@ class DynamicsSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param write_behavior: Required. The write behavior for the operation. Possible values include:
"Upsert".
:type write_behavior: str or ~data_factory_management_client.models.DynamicsSinkWriteBehavior
@@ -15003,6 +15538,7 @@ class DynamicsSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
'alternate_key_name': {'key': 'alternateKeyName', 'type': 'object'},
@@ -15018,11 +15554,12 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
ignore_null_values: Optional[object] = None,
alternate_key_name: Optional[object] = None,
**kwargs
):
- super(DynamicsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(DynamicsSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'DynamicsSink' # type: str
self.write_behavior = write_behavior
self.ignore_null_values = ignore_null_values
@@ -15048,6 +15585,9 @@ class DynamicsSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics
(online & on-premises). Type: string (or Expression with resultType string).
:type query: object
@@ -15066,6 +15606,7 @@ class DynamicsSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
}
@@ -15077,11 +15618,12 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
**kwargs
):
- super(DynamicsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(DynamicsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'DynamicsSource' # type: str
self.query = query
self.additional_columns = additional_columns
@@ -15265,6 +15807,9 @@ class EloquaSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -15286,6 +15831,7 @@ class EloquaSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -15298,12 +15844,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
**kwargs
):
- super(EloquaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(EloquaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'EloquaSource' # type: str
self.query = query
@@ -15448,9 +15995,12 @@ class ExcelDataset(Dataset):
:type folder: ~data_factory_management_client.models.DatasetFolder
:param location: The location of the excel storage.
:type location: ~data_factory_management_client.models.DatasetLocation
- :param sheet_name: The sheet of excel file. Type: string (or Expression with resultType
+ :param sheet_name: The sheet name of the excel file. Type: string (or Expression with resultType
string).
:type sheet_name: object
+ :param sheet_index: The sheet index of the excel file; the default value is 0. Type: integer (or
+ Expression with resultType integer).
+ :type sheet_index: object
:param range: The partial data of one sheet. Type: string (or Expression with resultType
string).
:type range: object
@@ -15481,6 +16031,7 @@ class ExcelDataset(Dataset):
'folder': {'key': 'folder', 'type': 'DatasetFolder'},
'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'},
'sheet_name': {'key': 'typeProperties.sheetName', 'type': 'object'},
+ 'sheet_index': {'key': 'typeProperties.sheetIndex', 'type': 'object'},
'range': {'key': 'typeProperties.range', 'type': 'object'},
'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'},
'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'},
@@ -15500,6 +16051,7 @@ def __init__(
folder: Optional["DatasetFolder"] = None,
location: Optional["DatasetLocation"] = None,
sheet_name: Optional[object] = None,
+ sheet_index: Optional[object] = None,
range: Optional[object] = None,
first_row_as_header: Optional[object] = None,
compression: Optional["DatasetCompression"] = None,
@@ -15510,6 +16062,7 @@ def __init__(
self.type = 'Excel' # type: str
self.location = location
self.sheet_name = sheet_name
+ self.sheet_index = sheet_index
self.range = range
self.first_row_as_header = first_row_as_header
self.compression = compression
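
sheet_index gives an alternative to sheet_name for addressing a worksheet; it defaults to 0 and, being typed object, may also carry an Expression. A sketch; the linked-service and location constructors are assumptions made only to keep the example self-contained:

from data_factory_management_client.models import (
    AzureBlobStorageLocation,
    ExcelDataset,
    LinkedServiceReference,
)

ds = ExcelDataset(
    linked_service_name=LinkedServiceReference(reference_name="myStorage"),  # kwargs assumed
    location=AzureBlobStorageLocation(container="data", file_name="book.xlsx"),  # kwargs assumed
    sheet_index=1,  # second sheet; sheet_name left unset
    first_row_as_header=True,
)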
@@ -15535,6 +16088,9 @@ class ExcelSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: Excel store settings.
:type store_settings: ~data_factory_management_client.models.StoreReadSettings
:param additional_columns: Specifies the additional columns to be added to source data. Type:
@@ -15552,6 +16108,7 @@ class ExcelSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
}
@@ -15563,11 +16120,12 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
store_settings: Optional["StoreReadSettings"] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
**kwargs
):
- super(ExcelSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(ExcelSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'ExcelSource' # type: str
self.store_settings = store_settings
self.additional_columns = additional_columns
@@ -16614,6 +17172,9 @@ class FileServerReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -16654,6 +17215,7 @@ class FileServerReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -16671,6 +17233,7 @@ def __init__(
*,
additional_properties: Optional[Dict[str, object]] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
recursive: Optional[object] = None,
wildcard_folder_path: Optional[object] = None,
wildcard_file_name: Optional[object] = None,
@@ -16683,7 +17246,7 @@ def __init__(
file_filter: Optional[object] = None,
**kwargs
):
- super(FileServerReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(FileServerReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'FileServerReadSettings' # type: str
self.recursive = recursive
self.wildcard_folder_path = wildcard_folder_path
@@ -16710,6 +17273,9 @@ class FileServerWriteSettings(StoreWriteSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
"""
@@ -16722,6 +17288,7 @@ class FileServerWriteSettings(StoreWriteSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
}
@@ -16730,10 +17297,11 @@ def __init__(
*,
additional_properties: Optional[Dict[str, object]] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
copy_behavior: Optional[object] = None,
**kwargs
):
- super(FileServerWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs)
+ super(FileServerWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs)
self.type = 'FileServerWriteSettings' # type: str
@@ -16865,6 +17433,9 @@ class FileSystemSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
"""
@@ -16881,6 +17452,7 @@ class FileSystemSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
}
@@ -16893,10 +17465,11 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
copy_behavior: Optional[object] = None,
**kwargs
):
- super(FileSystemSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(FileSystemSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'FileSystemSink' # type: str
self.copy_behavior = copy_behavior
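
The sink-side wiring is identical. A sketch pairing the new flag with parameters already on the sink (same assumed import path as above):

    from data_factory_management_client.models import FileSystemSink

    sink = FileSystemSink(
        sink_retry_count=3,
        disable_metrics_collection=False,  # explicit default; an Expression object is also accepted
        copy_behavior='PreserveHierarchy',
    )
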
@@ -16920,6 +17493,9 @@ class FileSystemSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -16938,6 +17514,7 @@ class FileSystemSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
}
@@ -16949,11 +17526,12 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
recursive: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
**kwargs
):
- super(FileSystemSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(FileSystemSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'FileSystemSource' # type: str
self.recursive = recursive
self.additional_columns = additional_columns
@@ -17104,6 +17682,9 @@ class FtpReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -17137,6 +17718,7 @@ class FtpReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -17152,6 +17734,7 @@ def __init__(
*,
additional_properties: Optional[Dict[str, object]] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
recursive: Optional[object] = None,
wildcard_folder_path: Optional[object] = None,
wildcard_file_name: Optional[object] = None,
@@ -17162,7 +17745,7 @@ def __init__(
use_binary_transfer: Optional[bool] = None,
**kwargs
):
- super(FtpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(FtpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'FtpReadSettings' # type: str
self.recursive = recursive
self.wildcard_folder_path = wildcard_folder_path
@@ -17446,6 +18029,8 @@ class GitHubAccessTokenRequest(msrest.serialization.Model):
:type git_hub_access_code: str
:param git_hub_client_id: GitHub application client ID.
:type git_hub_client_id: str
+ :param git_hub_client_secret: GitHub bring your own app client secret information.
+ :type git_hub_client_secret: ~data_factory_management_client.models.GitHubClientSecret
:param git_hub_access_token_base_url: Required. GitHub access token base URL.
:type git_hub_access_token_base_url: str
"""
@@ -17458,6 +18043,7 @@ class GitHubAccessTokenRequest(msrest.serialization.Model):
_attribute_map = {
'git_hub_access_code': {'key': 'gitHubAccessCode', 'type': 'str'},
'git_hub_client_id': {'key': 'gitHubClientId', 'type': 'str'},
+ 'git_hub_client_secret': {'key': 'gitHubClientSecret', 'type': 'GitHubClientSecret'},
'git_hub_access_token_base_url': {'key': 'gitHubAccessTokenBaseUrl', 'type': 'str'},
}
@@ -17467,11 +18053,13 @@ def __init__(
git_hub_access_code: str,
git_hub_access_token_base_url: str,
git_hub_client_id: Optional[str] = None,
+ git_hub_client_secret: Optional["GitHubClientSecret"] = None,
**kwargs
):
super(GitHubAccessTokenRequest, self).__init__(**kwargs)
self.git_hub_access_code = git_hub_access_code
self.git_hub_client_id = git_hub_client_id
+ self.git_hub_client_secret = git_hub_client_secret
self.git_hub_access_token_base_url = git_hub_access_token_base_url
@@ -17496,6 +18084,32 @@ def __init__(
self.git_hub_access_token = git_hub_access_token
+class GitHubClientSecret(msrest.serialization.Model):
+ """Client secret information for factory's bring your own app repository configuration.
+
+ :param byoa_secret_akv_url: Bring your own app client secret AKV URL.
+ :type byoa_secret_akv_url: str
+ :param byoa_secret_name: Bring your own app client secret name in AKV.
+ :type byoa_secret_name: str
+ """
+
+ _attribute_map = {
+ 'byoa_secret_akv_url': {'key': 'byoaSecretAkvUrl', 'type': 'str'},
+ 'byoa_secret_name': {'key': 'byoaSecretName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ byoa_secret_akv_url: Optional[str] = None,
+ byoa_secret_name: Optional[str] = None,
+ **kwargs
+ ):
+ super(GitHubClientSecret, self).__init__(**kwargs)
+ self.byoa_secret_akv_url = byoa_secret_akv_url
+ self.byoa_secret_name = byoa_secret_name
+
+
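+
+# GitHubClientSecret is the model behind the new bring-your-own-app secret
+# support on GitHubAccessTokenRequest. A sketch of composing the two, with
+# placeholder values (the vault URL and secret name below are hypothetical):
+#
+#     from data_factory_management_client.models import (
+#         GitHubAccessTokenRequest,
+#         GitHubClientSecret,
+#     )
+#
+#     secret = GitHubClientSecret(
+#         byoa_secret_akv_url='https://mykv.vault.azure.net/',  # hypothetical AKV URL
+#         byoa_secret_name='github-byoa-secret',                # hypothetical secret name
+#     )
+#     request = GitHubAccessTokenRequest(
+#         git_hub_access_code='<oauth-code>',
+#         git_hub_access_token_base_url='https://api.github.com',
+#         git_hub_client_id='<app-client-id>',
+#         git_hub_client_secret=secret,
+#     )
+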
class GlobalParameterSpecification(msrest.serialization.Model):
"""Definition of a single parameter for an entity.
@@ -17737,6 +18351,9 @@ class GoogleAdWordsSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -17758,6 +18375,7 @@ class GoogleAdWordsSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -17770,12 +18388,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
**kwargs
):
- super(GoogleAdWordsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(GoogleAdWordsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'GoogleAdWordsSource' # type: str
self.query = query
@@ -18004,6 +18623,9 @@ class GoogleBigQuerySource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -18025,6 +18647,7 @@ class GoogleBigQuerySource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -18037,12 +18660,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
**kwargs
):
- super(GoogleBigQuerySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(GoogleBigQuerySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'GoogleBigQuerySource' # type: str
self.query = query
@@ -18187,6 +18811,9 @@ class GoogleCloudStorageReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -18227,6 +18854,7 @@ class GoogleCloudStorageReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -18244,6 +18872,7 @@ def __init__(
*,
additional_properties: Optional[Dict[str, object]] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
recursive: Optional[object] = None,
wildcard_folder_path: Optional[object] = None,
wildcard_file_name: Optional[object] = None,
@@ -18256,7 +18885,7 @@ def __init__(
modified_datetime_end: Optional[object] = None,
**kwargs
):
- super(GoogleCloudStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(GoogleCloudStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'GoogleCloudStorageReadSettings' # type: str
self.recursive = recursive
self.wildcard_folder_path = wildcard_folder_path
@@ -18354,6 +18983,9 @@ class GreenplumSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -18375,6 +19007,7 @@ class GreenplumSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -18387,12 +19020,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
**kwargs
):
- super(GreenplumSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(GreenplumSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'GreenplumSource' # type: str
self.query = query
@@ -18679,6 +19313,9 @@ class HBaseSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -18700,6 +19337,7 @@ class HBaseSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -18712,12 +19350,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
**kwargs
):
- super(HBaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(HBaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'HBaseSource' # type: str
self.query = query
@@ -18854,6 +19493,9 @@ class HdfsReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -18893,6 +19535,7 @@ class HdfsReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -18910,6 +19553,7 @@ def __init__(
*,
additional_properties: Optional[Dict[str, object]] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
recursive: Optional[object] = None,
wildcard_folder_path: Optional[object] = None,
wildcard_file_name: Optional[object] = None,
@@ -18922,7 +19566,7 @@ def __init__(
delete_files_after_completion: Optional[object] = None,
**kwargs
):
- super(HdfsReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(HdfsReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'HdfsReadSettings' # type: str
self.recursive = recursive
self.wildcard_folder_path = wildcard_folder_path
@@ -18955,6 +19599,9 @@ class HdfsSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -18972,6 +19619,7 @@ class HdfsSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'},
}
@@ -18983,11 +19631,12 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
recursive: Optional[object] = None,
distcp_settings: Optional["DistcpSettings"] = None,
**kwargs
):
- super(HdfsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(HdfsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'HdfsSource' # type: str
self.recursive = recursive
self.distcp_settings = distcp_settings
@@ -19407,6 +20056,8 @@ class HdInsightOnDemandLinkedService(LinkedService):
:param subnet_name: The ARM resource ID for the subnet in the vNet. If virtualNetworkId was
specified, then this property is required. Type: string (or Expression with resultType string).
:type subnet_name: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -19460,6 +20111,7 @@ class HdInsightOnDemandLinkedService(LinkedService):
'script_actions': {'key': 'typeProperties.scriptActions', 'type': '[ScriptAction]'},
'virtual_network_id': {'key': 'typeProperties.virtualNetworkId', 'type': 'object'},
'subnet_name': {'key': 'typeProperties.subnetName', 'type': 'object'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -19503,6 +20155,7 @@ def __init__(
script_actions: Optional[List["ScriptAction"]] = None,
virtual_network_id: Optional[object] = None,
subnet_name: Optional[object] = None,
+ credential: Optional["CredentialReference"] = None,
**kwargs
):
super(HdInsightOnDemandLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
@@ -19540,6 +20193,7 @@ def __init__(
self.script_actions = script_actions
self.virtual_network_id = virtual_network_id
self.subnet_name = subnet_name
+ self.credential = credential
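
`CredentialReference` is defined elsewhere in this file; assuming it follows the shape of the other `*Reference` models here (a `reference_name` field naming a factory-level credential), the new kwarg would be used roughly like this:

    # Assumed shape of CredentialReference; verify against its definition.
    cred = CredentialReference(reference_name='myUamiCredential')
    # Passed as HdInsightOnDemandLinkedService(..., credential=cred) alongside
    # the existing required type properties (cluster_size, linked_service_name, ...).
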
class HdInsightPigActivity(ExecutionActivity):
@@ -20109,6 +20763,9 @@ class HiveSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -20130,6 +20787,7 @@ class HiveSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -20142,12 +20800,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
**kwargs
):
- super(HiveSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(HiveSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'HiveSource' # type: str
self.query = query
@@ -20372,6 +21031,9 @@ class HttpReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param request_method: The HTTP method used to call the RESTful API. The default is GET. Type:
string (or Expression with resultType string).
:type request_method: object
@@ -20399,6 +21061,7 @@ class HttpReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'request_method': {'key': 'requestMethod', 'type': 'object'},
'request_body': {'key': 'requestBody', 'type': 'object'},
'additional_headers': {'key': 'additionalHeaders', 'type': 'object'},
@@ -20412,6 +21075,7 @@ def __init__(
*,
additional_properties: Optional[Dict[str, object]] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
request_method: Optional[object] = None,
request_body: Optional[object] = None,
additional_headers: Optional[object] = None,
@@ -20420,7 +21084,7 @@ def __init__(
partition_root_path: Optional[object] = None,
**kwargs
):
- super(HttpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(HttpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'HttpReadSettings' # type: str
self.request_method = request_method
self.request_body = request_body
@@ -20496,6 +21160,9 @@ class HttpSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param http_request_timeout: Specifies the timeout for a HTTP client to get HTTP response from
HTTP server. The default value is equivalent to System.Net.HttpWebRequest.Timeout. Type: string
(or Expression with resultType string), pattern:
@@ -20513,6 +21180,7 @@ class HttpSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'},
}
@@ -20523,10 +21191,11 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
http_request_timeout: Optional[object] = None,
**kwargs
):
- super(HttpSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(HttpSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'HttpSource' # type: str
self.http_request_timeout = http_request_timeout
@@ -20714,6 +21383,9 @@ class HubspotSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -20735,6 +21407,7 @@ class HubspotSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -20747,12 +21420,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
**kwargs
):
- super(HubspotSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(HubspotSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'HubspotSource' # type: str
self.query = query
@@ -21042,6 +21716,9 @@ class ImpalaSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -21063,6 +21740,7 @@ class ImpalaSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -21075,12 +21753,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
**kwargs
):
- super(ImpalaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(ImpalaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'ImpalaSource' # type: str
self.query = query
@@ -21196,6 +21875,9 @@ class InformixSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param pre_copy_script: A query to execute before starting the copy. Type: string (or
Expression with resultType string).
:type pre_copy_script: object
@@ -21213,6 +21895,7 @@ class InformixSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
}
@@ -21225,10 +21908,11 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
pre_copy_script: Optional[object] = None,
**kwargs
):
- super(InformixSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(InformixSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'InformixSink' # type: str
self.pre_copy_script = pre_copy_script
@@ -21252,6 +21936,9 @@ class InformixSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -21272,6 +21959,7 @@ class InformixSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -21284,12 +21972,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
**kwargs
):
- super(InformixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(InformixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'InformixSource' # type: str
self.query = query
@@ -22031,6 +22720,8 @@ class IntegrationRuntimeSsisProperties(msrest.serialization.Model):
list[~data_factory_management_client.models.CustomSetupBase]
:param package_stores: Package stores for the SSIS Integration Runtime.
:type package_stores: list[~data_factory_management_client.models.PackageStore]
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_attribute_map = {
@@ -22042,6 +22733,7 @@ class IntegrationRuntimeSsisProperties(msrest.serialization.Model):
'edition': {'key': 'edition', 'type': 'str'},
'express_custom_setup_properties': {'key': 'expressCustomSetupProperties', 'type': '[CustomSetupBase]'},
'package_stores': {'key': 'packageStores', 'type': '[PackageStore]'},
+ 'credential': {'key': 'credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -22055,6 +22747,7 @@ def __init__(
edition: Optional[Union[str, "IntegrationRuntimeEdition"]] = None,
express_custom_setup_properties: Optional[List["CustomSetupBase"]] = None,
package_stores: Optional[List["PackageStore"]] = None,
+ credential: Optional["CredentialReference"] = None,
**kwargs
):
super(IntegrationRuntimeSsisProperties, self).__init__(**kwargs)
@@ -22066,6 +22759,7 @@ def __init__(
self.edition = edition
self.express_custom_setup_properties = express_custom_setup_properties
self.package_stores = package_stores
+ self.credential = credential
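
The same credential reference also lands on the SSIS integration runtime properties, whose parameters are all optional, so a standalone sketch is possible (import path assumed as above, `CredentialReference` shape assumed as noted earlier):

    from data_factory_management_client.models import (
        CredentialReference,
        IntegrationRuntimeSsisProperties,
    )

    ssis_props = IntegrationRuntimeSsisProperties(
        edition='Standard',
        credential=CredentialReference(reference_name='myUamiCredential'),  # assumed field name
    )
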
class IntegrationRuntimeStatus(msrest.serialization.Model):
@@ -22411,6 +23105,9 @@ class JiraSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -22432,6 +23129,7 @@ class JiraSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -22444,12 +23142,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
**kwargs
):
- super(JiraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(JiraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'JiraSource' # type: str
self.query = query
@@ -22551,9 +23250,8 @@ class JsonFormat(DatasetStorageFormat):
:param deserializer: Deserializer. Type: string (or Expression with resultType string).
:type deserializer: object
:param file_pattern: File pattern of JSON. To be more specific, the way of separating a
- collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive. Possible
- values include: "setOfObjects", "arrayOfObjects".
- :type file_pattern: str or ~data_factory_management_client.models.JsonFormatFilePattern
+ collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive.
+ :type file_pattern: object
:param nesting_separator: The character used to separate nesting levels. Default value is '.'
(dot). Type: string (or Expression with resultType string).
:type nesting_separator: object
@@ -22583,7 +23281,7 @@ class JsonFormat(DatasetStorageFormat):
'type': {'key': 'type', 'type': 'str'},
'serializer': {'key': 'serializer', 'type': 'object'},
'deserializer': {'key': 'deserializer', 'type': 'object'},
- 'file_pattern': {'key': 'filePattern', 'type': 'str'},
+ 'file_pattern': {'key': 'filePattern', 'type': 'object'},
'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'},
'encoding_name': {'key': 'encodingName', 'type': 'object'},
'json_node_reference': {'key': 'jsonNodeReference', 'type': 'object'},
@@ -22596,7 +23294,7 @@ def __init__(
additional_properties: Optional[Dict[str, object]] = None,
serializer: Optional[object] = None,
deserializer: Optional[object] = None,
- file_pattern: Optional[Union[str, "JsonFormatFilePattern"]] = None,
+ file_pattern: Optional[object] = None,
nesting_separator: Optional[object] = None,
encoding_name: Optional[object] = None,
json_node_reference: Optional[object] = None,
@@ -22673,6 +23371,9 @@ class JsonSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: Json store settings.
:type store_settings: ~data_factory_management_client.models.StoreWriteSettings
:param format_settings: Json format settings.
@@ -22691,6 +23392,7 @@ class JsonSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'},
'format_settings': {'key': 'formatSettings', 'type': 'JsonWriteSettings'},
}
@@ -22704,11 +23406,12 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
store_settings: Optional["StoreWriteSettings"] = None,
format_settings: Optional["JsonWriteSettings"] = None,
**kwargs
):
- super(JsonSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(JsonSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'JsonSink' # type: str
self.store_settings = store_settings
self.format_settings = format_settings
@@ -22733,6 +23436,9 @@ class JsonSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: Json store settings.
:type store_settings: ~data_factory_management_client.models.StoreReadSettings
:param format_settings: Json format settings.
@@ -22752,6 +23458,7 @@ class JsonSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
'format_settings': {'key': 'formatSettings', 'type': 'JsonReadSettings'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
@@ -22764,12 +23471,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
store_settings: Optional["StoreReadSettings"] = None,
format_settings: Optional["JsonReadSettings"] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
**kwargs
):
- super(JsonSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(JsonSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'JsonSource' # type: str
self.store_settings = store_settings
self.format_settings = format_settings
@@ -22787,9 +23495,8 @@ class JsonWriteSettings(FormatWriteSettings):
:param type: Required. The write setting type.Constant filled by server.
:type type: str
:param file_pattern: File pattern of JSON. This setting controls the way a collection of JSON
- objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. Possible
- values include: "setOfObjects", "arrayOfObjects".
- :type file_pattern: str or ~data_factory_management_client.models.JsonWriteFilePattern
+ objects will be treated. The default value is 'setOfObjects'. It is case-sensitive.
+ :type file_pattern: object
"""
_validation = {
@@ -22799,14 +23506,14 @@ class JsonWriteSettings(FormatWriteSettings):
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
- 'file_pattern': {'key': 'filePattern', 'type': 'str'},
+ 'file_pattern': {'key': 'filePattern', 'type': 'object'},
}
def __init__(
self,
*,
additional_properties: Optional[Dict[str, object]] = None,
- file_pattern: Optional[Union[str, "JsonWriteFilePattern"]] = None,
+ file_pattern: Optional[object] = None,
**kwargs
):
super(JsonWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs)
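
Loosening `file_pattern` from the enum-backed `str` to `object` (on both `JsonFormat` and `JsonWriteSettings`) lets callers pass either a literal pattern or a dynamic value. A sketch, where the Expression envelope is the standard ADF JSON shape rather than anything verified against this SDK:

    from data_factory_management_client.models import JsonWriteSettings

    literal = JsonWriteSettings(file_pattern='arrayOfObjects')
    dynamic = JsonWriteSettings(
        file_pattern={'value': "@dataset().jsonPattern", 'type': 'Expression'},
    )
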
@@ -23504,6 +24211,9 @@ class MagentoSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -23525,6 +24235,7 @@ class MagentoSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -23537,16 +24248,61 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
**kwargs
):
- super(MagentoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(MagentoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'MagentoSource' # type: str
self.query = query
+class ManagedIdentityCredential(Credential):
+ """Managed identity credential.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :type additional_properties: dict[str, object]
+ :param type: Required. Type of credential. Constant filled by server.
+ :type type: str
+ :param description: Credential description.
+ :type description: str
+ :param annotations: List of tags that can be used for describing the Credential.
+ :type annotations: list[object]
+ :param resource_id: The resource id of the user assigned managed identity.
+ :type resource_id: str
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'resource_id': {'key': 'typeProperties.resourceId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ additional_properties: Optional[Dict[str, object]] = None,
+ description: Optional[str] = None,
+ annotations: Optional[List[object]] = None,
+ resource_id: Optional[str] = None,
+ **kwargs
+ ):
+ super(ManagedIdentityCredential, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs)
+ self.type = 'ManagedIdentity' # type: str
+ self.resource_id = resource_id
+
+
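+
+# ManagedIdentityCredential pins `type` to 'ManagedIdentity' in __init__, so
+# only the payload fields need supplying. A sketch with a hypothetical
+# user-assigned identity resource id (import path assumed from the docstrings):
+#
+#     from data_factory_management_client.models import ManagedIdentityCredential
+#
+#     cred = ManagedIdentityCredential(
+#         description='UAMI for customer key vault access',
+#         resource_id=(
+#             '/subscriptions/<sub>/resourceGroups/<rg>/providers'
+#             '/Microsoft.ManagedIdentity/userAssignedIdentities/<name>'  # hypothetical
+#         ),
+#     )
+#     assert cred.type == 'ManagedIdentity'
+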
class ManagedIntegrationRuntime(IntegrationRuntime):
"""Managed integration runtime, including managed elastic and managed dedicated integration runtimes.
@@ -24245,6 +25001,9 @@ class MariaDbSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -24266,6 +25025,7 @@ class MariaDbSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -24278,12 +25038,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
**kwargs
):
- super(MariaDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(MariaDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'MariaDBSource' # type: str
self.query = query
@@ -24533,6 +25294,9 @@ class MarketoSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -24554,6 +25318,7 @@ class MarketoSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -24566,16 +25331,43 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
**kwargs
):
- super(MarketoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(MarketoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'MarketoSource' # type: str
self.query = query
+class MetadataItem(msrest.serialization.Model):
+ """Specify the name and value of custom metadata item.
+
+ :param name: Metadata item key name. Type: string (or Expression with resultType string).
+ :type name: object
+ :param value: Metadata item value. Type: string (or Expression with resultType string).
+ :type value: object
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'object'},
+ 'value': {'key': 'value', 'type': 'object'},
+ }
+
+ def __init__(
+ self,
+ *,
+ name: Optional[object] = None,
+ value: Optional[object] = None,
+ **kwargs
+ ):
+ super(MetadataItem, self).__init__(**kwargs)
+ self.name = name
+ self.value = value
+
+
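+# Editor's note (illustrative sketch, not part of the generated diff): a
+# MetadataItem's name and value take plain strings or ADF expression objects:
+#
+#     item = MetadataItem(
+#         name="sourceSystem",
+#         value={"value": "@pipeline().parameters.system", "type": "Expression"},
+#     )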
class MicrosoftAccessLinkedService(LinkedService):
"""Microsoft Access linked service.
@@ -24687,6 +25479,9 @@ class MicrosoftAccessSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param pre_copy_script: A query to execute before starting the copy. Type: string (or
Expression with resultType string).
:type pre_copy_script: object
@@ -24704,6 +25499,7 @@ class MicrosoftAccessSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
}
@@ -24716,10 +25512,11 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
pre_copy_script: Optional[object] = None,
**kwargs
):
- super(MicrosoftAccessSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(MicrosoftAccessSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'MicrosoftAccessSink' # type: str
self.pre_copy_script = pre_copy_script
@@ -24743,6 +25540,9 @@ class MicrosoftAccessSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: Database query. Type: string (or Expression with resultType string).
:type query: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
@@ -24760,6 +25560,7 @@ class MicrosoftAccessSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
}
@@ -24771,11 +25572,12 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
**kwargs
):
- super(MicrosoftAccessSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(MicrosoftAccessSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'MicrosoftAccessSource' # type: str
self.query = query
self.additional_columns = additional_columns
@@ -24982,6 +25784,74 @@ def __init__(
self.database = database
+class MongoDbAtlasSink(CopySink):
+ """A copy activity MongoDB Atlas sink.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :type additional_properties: dict[str, object]
+ :param type: Required. Copy sink type. Constant filled by server.
+ :type type: str
+ :param write_batch_size: Write batch size. Type: integer (or Expression with resultType
+ integer), minimum: 0.
+ :type write_batch_size: object
+ :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType
+ string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type write_batch_timeout: object
+ :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType
+ integer).
+ :type sink_retry_count: object
+ :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string),
+ pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type sink_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count for the sink data
+ store. Type: integer (or Expression with resultType integer).
+ :type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
+ :param write_behavior: Specifies whether a document with the same key should be overwritten
+ (upsert) rather than raise an exception (insert). The default value is "insert". Type: string
+ (or Expression with resultType string).
+ :type write_behavior: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+ 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+ 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+ 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+ }
+
+ def __init__(
+ self,
+ *,
+ additional_properties: Optional[Dict[str, object]] = None,
+ write_batch_size: Optional[object] = None,
+ write_batch_timeout: Optional[object] = None,
+ sink_retry_count: Optional[object] = None,
+ sink_retry_wait: Optional[object] = None,
+ max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
+ write_behavior: Optional[object] = None,
+ **kwargs
+ ):
+ super(MongoDbAtlasSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
+ self.type = 'MongoDbAtlasSink' # type: str
+ self.write_behavior = write_behavior
+
+
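+# Editor's note (illustrative sketch, not part of the generated diff): an
+# upserting MongoDB Atlas sink; write_behavior defaults to "insert" when omitted:
+#
+#     sink = MongoDbAtlasSink(write_behavior="upsert")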
class MongoDbAtlasSource(CopySource):
"""A copy activity source for a MongoDB Atlas database.
@@ -25001,6 +25871,9 @@ class MongoDbAtlasSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param filter: Specifies selection filter using query operators. To return all documents in a
collection, omit this parameter or pass an empty document ({}). Type: string (or Expression
with resultType string).
@@ -25030,6 +25903,7 @@ class MongoDbAtlasSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'filter': {'key': 'filter', 'type': 'object'},
'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'},
'batch_size': {'key': 'batchSize', 'type': 'object'},
@@ -25044,6 +25918,7 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
filter: Optional[object] = None,
cursor_methods: Optional["MongoDbCursorMethodsProperties"] = None,
batch_size: Optional[object] = None,
@@ -25051,7 +25926,7 @@ def __init__(
additional_columns: Optional[List["AdditionalColumns"]] = None,
**kwargs
):
- super(MongoDbAtlasSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(MongoDbAtlasSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'MongoDbAtlasSource' # type: str
self.filter = filter
self.cursor_methods = cursor_methods
@@ -25308,6 +26183,9 @@ class MongoDbSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: Database query. Should be a SQL-92 query expression. Type: string (or Expression
with resultType string).
:type query: object
@@ -25326,6 +26204,7 @@ class MongoDbSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
}
@@ -25337,11 +26216,12 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
**kwargs
):
- super(MongoDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(MongoDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'MongoDbSource' # type: str
self.query = query
self.additional_columns = additional_columns
@@ -25478,6 +26358,74 @@ def __init__(
self.database = database
+class MongoDbV2Sink(CopySink):
+ """A copy activity MongoDB sink.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :type additional_properties: dict[str, object]
+ :param type: Required. Copy sink type. Constant filled by server.
+ :type type: str
+ :param write_batch_size: Write batch size. Type: integer (or Expression with resultType
+ integer), minimum: 0.
+ :type write_batch_size: object
+ :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType
+ string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type write_batch_timeout: object
+ :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType
+ integer).
+ :type sink_retry_count: object
+ :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string),
+ pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type sink_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count for the sink data
+ store. Type: integer (or Expression with resultType integer).
+ :type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
+ :param write_behavior: Specifies whether a document with the same key should be overwritten
+ (upsert) rather than raise an exception (insert). The default value is "insert". Type: string
+ (or Expression with resultType string).
+ :type write_behavior: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'},
+ 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'},
+ 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
+ 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
+ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+ }
+
+ def __init__(
+ self,
+ *,
+ additional_properties: Optional[Dict[str, object]] = None,
+ write_batch_size: Optional[object] = None,
+ write_batch_timeout: Optional[object] = None,
+ sink_retry_count: Optional[object] = None,
+ sink_retry_wait: Optional[object] = None,
+ max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
+ write_behavior: Optional[object] = None,
+ **kwargs
+ ):
+ super(MongoDbV2Sink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
+ self.type = 'MongoDbV2Sink' # type: str
+ self.write_behavior = write_behavior
+
+
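+# Editor's note (illustrative sketch, not part of the generated diff):
+# MongoDbV2Sink mirrors MongoDbAtlasSink, and write_behavior may also be an
+# expression, e.g. driven by a hypothetical pipeline variable:
+#
+#     sink = MongoDbV2Sink(
+#         write_behavior={"value": "@variables('writeMode')", "type": "Expression"},
+#     )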
class MongoDbV2Source(CopySource):
"""A copy activity source for a MongoDB database.
@@ -25497,6 +26445,9 @@ class MongoDbV2Source(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param filter: Specifies selection filter using query operators. To return all documents in a
collection, omit this parameter or pass an empty document ({}). Type: string (or Expression
with resultType string).
@@ -25526,6 +26477,7 @@ class MongoDbV2Source(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'filter': {'key': 'filter', 'type': 'object'},
'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'},
'batch_size': {'key': 'batchSize', 'type': 'object'},
@@ -25540,6 +26492,7 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
filter: Optional[object] = None,
cursor_methods: Optional["MongoDbCursorMethodsProperties"] = None,
batch_size: Optional[object] = None,
@@ -25547,7 +26500,7 @@ def __init__(
additional_columns: Optional[List["AdditionalColumns"]] = None,
**kwargs
):
- super(MongoDbV2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(MongoDbV2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'MongoDbV2Source' # type: str
self.filter = filter
self.cursor_methods = cursor_methods
@@ -25640,6 +26593,9 @@ class MySqlSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -25660,6 +26616,7 @@ class MySqlSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -25672,12 +26629,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
**kwargs
):
- super(MySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(MySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'MySqlSource' # type: str
self.query = query
@@ -25870,6 +26828,9 @@ class NetezzaSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -25896,6 +26857,7 @@ class NetezzaSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -25910,6 +26872,7 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
@@ -25917,7 +26880,7 @@ def __init__(
partition_settings: Optional["NetezzaPartitionSettings"] = None,
**kwargs
):
- super(NetezzaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(NetezzaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'NetezzaSource' # type: str
self.query = query
self.partition_option = partition_option
@@ -26231,6 +27194,9 @@ class ODataSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: OData query. For example, "$top=1". Type: string (or Expression with resultType
string).
:type query: object
@@ -26254,6 +27220,7 @@ class ODataSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
@@ -26266,12 +27233,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query: Optional[object] = None,
http_request_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
**kwargs
):
- super(ODataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(ODataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'ODataSource' # type: str
self.query = query
self.http_request_timeout = http_request_timeout
@@ -26388,6 +27356,9 @@ class OdbcSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param pre_copy_script: A query to execute before starting the copy. Type: string (or
Expression with resultType string).
:type pre_copy_script: object
@@ -26405,6 +27376,7 @@ class OdbcSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
}
@@ -26417,10 +27389,11 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
pre_copy_script: Optional[object] = None,
**kwargs
):
- super(OdbcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(OdbcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'OdbcSink' # type: str
self.pre_copy_script = pre_copy_script
@@ -26444,6 +27417,9 @@ class OdbcSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -26464,6 +27440,7 @@ class OdbcSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -26476,12 +27453,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
**kwargs
):
- super(OdbcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(OdbcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'OdbcSource' # type: str
self.query = query
@@ -26730,6 +27708,9 @@ class Office365Source(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param allowed_groups: The groups containing all the users. Type: array of strings (or
Expression with resultType array of strings).
:type allowed_groups: object
@@ -26761,6 +27742,7 @@ class Office365Source(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'allowed_groups': {'key': 'allowedGroups', 'type': 'object'},
'user_scope_filter_uri': {'key': 'userScopeFilterUri', 'type': 'object'},
'date_filter_column': {'key': 'dateFilterColumn', 'type': 'object'},
@@ -26776,6 +27758,7 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
allowed_groups: Optional[object] = None,
user_scope_filter_uri: Optional[object] = None,
date_filter_column: Optional[object] = None,
@@ -26784,7 +27767,7 @@ def __init__(
output_columns: Optional[object] = None,
**kwargs
):
- super(Office365Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(Office365Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'Office365Source' # type: str
self.allowed_groups = allowed_groups
self.user_scope_filter_uri = user_scope_filter_uri
@@ -27215,6 +28198,9 @@ class OracleCloudStorageReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -27255,6 +28241,7 @@ class OracleCloudStorageReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -27272,6 +28259,7 @@ def __init__(
*,
additional_properties: Optional[Dict[str, object]] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
recursive: Optional[object] = None,
wildcard_folder_path: Optional[object] = None,
wildcard_file_name: Optional[object] = None,
@@ -27284,7 +28272,7 @@ def __init__(
modified_datetime_end: Optional[object] = None,
**kwargs
):
- super(OracleCloudStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(OracleCloudStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'OracleCloudStorageReadSettings' # type: str
self.recursive = recursive
self.wildcard_folder_path = wildcard_folder_path
@@ -27585,6 +28573,9 @@ class OracleServiceCloudSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -27606,6 +28597,7 @@ class OracleServiceCloudSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -27618,12 +28610,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
**kwargs
):
- super(OracleServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(OracleServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'OracleServiceCloudSource' # type: str
self.query = query
@@ -27653,6 +28646,9 @@ class OracleSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType
string).
:type pre_copy_script: object
@@ -27670,6 +28666,7 @@ class OracleSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
}
@@ -27682,10 +28679,11 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
pre_copy_script: Optional[object] = None,
**kwargs
):
- super(OracleSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(OracleSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'OracleSink' # type: str
self.pre_copy_script = pre_copy_script
@@ -27709,6 +28707,9 @@ class OracleSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param oracle_reader_query: Oracle reader query. Type: string (or Expression with resultType
string).
:type oracle_reader_query: object
@@ -27735,6 +28736,7 @@ class OracleSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'partition_option': {'key': 'partitionOption', 'type': 'object'},
@@ -27749,6 +28751,7 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
oracle_reader_query: Optional[object] = None,
query_timeout: Optional[object] = None,
partition_option: Optional[object] = None,
@@ -27756,7 +28759,7 @@ def __init__(
additional_columns: Optional[List["AdditionalColumns"]] = None,
**kwargs
):
- super(OracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(OracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'OracleSource' # type: str
self.oracle_reader_query = oracle_reader_query
self.query_timeout = query_timeout
@@ -27875,8 +28878,9 @@ class OrcDataset(Dataset):
:type folder: ~data_factory_management_client.models.DatasetFolder
:param location: The location of the ORC data storage.
:type location: ~data_factory_management_client.models.DatasetLocation
- :param orc_compression_codec: Possible values include: "none", "zlib", "snappy", "lzo".
- :type orc_compression_codec: str or ~data_factory_management_client.models.OrcCompressionCodec
+ :param orc_compression_codec: The ORC compression codec to use. Type: string (or Expression
+ with resultType string).
+ :type orc_compression_codec: object
"""
_validation = {
@@ -27895,7 +28899,7 @@ class OrcDataset(Dataset):
'annotations': {'key': 'annotations', 'type': '[object]'},
'folder': {'key': 'folder', 'type': 'DatasetFolder'},
'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'},
- 'orc_compression_codec': {'key': 'typeProperties.orcCompressionCodec', 'type': 'str'},
+ 'orc_compression_codec': {'key': 'typeProperties.orcCompressionCodec', 'type': 'object'},
}
def __init__(
@@ -27910,7 +28914,7 @@ def __init__(
annotations: Optional[List[object]] = None,
folder: Optional["DatasetFolder"] = None,
location: Optional["DatasetLocation"] = None,
- orc_compression_codec: Optional[Union[str, "OrcCompressionCodec"]] = None,
+ orc_compression_codec: Optional[object] = None,
**kwargs
):
super(OrcDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
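+# Editor's note (illustrative sketch, not part of the generated diff): with the
+# codec widened from an enum to an object, both forms below should be accepted
+# (codec names per the replaced enum: none, zlib, snappy, lzo); ls_ref stands in
+# for a LinkedServiceReference constructed elsewhere:
+#
+#     ds = OrcDataset(linked_service_name=ls_ref, orc_compression_codec="snappy")
+#     ds = OrcDataset(
+#         linked_service_name=ls_ref,
+#         orc_compression_codec={"value": "@dataset().codec", "type": "Expression"},
+#     )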
@@ -27983,6 +28987,9 @@ class OrcSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: ORC store settings.
:type store_settings: ~data_factory_management_client.models.StoreWriteSettings
:param format_settings: ORC format settings.
@@ -28001,6 +29008,7 @@ class OrcSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'},
'format_settings': {'key': 'formatSettings', 'type': 'OrcWriteSettings'},
}
@@ -28014,11 +29022,12 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
store_settings: Optional["StoreWriteSettings"] = None,
format_settings: Optional["OrcWriteSettings"] = None,
**kwargs
):
- super(OrcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(OrcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'OrcSink' # type: str
self.store_settings = store_settings
self.format_settings = format_settings
@@ -28043,6 +29052,9 @@ class OrcSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: ORC store settings.
:type store_settings: ~data_factory_management_client.models.StoreReadSettings
:param additional_columns: Specifies the additional columns to be added to source data. Type:
@@ -28060,6 +29072,7 @@ class OrcSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
}
@@ -28071,11 +29084,12 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
store_settings: Optional["StoreReadSettings"] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
**kwargs
):
- super(OrcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(OrcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'OrcSource' # type: str
self.store_settings = store_settings
self.additional_columns = additional_columns
@@ -28220,9 +29234,9 @@ class ParquetDataset(Dataset):
:type folder: ~data_factory_management_client.models.DatasetFolder
:param location: The location of the parquet storage.
:type location: ~data_factory_management_client.models.DatasetLocation
- :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo", "bzip2",
- "deflate", "zipDeflate", "lz4", "tar", "tarGZip".
- :type compression_codec: str or ~data_factory_management_client.models.CompressionCodec
+ :param compression_codec: The compression codec to use. Type: string (or Expression with
+ resultType string).
+ :type compression_codec: object
"""
_validation = {
@@ -28241,7 +29255,7 @@ class ParquetDataset(Dataset):
'annotations': {'key': 'annotations', 'type': '[object]'},
'folder': {'key': 'folder', 'type': 'DatasetFolder'},
'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'},
- 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'},
+ 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'},
}
def __init__(
@@ -28256,7 +29270,7 @@ def __init__(
annotations: Optional[List[object]] = None,
folder: Optional["DatasetFolder"] = None,
location: Optional["DatasetLocation"] = None,
- compression_codec: Optional[Union[str, "CompressionCodec"]] = None,
+ compression_codec: Optional[object] = None,
**kwargs
):
super(ParquetDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
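+# Editor's note (illustrative sketch, not part of the generated diff): the same
+# widening applies to ParquetDataset, so a literal codec string still works:
+#
+#     ds = ParquetDataset(linked_service_name=ls_ref, compression_codec="gzip")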
@@ -28329,6 +29343,9 @@ class ParquetSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: Parquet store settings.
:type store_settings: ~data_factory_management_client.models.StoreWriteSettings
:param format_settings: Parquet format settings.
@@ -28347,6 +29364,7 @@ class ParquetSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'},
'format_settings': {'key': 'formatSettings', 'type': 'ParquetWriteSettings'},
}
@@ -28360,11 +29378,12 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
store_settings: Optional["StoreWriteSettings"] = None,
format_settings: Optional["ParquetWriteSettings"] = None,
**kwargs
):
- super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'ParquetSink' # type: str
self.store_settings = store_settings
self.format_settings = format_settings
@@ -28389,6 +29408,9 @@ class ParquetSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: Parquet store settings.
:type store_settings: ~data_factory_management_client.models.StoreReadSettings
:param additional_columns: Specifies the additional columns to be added to source data. Type:
@@ -28406,6 +29428,7 @@ class ParquetSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
}
@@ -28417,11 +29440,12 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
store_settings: Optional["StoreReadSettings"] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
**kwargs
):
- super(ParquetSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(ParquetSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'ParquetSource' # type: str
self.store_settings = store_settings
self.additional_columns = additional_columns
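
The source side mirrors the sink. Because CopySource is polymorphic on the `type` discriminator, a service payload carrying the new field round-trips through msrest deserialization; a sketch under the same import assumption:

    from data_factory_management_client.models import ParquetSource

    payload = {"type": "ParquetSource", "disableMetricsCollection": True}
    src = ParquetSource.deserialize(payload)  # classmethod on msrest Model
    assert src.disable_metrics_collection is True
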
@@ -28648,6 +29672,9 @@ class PaypalSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -28669,6 +29696,7 @@ class PaypalSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -28681,12 +29709,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
**kwargs
):
- super(PaypalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(PaypalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'PaypalSource' # type: str
self.query = query
@@ -28915,6 +29944,9 @@ class PhoenixSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -28936,6 +29968,7 @@ class PhoenixSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -28948,12 +29981,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
**kwargs
):
- super(PhoenixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(PhoenixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'PhoenixSource' # type: str
self.query = query
@@ -29288,18 +30322,26 @@ class PipelineRunInvokedBy(msrest.serialization.Model):
:vartype id: str
:ivar invoked_by_type: The type of the entity that started the run.
:vartype invoked_by_type: str
+ :ivar pipeline_name: The name of the pipeline that triggered the run, if any.
+ :vartype pipeline_name: str
+ :ivar pipeline_run_id: The run id of the pipeline that triggered the run, if any.
+ :vartype pipeline_run_id: str
"""
_validation = {
'name': {'readonly': True},
'id': {'readonly': True},
'invoked_by_type': {'readonly': True},
+ 'pipeline_name': {'readonly': True},
+ 'pipeline_run_id': {'readonly': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'invoked_by_type': {'key': 'invokedByType', 'type': 'str'},
+ 'pipeline_name': {'key': 'pipelineName', 'type': 'str'},
+ 'pipeline_run_id': {'key': 'pipelineRunId', 'type': 'str'},
}
def __init__(
@@ -29310,6 +30352,8 @@ def __init__(
self.name = None
self.id = None
self.invoked_by_type = None
+ self.pipeline_name = None
+ self.pipeline_run_id = None
class PipelineRunsQueryResponse(msrest.serialization.Model):
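
The two new `PipelineRunInvokedBy` attributes are read-only, so they never appear as `__init__` keywords; they are populated only when a service response is deserialized. A sketch with an illustrative payload (all field values hypothetical):

    from data_factory_management_client.models import PipelineRunInvokedBy

    payload = {
        "name": "ExecutePipelineActivity1",
        "invokedByType": "PipelineActivity",
        "pipelineName": "ParentPipeline",
        "pipelineRunId": "00000000-0000-0000-0000-000000000000",
    }
    invoked_by = PipelineRunInvokedBy.deserialize(payload)
    print(invoked_by.pipeline_name, invoked_by.pipeline_run_id)
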
@@ -29476,6 +30520,9 @@ class PostgreSqlSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -29496,6 +30543,7 @@ class PostgreSqlSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -29508,12 +30556,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
**kwargs
):
- super(PostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(PostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'PostgreSqlSource' # type: str
self.query = query
@@ -29832,6 +30881,9 @@ class PrestoSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -29853,6 +30905,7 @@ class PrestoSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -29865,12 +30918,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
**kwargs
):
- super(PrestoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(PrestoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'PrestoSource' # type: str
self.query = query
@@ -30357,6 +31411,9 @@ class QuickBooksSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -30378,6 +31435,7 @@ class QuickBooksSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -30390,12 +31448,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
**kwargs
):
- super(QuickBooksSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(QuickBooksSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'QuickBooksSource' # type: str
self.query = query
@@ -30578,6 +31637,9 @@ class RelationalSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: Database query. Type: string (or Expression with resultType string).
:type query: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
@@ -30595,6 +31657,7 @@ class RelationalSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
}
@@ -30606,11 +31669,12 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
**kwargs
):
- super(RelationalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(RelationalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'RelationalSource' # type: str
self.query = query
self.additional_columns = additional_columns
@@ -30975,6 +32039,9 @@ class ResponsysSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -30996,6 +32063,7 @@ class ResponsysSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -31008,12 +32076,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
**kwargs
):
- super(ResponsysSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(ResponsysSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'ResponsysSource' # type: str
self.query = query
@@ -31166,6 +32235,8 @@ class RestServiceLinkedService(LinkedService):
encrypted using the integration runtime credential manager. Type: string (or Expression with
resultType string).
:type encrypted_credential: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
_validation = {
@@ -31193,6 +32264,7 @@ class RestServiceLinkedService(LinkedService):
'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'},
'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
+ 'credential': {'key': 'typeProperties.credential', 'type': 'CredentialReference'},
}
def __init__(
@@ -31215,6 +32287,7 @@ def __init__(
azure_cloud_type: Optional[object] = None,
aad_resource_id: Optional[object] = None,
encrypted_credential: Optional[object] = None,
+ credential: Optional["CredentialReference"] = None,
**kwargs
):
super(RestServiceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
@@ -31231,6 +32304,7 @@ def __init__(
self.azure_cloud_type = azure_cloud_type
self.aad_resource_id = aad_resource_id
self.encrypted_credential = encrypted_credential
+ self.credential = credential
class RestSink(CopySink):
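
The `credential` reference added to `RestServiceLinkedService` above plugs into the same `CredentialReference` model used elsewhere in this diff. A hedged sketch: the `reference_name` keyword, the `url` and `authentication_type` fields, and the "ManagedServiceIdentity" value are assumptions, since none of them appear in this hunk:

    from data_factory_management_client.models import (
        CredentialReference,        # reference_name keyword assumed, per the
        RestServiceLinkedService,   # SDK's other *Reference models
    )

    ls = RestServiceLinkedService(
        url="https://api.contoso.example",             # assumed required field
        authentication_type="ManagedServiceIdentity",  # assumed enum value
        credential=CredentialReference(reference_name="myUserAssignedIdentity"),
    )
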
@@ -31258,6 +32332,9 @@ class RestSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param request_method: The HTTP method used to call the RESTful API. The default is POST. Type:
string (or Expression with resultType string).
:type request_method: object
@@ -31288,6 +32365,7 @@ class RestSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'request_method': {'key': 'requestMethod', 'type': 'object'},
'additional_headers': {'key': 'additionalHeaders', 'type': 'object'},
'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'},
@@ -31304,6 +32382,7 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
request_method: Optional[object] = None,
additional_headers: Optional[object] = None,
http_request_timeout: Optional[object] = None,
@@ -31311,7 +32390,7 @@ def __init__(
http_compression_type: Optional[object] = None,
**kwargs
):
- super(RestSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(RestSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'RestSink' # type: str
self.request_method = request_method
self.additional_headers = additional_headers
@@ -31339,6 +32418,9 @@ class RestSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param request_method: The HTTP method used to call the RESTful API. The default is GET. Type:
string (or Expression with resultType string).
:type request_method: object
@@ -31373,6 +32455,7 @@ class RestSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'request_method': {'key': 'requestMethod', 'type': 'object'},
'request_body': {'key': 'requestBody', 'type': 'object'},
'additional_headers': {'key': 'additionalHeaders', 'type': 'object'},
@@ -31389,6 +32472,7 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
request_method: Optional[object] = None,
request_body: Optional[object] = None,
additional_headers: Optional[object] = None,
@@ -31398,7 +32482,7 @@ def __init__(
additional_columns: Optional[List["AdditionalColumns"]] = None,
**kwargs
):
- super(RestSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(RestSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'RestSource' # type: str
self.request_method = request_method
self.request_body = request_body
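
Besides the new metrics flag, the surrounding parameters show how a REST read is shaped. A sketch combining them (header and body values are illustrative):

    from data_factory_management_client.models import RestSource

    src = RestSource(
        request_method="POST",                        # default is GET
        request_body='{"page": 1, "pageSize": 100}',
        additional_headers="Content-Type: application/json",
        disable_metrics_collection=True,
    )
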
@@ -31837,6 +32921,9 @@ class SalesforceMarketingCloudSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -31858,6 +32945,7 @@ class SalesforceMarketingCloudSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -31870,12 +32958,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
**kwargs
):
- super(SalesforceMarketingCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(SalesforceMarketingCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'SalesforceMarketingCloudSource' # type: str
self.query = query
@@ -32133,6 +33222,9 @@ class SalesforceServiceCloudSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param write_behavior: The write behavior for the operation. Default is Insert. Possible values
include: "Insert", "Upsert".
:type write_behavior: str or ~data_factory_management_client.models.SalesforceSinkWriteBehavior
@@ -32160,6 +33252,7 @@ class SalesforceServiceCloudSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'},
'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
@@ -32174,12 +33267,13 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
write_behavior: Optional[Union[str, "SalesforceSinkWriteBehavior"]] = None,
external_id_field_name: Optional[object] = None,
ignore_null_values: Optional[object] = None,
**kwargs
):
- super(SalesforceServiceCloudSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(SalesforceServiceCloudSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'SalesforceServiceCloudSink' # type: str
self.write_behavior = write_behavior
self.external_id_field_name = external_id_field_name
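
For the Salesforce Service Cloud sink, the new flag sits alongside the existing upsert controls documented above. A sketch (the external-ID field name is hypothetical):

    from data_factory_management_client.models import SalesforceServiceCloudSink

    sink = SalesforceServiceCloudSink(
        write_behavior="Upsert",                   # default is Insert
        external_id_field_name="External_Id__c",   # hypothetical Salesforce field
        ignore_null_values=True,
        disable_metrics_collection=False,
    )
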
@@ -32205,6 +33299,9 @@ class SalesforceServiceCloudSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: Database query. Type: string (or Expression with resultType string).
:type query: object
:param read_behavior: The read behavior for the operation. Default is Query. Possible values
@@ -32225,6 +33322,7 @@ class SalesforceServiceCloudSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'read_behavior': {'key': 'readBehavior', 'type': 'str'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
@@ -32237,12 +33335,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query: Optional[object] = None,
read_behavior: Optional[Union[str, "SalesforceSourceReadBehavior"]] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
**kwargs
):
- super(SalesforceServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(SalesforceServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'SalesforceServiceCloudSource' # type: str
self.query = query
self.read_behavior = read_behavior
@@ -32274,6 +33373,9 @@ class SalesforceSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param write_behavior: The write behavior for the operation. Default is Insert. Possible values
include: "Insert", "Upsert".
:type write_behavior: str or ~data_factory_management_client.models.SalesforceSinkWriteBehavior
@@ -32301,6 +33403,7 @@ class SalesforceSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
'external_id_field_name': {'key': 'externalIdFieldName', 'type': 'object'},
'ignore_null_values': {'key': 'ignoreNullValues', 'type': 'object'},
@@ -32315,12 +33418,13 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
write_behavior: Optional[Union[str, "SalesforceSinkWriteBehavior"]] = None,
external_id_field_name: Optional[object] = None,
ignore_null_values: Optional[object] = None,
**kwargs
):
- super(SalesforceSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(SalesforceSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'SalesforceSink' # type: str
self.write_behavior = write_behavior
self.external_id_field_name = external_id_field_name
@@ -32346,6 +33450,9 @@ class SalesforceSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -32369,6 +33476,7 @@ class SalesforceSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -32382,13 +33490,14 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
read_behavior: Optional[Union[str, "SalesforceSourceReadBehavior"]] = None,
**kwargs
):
- super(SalesforceSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(SalesforceSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'SalesforceSource' # type: str
self.query = query
self.read_behavior = read_behavior
@@ -32562,6 +33671,9 @@ class SapBwSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -32582,6 +33694,7 @@ class SapBwSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -32594,12 +33707,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
**kwargs
):
- super(SapBwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(SapBwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'SapBwSource' # type: str
self.query = query
@@ -32772,6 +33886,9 @@ class SapCloudForCustomerSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param write_behavior: The write behavior for the operation. Default is 'Insert'. Possible
values include: "Insert", "Update".
:type write_behavior: str or
@@ -32795,6 +33912,7 @@ class SapCloudForCustomerSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'write_behavior': {'key': 'writeBehavior', 'type': 'str'},
'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'},
}
@@ -32808,11 +33926,12 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
write_behavior: Optional[Union[str, "SapCloudForCustomerSinkWriteBehavior"]] = None,
http_request_timeout: Optional[object] = None,
**kwargs
):
- super(SapCloudForCustomerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(SapCloudForCustomerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'SapCloudForCustomerSink' # type: str
self.write_behavior = write_behavior
self.http_request_timeout = http_request_timeout
@@ -32837,6 +33956,9 @@ class SapCloudForCustomerSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -32863,6 +33985,7 @@ class SapCloudForCustomerSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -32876,13 +33999,14 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
http_request_timeout: Optional[object] = None,
**kwargs
):
- super(SapCloudForCustomerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(SapCloudForCustomerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'SapCloudForCustomerSource' # type: str
self.query = query
self.http_request_timeout = http_request_timeout
@@ -33050,6 +34174,9 @@ class SapEccSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -33076,6 +34203,7 @@ class SapEccSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -33089,13 +34217,14 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
http_request_timeout: Optional[object] = None,
**kwargs
):
- super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'SapEccSource' # type: str
self.query = query
self.http_request_timeout = http_request_timeout
@@ -33226,6 +34355,9 @@ class SapHanaSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -33255,6 +34387,7 @@ class SapHanaSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -33270,6 +34403,7 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
@@ -33278,7 +34412,7 @@ def __init__(
partition_settings: Optional["SapHanaPartitionSettings"] = None,
**kwargs
):
- super(SapHanaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(SapHanaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'SapHanaSource' # type: str
self.query = query
self.packet_size = packet_size
@@ -33495,6 +34629,9 @@ class SapOpenHubSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -33527,6 +34664,7 @@ class SapOpenHubSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'exclude_last_request': {'key': 'excludeLastRequest', 'type': 'object'},
@@ -33542,6 +34680,7 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
exclude_last_request: Optional[object] = None,
@@ -33550,7 +34689,7 @@ def __init__(
sap_data_column_delimiter: Optional[object] = None,
**kwargs
):
- super(SapOpenHubSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(SapOpenHubSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'SapOpenHubSource' # type: str
self.exclude_last_request = exclude_last_request
self.base_request_id = base_request_id
@@ -33917,6 +35056,9 @@ class SapTableSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -33964,6 +35106,7 @@ class SapTableSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'row_count': {'key': 'rowCount', 'type': 'object'},
@@ -33984,6 +35127,7 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
row_count: Optional[object] = None,
@@ -33997,7 +35141,7 @@ def __init__(
partition_settings: Optional["SapTablePartitionSettings"] = None,
**kwargs
):
- super(SapTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(SapTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'SapTableSource' # type: str
self.row_count = row_count
self.row_skips = row_skips
@@ -34128,9 +35272,8 @@ class ScriptAction(msrest.serialization.Model):
:type name: str
:param uri: Required. The URI for the script action.
:type uri: str
- :param roles: Required. The node types on which the script action should be executed. Possible
- values include: "Headnode", "Workernode", "Zookeeper".
- :type roles: str or ~data_factory_management_client.models.HdiNodeTypes
+ :param roles: Required. The node types on which the script action should be executed.
+ :type roles: str
:param parameters: The parameters for the script action.
:type parameters: str
"""
@@ -34153,7 +35296,7 @@ def __init__(
*,
name: str,
uri: str,
- roles: Union[str, "HdiNodeTypes"],
+ roles: str,
parameters: Optional[str] = None,
**kwargs
):
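
Loosening `roles` from the `HdiNodeTypes` enum to a plain string keeps the previously enumerated values ("Headnode", "Workernode", "Zookeeper") working while no longer rejecting other strings the service accepts. A sketch (URI and parameters are illustrative):

    from data_factory_management_client.models import ScriptAction

    action = ScriptAction(
        name="install-deps",
        uri="https://contoso.example/scripts/install.sh",
        roles="Headnode",   # formerly Union[str, "HdiNodeTypes"]
        parameters="-v 2",
    )
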
@@ -34732,6 +35875,9 @@ class ServiceNowSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -34753,6 +35899,7 @@ class ServiceNowSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -34765,16 +35912,72 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
**kwargs
):
- super(ServiceNowSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(ServiceNowSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'ServiceNowSource' # type: str
self.query = query
+class ServicePrincipalCredential(Credential):
+ """Service principal credential.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :type additional_properties: dict[str, object]
+ :param type: Required. Type of credential. Constant filled by server.
+ :type type: str
+ :param description: Credential description.
+ :type description: str
+ :param annotations: List of tags that can be used for describing the Credential.
+ :type annotations: list[object]
+ :param service_principal_id: The app ID of the service principal used to authenticate.
+ :type service_principal_id: object
+ :param service_principal_key: The key of the service principal used to authenticate.
+ :type service_principal_key:
+ ~data_factory_management_client.models.AzureKeyVaultSecretReference
+ :param tenant: The ID of the tenant to which the service principal belongs.
+ :type tenant: object
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'annotations': {'key': 'annotations', 'type': '[object]'},
+ 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'},
+ 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'AzureKeyVaultSecretReference'},
+ 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'},
+ }
+
+ def __init__(
+ self,
+ *,
+ additional_properties: Optional[Dict[str, object]] = None,
+ description: Optional[str] = None,
+ annotations: Optional[List[object]] = None,
+ service_principal_id: Optional[object] = None,
+ service_principal_key: Optional["AzureKeyVaultSecretReference"] = None,
+ tenant: Optional[object] = None,
+ **kwargs
+ ):
+ super(ServicePrincipalCredential, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs)
+ self.type = 'ServicePrincipal' # type: str
+ self.service_principal_id = service_principal_id
+ self.service_principal_key = service_principal_key
+ self.tenant = tenant
+
+
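# --- Editor's note: illustrative sketch, not part of the patch. ------------
# Constructing the new ServicePrincipalCredential, with the key resolved
# from Key Vault via AzureKeyVaultSecretReference as declared above. All
# identifiers are placeholders and the import path is an assumption;
# depending on the generated model, LinkedServiceReference may also require
# a type='LinkedServiceReference' argument.
from data_factory_management_client.models import (
    AzureKeyVaultSecretReference,
    LinkedServiceReference,
    ServicePrincipalCredential,
)

credential = ServicePrincipalCredential(
    service_principal_id="00000000-0000-0000-0000-000000000000",
    service_principal_key=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name="AzureKeyVaultLS"),
        secret_name="sp-client-secret",
    ),
    tenant="contoso.onmicrosoft.com",
    description="Service principal for customer key vault access",
)
assert credential.type == "ServicePrincipal"  # constant set by __init__
# ---------------------------------------------------------------------------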
class SetVariableActivity(Activity):
"""Set value for a Variable.
@@ -34887,6 +36090,9 @@ class SftpReadSettings(StoreReadSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param recursive: If true, files under the folder path will be read recursively. Default is
true. Type: boolean (or Expression with resultType boolean).
:type recursive: object
@@ -34924,6 +36130,7 @@ class SftpReadSettings(StoreReadSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'recursive': {'key': 'recursive', 'type': 'object'},
'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
@@ -34940,6 +36147,7 @@ def __init__(
*,
additional_properties: Optional[Dict[str, object]] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
recursive: Optional[object] = None,
wildcard_folder_path: Optional[object] = None,
wildcard_file_name: Optional[object] = None,
@@ -34951,7 +36159,7 @@ def __init__(
modified_datetime_end: Optional[object] = None,
**kwargs
):
- super(SftpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(SftpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'SftpReadSettings' # type: str
self.recursive = recursive
self.wildcard_folder_path = wildcard_folder_path
@@ -35095,6 +36303,9 @@ class SftpWriteSettings(StoreWriteSettings):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param copy_behavior: The type of copy behavior for copy sink.
:type copy_behavior: object
:param operation_timeout: Specifies the timeout for writing each chunk to the SFTP server. Default
@@ -35114,6 +36325,7 @@ class SftpWriteSettings(StoreWriteSettings):
'additional_properties': {'key': '', 'type': '{object}'},
'type': {'key': 'type', 'type': 'str'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'copy_behavior': {'key': 'copyBehavior', 'type': 'object'},
'operation_timeout': {'key': 'operationTimeout', 'type': 'object'},
'use_temp_file_rename': {'key': 'useTempFileRename', 'type': 'object'},
@@ -35124,12 +36336,13 @@ def __init__(
*,
additional_properties: Optional[Dict[str, object]] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
copy_behavior: Optional[object] = None,
operation_timeout: Optional[object] = None,
use_temp_file_rename: Optional[object] = None,
**kwargs
):
- super(SftpWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs)
+ super(SftpWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, copy_behavior=copy_behavior, **kwargs)
self.type = 'SftpWriteSettings' # type: str
self.operation_timeout = operation_timeout
self.use_temp_file_rename = use_temp_file_rename
@@ -35308,6 +36521,9 @@ class SharePointOnlineListSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: The OData query to filter the data in SharePoint Online list. For example,
"$top=1". Type: string (or Expression with resultType string).
:type query: object
@@ -35327,6 +36543,7 @@ class SharePointOnlineListSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'},
}
@@ -35338,11 +36555,12 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query: Optional[object] = None,
http_request_timeout: Optional[object] = None,
**kwargs
):
- super(SharePointOnlineListSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(SharePointOnlineListSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'SharePointOnlineListSource' # type: str
self.query = query
self.http_request_timeout = http_request_timeout
@@ -35520,6 +36738,9 @@ class ShopifySource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -35541,6 +36762,7 @@ class ShopifySource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -35553,12 +36775,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
**kwargs
):
- super(ShopifySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(ShopifySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'ShopifySource' # type: str
self.query = query
@@ -35851,6 +37074,9 @@ class SnowflakeSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType
string).
:type pre_copy_script: object
@@ -35870,6 +37096,7 @@ class SnowflakeSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
'import_settings': {'key': 'importSettings', 'type': 'SnowflakeImportCopyCommand'},
}
@@ -35883,11 +37110,12 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
pre_copy_script: Optional[object] = None,
import_settings: Optional["SnowflakeImportCopyCommand"] = None,
**kwargs
):
- super(SnowflakeSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(SnowflakeSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'SnowflakeSink' # type: str
self.pre_copy_script = pre_copy_script
self.import_settings = import_settings
@@ -35912,6 +37140,9 @@ class SnowflakeSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query: Snowflake SQL query. Type: string (or Expression with resultType string).
:type query: object
:param export_settings: Snowflake export settings.
@@ -35928,6 +37159,7 @@ class SnowflakeSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query': {'key': 'query', 'type': 'object'},
'export_settings': {'key': 'exportSettings', 'type': 'SnowflakeExportCopyCommand'},
}
@@ -35939,11 +37171,12 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query: Optional[object] = None,
export_settings: Optional["SnowflakeExportCopyCommand"] = None,
**kwargs
):
- super(SnowflakeSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(SnowflakeSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'SnowflakeSource' # type: str
self.query = query
self.export_settings = export_settings
@@ -36184,6 +37417,9 @@ class SparkSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -36205,6 +37441,7 @@ class SparkSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -36217,12 +37454,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
**kwargs
):
- super(SparkSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(SparkSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'SparkSource' # type: str
self.query = query
@@ -36294,6 +37532,9 @@ class SqlDwSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType
string).
:type pre_copy_script: object
@@ -36311,6 +37552,14 @@ class SqlDwSink(CopySink):
:param table_option: The option to handle the sink table, such as autoCreate. For now only the
'autoCreate' value is supported. Type: string (or Expression with resultType string).
:type table_option: object
+ :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or
+ Expression with resultType boolean).
+ :type sql_writer_use_table_lock: object
+ :param write_behavior: Write behavior when copying data into Azure SQL DW. Type:
+ SqlDWWriteBehaviorEnum (or Expression with resultType SqlDWWriteBehaviorEnum).
+ :type write_behavior: object
+ :param upsert_settings: SQL DW upsert settings.
+ :type upsert_settings: ~data_factory_management_client.models.SqlDwUpsertSettings
"""
_validation = {
@@ -36325,12 +37574,16 @@ class SqlDwSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'},
'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'},
'allow_copy_command': {'key': 'allowCopyCommand', 'type': 'object'},
'copy_command_settings': {'key': 'copyCommandSettings', 'type': 'DwCopyCommandSettings'},
'table_option': {'key': 'tableOption', 'type': 'object'},
+ 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+ 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlDwUpsertSettings'},
}
def __init__(
@@ -36342,15 +37595,19 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
pre_copy_script: Optional[object] = None,
allow_poly_base: Optional[object] = None,
poly_base_settings: Optional["PolybaseSettings"] = None,
allow_copy_command: Optional[object] = None,
copy_command_settings: Optional["DwCopyCommandSettings"] = None,
table_option: Optional[object] = None,
+ sql_writer_use_table_lock: Optional[object] = None,
+ write_behavior: Optional[object] = None,
+ upsert_settings: Optional["SqlDwUpsertSettings"] = None,
**kwargs
):
- super(SqlDwSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(SqlDwSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'SqlDWSink' # type: str
self.pre_copy_script = pre_copy_script
self.allow_poly_base = allow_poly_base
@@ -36358,6 +37615,9 @@ def __init__(
self.allow_copy_command = allow_copy_command
self.copy_command_settings = copy_command_settings
self.table_option = table_option
+ self.sql_writer_use_table_lock = sql_writer_use_table_lock
+ self.write_behavior = write_behavior
+ self.upsert_settings = upsert_settings
class SqlDwSource(TabularSource):
@@ -36379,6 +37639,9 @@ class SqlDwSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -36413,6 +37676,7 @@ class SqlDwSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'},
@@ -36429,6 +37693,7 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
sql_reader_query: Optional[object] = None,
@@ -36438,7 +37703,7 @@ def __init__(
partition_settings: Optional["SqlPartitionSettings"] = None,
**kwargs
):
- super(SqlDwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(SqlDwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'SqlDWSource' # type: str
self.sql_reader_query = sql_reader_query
self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name
@@ -36447,6 +37712,34 @@ def __init__(
self.partition_settings = partition_settings
+class SqlDwUpsertSettings(msrest.serialization.Model):
+ """Sql DW upsert option settings.
+
+ :param interim_schema_name: Schema name for interim table. Type: string (or Expression with
+ resultType string).
+ :type interim_schema_name: object
+ :param keys: Key column names for unique row identification. Type: array of strings (or
+ Expression with resultType array of strings).
+ :type keys: object
+ """
+
+ _attribute_map = {
+ 'interim_schema_name': {'key': 'interimSchemaName', 'type': 'object'},
+ 'keys': {'key': 'keys', 'type': 'object'},
+ }
+
+ def __init__(
+ self,
+ *,
+ interim_schema_name: Optional[object] = None,
+ keys: Optional[object] = None,
+ **kwargs
+ ):
+ super(SqlDwUpsertSettings, self).__init__(**kwargs)
+ self.interim_schema_name = interim_schema_name
+ self.keys = keys
+
+
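# --- Editor's note: illustrative sketch, not part of the patch. ------------
# Wiring the new DW upsert surface together: write_behavior selects the copy
# mode and SqlDwUpsertSettings supplies the interim schema and key columns.
# Values are placeholders; 'Upsert' is assumed to be a valid
# SqlDWWriteBehaviorEnum value, and the import path is an assumption.
from data_factory_management_client.models import SqlDwSink, SqlDwUpsertSettings

sink = SqlDwSink(
    write_behavior="Upsert",            # serialized as 'writeBehavior'
    sql_writer_use_table_lock=True,     # serialized as 'sqlWriterUseTableLock'
    upsert_settings=SqlDwUpsertSettings(
        interim_schema_name="staging",  # serialized as 'interimSchemaName'
        keys=["CustomerId"],            # serialized as 'keys'
    ),
)
# ---------------------------------------------------------------------------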
class SqlMiSink(CopySink):
"""A copy activity Azure SQL Managed Instance sink.
@@ -36472,6 +37765,9 @@ class SqlMiSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or
Expression with resultType string).
:type sql_writer_stored_procedure_name: object
@@ -36490,6 +37786,14 @@ class SqlMiSink(CopySink):
:param table_option: The option to handle the sink table, such as autoCreate. For now only the
'autoCreate' value is supported. Type: string (or Expression with resultType string).
:type table_option: object
+ :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or
+ Expression with resultType boolean).
+ :type sql_writer_use_table_lock: object
+ :param write_behavior: Write behavior when copying data into Azure SQL MI. Type:
+ SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum).
+ :type write_behavior: object
+ :param upsert_settings: SQL upsert settings.
+ :type upsert_settings: ~data_factory_management_client.models.SqlUpsertSettings
"""
_validation = {
@@ -36504,12 +37808,16 @@ class SqlMiSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'},
'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'},
'table_option': {'key': 'tableOption', 'type': 'object'},
+ 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+ 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlUpsertSettings'},
}
def __init__(
@@ -36521,15 +37829,19 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
sql_writer_stored_procedure_name: Optional[object] = None,
sql_writer_table_type: Optional[object] = None,
pre_copy_script: Optional[object] = None,
stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None,
stored_procedure_table_type_parameter_name: Optional[object] = None,
table_option: Optional[object] = None,
+ sql_writer_use_table_lock: Optional[object] = None,
+ write_behavior: Optional[object] = None,
+ upsert_settings: Optional["SqlUpsertSettings"] = None,
**kwargs
):
- super(SqlMiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(SqlMiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'SqlMISink' # type: str
self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name
self.sql_writer_table_type = sql_writer_table_type
@@ -36537,6 +37849,9 @@ def __init__(
self.stored_procedure_parameters = stored_procedure_parameters
self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name
self.table_option = table_option
+ self.sql_writer_use_table_lock = sql_writer_use_table_lock
+ self.write_behavior = write_behavior
+ self.upsert_settings = upsert_settings
class SqlMiSource(TabularSource):
@@ -36558,6 +37873,9 @@ class SqlMiSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -36593,6 +37911,7 @@ class SqlMiSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'},
@@ -36610,6 +37929,7 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
sql_reader_query: Optional[object] = None,
@@ -36620,7 +37940,7 @@ def __init__(
partition_settings: Optional["SqlPartitionSettings"] = None,
**kwargs
):
- super(SqlMiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(SqlMiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'SqlMISource' # type: str
self.sql_reader_query = sql_reader_query
self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name
@@ -36772,6 +38092,9 @@ class SqlServerSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or
Expression with resultType string).
:type sql_writer_stored_procedure_name: object
@@ -36790,6 +38113,14 @@ class SqlServerSink(CopySink):
:param table_option: The option to handle the sink table, such as autoCreate. For now only the
'autoCreate' value is supported. Type: string (or Expression with resultType string).
:type table_option: object
+ :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or
+ Expression with resultType boolean).
+ :type sql_writer_use_table_lock: object
+ :param write_behavior: Write behavior when copying data into SQL Server. Type:
+ SqlWriteBehaviorEnum (or Expression with resultType SqlWriteBehaviorEnum).
+ :type write_behavior: object
+ :param upsert_settings: SQL upsert settings.
+ :type upsert_settings: ~data_factory_management_client.models.SqlUpsertSettings
"""
_validation = {
@@ -36804,12 +38135,16 @@ class SqlServerSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'},
'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'},
'table_option': {'key': 'tableOption', 'type': 'object'},
+ 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+ 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlUpsertSettings'},
}
def __init__(
@@ -36821,15 +38156,19 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
sql_writer_stored_procedure_name: Optional[object] = None,
sql_writer_table_type: Optional[object] = None,
pre_copy_script: Optional[object] = None,
stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None,
stored_procedure_table_type_parameter_name: Optional[object] = None,
table_option: Optional[object] = None,
+ sql_writer_use_table_lock: Optional[object] = None,
+ write_behavior: Optional[object] = None,
+ upsert_settings: Optional["SqlUpsertSettings"] = None,
**kwargs
):
- super(SqlServerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(SqlServerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'SqlServerSink' # type: str
self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name
self.sql_writer_table_type = sql_writer_table_type
@@ -36837,6 +38176,9 @@ def __init__(
self.stored_procedure_parameters = stored_procedure_parameters
self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name
self.table_option = table_option
+ self.sql_writer_use_table_lock = sql_writer_use_table_lock
+ self.write_behavior = write_behavior
+ self.upsert_settings = upsert_settings
class SqlServerSource(TabularSource):
@@ -36858,6 +38200,9 @@ class SqlServerSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -36893,6 +38238,7 @@ class SqlServerSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'},
@@ -36910,6 +38256,7 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
sql_reader_query: Optional[object] = None,
@@ -36920,7 +38267,7 @@ def __init__(
partition_settings: Optional["SqlPartitionSettings"] = None,
**kwargs
):
- super(SqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(SqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'SqlServerSource' # type: str
self.sql_reader_query = sql_reader_query
self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name
@@ -37106,6 +38453,9 @@ class SqlSink(CopySink):
:param max_concurrent_connections: The maximum concurrent connection count for the sink data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param sql_writer_stored_procedure_name: SQL writer stored procedure name. Type: string (or
Expression with resultType string).
:type sql_writer_stored_procedure_name: object
@@ -37124,6 +38474,14 @@ class SqlSink(CopySink):
:param table_option: The option to handle the sink table, such as autoCreate. For now only the
'autoCreate' value is supported. Type: string (or Expression with resultType string).
:type table_option: object
+ :param sql_writer_use_table_lock: Whether to use table lock during bulk copy. Type: boolean (or
+ Expression with resultType boolean).
+ :type sql_writer_use_table_lock: object
+ :param write_behavior: Write behavior when copying data into SQL. Type: SqlWriteBehaviorEnum
+ (or Expression with resultType SqlWriteBehaviorEnum).
+ :type write_behavior: object
+ :param upsert_settings: SQL upsert settings.
+ :type upsert_settings: ~data_factory_management_client.models.SqlUpsertSettings
"""
_validation = {
@@ -37138,12 +38496,16 @@ class SqlSink(CopySink):
'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'},
'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'sql_writer_stored_procedure_name': {'key': 'sqlWriterStoredProcedureName', 'type': 'object'},
'sql_writer_table_type': {'key': 'sqlWriterTableType', 'type': 'object'},
'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'},
'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'},
'stored_procedure_table_type_parameter_name': {'key': 'storedProcedureTableTypeParameterName', 'type': 'object'},
'table_option': {'key': 'tableOption', 'type': 'object'},
+ 'sql_writer_use_table_lock': {'key': 'sqlWriterUseTableLock', 'type': 'object'},
+ 'write_behavior': {'key': 'writeBehavior', 'type': 'object'},
+ 'upsert_settings': {'key': 'upsertSettings', 'type': 'SqlUpsertSettings'},
}
def __init__(
@@ -37155,15 +38517,19 @@ def __init__(
sink_retry_count: Optional[object] = None,
sink_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
sql_writer_stored_procedure_name: Optional[object] = None,
sql_writer_table_type: Optional[object] = None,
pre_copy_script: Optional[object] = None,
stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None,
stored_procedure_table_type_parameter_name: Optional[object] = None,
table_option: Optional[object] = None,
+ sql_writer_use_table_lock: Optional[object] = None,
+ write_behavior: Optional[object] = None,
+ upsert_settings: Optional["SqlUpsertSettings"] = None,
**kwargs
):
- super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'SqlSink' # type: str
self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name
self.sql_writer_table_type = sql_writer_table_type
@@ -37171,6 +38537,9 @@ def __init__(
self.stored_procedure_parameters = stored_procedure_parameters
self.stored_procedure_table_type_parameter_name = stored_procedure_table_type_parameter_name
self.table_option = table_option
+ self.sql_writer_use_table_lock = sql_writer_use_table_lock
+ self.write_behavior = write_behavior
+ self.upsert_settings = upsert_settings
class SqlSource(TabularSource):
@@ -37192,6 +38561,9 @@ class SqlSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -37229,6 +38601,7 @@ class SqlSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'},
@@ -37246,6 +38619,7 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
sql_reader_query: Optional[object] = None,
@@ -37256,7 +38630,7 @@ def __init__(
partition_settings: Optional["SqlPartitionSettings"] = None,
**kwargs
):
- super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'SqlSource' # type: str
self.sql_reader_query = sql_reader_query
self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name
@@ -37266,6 +38640,40 @@ def __init__(
self.partition_settings = partition_settings
+class SqlUpsertSettings(msrest.serialization.Model):
+ """Sql upsert option settings.
+
+ :param use_temp_db: Specifies whether to use tempdb for the upsert interim table. Type: boolean
+ (or Expression with resultType boolean).
+ :type use_temp_db: object
+ :param interim_schema_name: Schema name for interim table. Type: string (or Expression with
+ resultType string).
+ :type interim_schema_name: object
+ :param keys: Key column names for unique row identification. Type: array of strings (or
+ Expression with resultType array of strings).
+ :type keys: object
+ """
+
+ _attribute_map = {
+ 'use_temp_db': {'key': 'useTempDB', 'type': 'object'},
+ 'interim_schema_name': {'key': 'interimSchemaName', 'type': 'object'},
+ 'keys': {'key': 'keys', 'type': 'object'},
+ }
+
+ def __init__(
+ self,
+ *,
+ use_temp_db: Optional[object] = None,
+ interim_schema_name: Optional[object] = None,
+ keys: Optional[object] = None,
+ **kwargs
+ ):
+ super(SqlUpsertSettings, self).__init__(**kwargs)
+ self.use_temp_db = use_temp_db
+ self.interim_schema_name = interim_schema_name
+ self.keys = keys
+
+
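# --- Editor's note: illustrative sketch, not part of the patch. ------------
# The generic SQL variant adds use_temp_db on top of the DW settings shown
# earlier; note the 'useTempDB' casing in the attribute map above. The same
# shape applies to SqlServerSink and SqlMiSink (SqlDwSink takes
# SqlDwUpsertSettings instead). Values are placeholders and the import path
# is an assumption.
from data_factory_management_client.models import SqlSink, SqlUpsertSettings

sink = SqlSink(
    write_behavior="Upsert",        # SqlWriteBehaviorEnum, e.g. 'Insert' or 'Upsert'
    upsert_settings=SqlUpsertSettings(
        use_temp_db=False,          # serialized as 'useTempDB'
        interim_schema_name="dbo",  # serialized as 'interimSchemaName'
        keys=["Id"],                # serialized as 'keys'
    ),
)
# ---------------------------------------------------------------------------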
class SquareLinkedService(LinkedService):
"""Square Service linked service.
@@ -37453,6 +38861,9 @@ class SquareSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -37474,6 +38885,7 @@ class SquareSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -37486,12 +38898,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
**kwargs
):
- super(SquareSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(SquareSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'SquareSource' # type: str
self.query = query
@@ -38555,6 +39968,9 @@ class SybaseSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -38575,6 +39991,7 @@ class SybaseSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -38587,12 +40004,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
**kwargs
):
- super(SybaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(SybaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'SybaseSource' # type: str
self.query = query
@@ -38955,6 +40373,9 @@ class TeradataSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -38981,6 +40402,7 @@ class TeradataSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -38995,6 +40417,7 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
@@ -39002,7 +40425,7 @@ def __init__(
partition_settings: Optional["TeradataPartitionSettings"] = None,
**kwargs
):
- super(TeradataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(TeradataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'TeradataSource' # type: str
self.query = query
self.partition_option = partition_option
@@ -40159,6 +41582,9 @@ class VerticaSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -40180,6 +41606,7 @@ class VerticaSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -40192,12 +41619,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
**kwargs
):
- super(VerticaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(VerticaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'VerticaSource' # type: str
self.query = query
@@ -40443,10 +41871,7 @@ def __init__(
class WebActivityAuthentication(msrest.serialization.Model):
"""Web activity authentication properties.
- All required parameters must be populated in order to send to Azure.
-
- :param type: Required. Web activity authentication
- (Basic/ClientCertificate/MSI/ServicePrincipal).
+ :param type: Web activity authentication (Basic/ClientCertificate/MSI/ServicePrincipal).
:type type: str
:param pfx: Base64-encoded contents of a PFX file or Certificate when used for
ServicePrincipal.
@@ -40463,12 +41888,10 @@ class WebActivityAuthentication(msrest.serialization.Model):
:param user_tenant: TenantId for which Azure Auth token will be requested when using
ServicePrincipal Authentication. Type: string (or Expression with resultType string).
:type user_tenant: object
+ :param credential: The credential reference containing authentication information.
+ :type credential: ~data_factory_management_client.models.CredentialReference
"""
- _validation = {
- 'type': {'required': True},
- }
-
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'pfx': {'key': 'pfx', 'type': 'SecretBase'},
@@ -40476,17 +41899,19 @@ class WebActivityAuthentication(msrest.serialization.Model):
'password': {'key': 'password', 'type': 'SecretBase'},
'resource': {'key': 'resource', 'type': 'object'},
'user_tenant': {'key': 'userTenant', 'type': 'object'},
+ 'credential': {'key': 'credential', 'type': 'CredentialReference'},
}
def __init__(
self,
*,
- type: str,
+ type: Optional[str] = None,
pfx: Optional["SecretBase"] = None,
username: Optional[object] = None,
password: Optional["SecretBase"] = None,
resource: Optional[object] = None,
user_tenant: Optional[object] = None,
+ credential: Optional["CredentialReference"] = None,
**kwargs
):
super(WebActivityAuthentication, self).__init__(**kwargs)
@@ -40496,6 +41921,7 @@ def __init__(
self.password = password
self.resource = resource
self.user_tenant = user_tenant
+ self.credential = credential
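# --- Editor's note: illustrative sketch, not part of the patch. ------------
# With 'type' no longer required, a WebActivityAuthentication can now be
# built purely from a credential reference. The reference name is a
# placeholder and the import path is an assumption; depending on the
# generated model, CredentialReference may also require a
# type='CredentialReference' argument.
from data_factory_management_client.models import (
    CredentialReference,
    WebActivityAuthentication,
)

auth = WebActivityAuthentication(
    credential=CredentialReference(reference_name="mySpCredential"),
)
# ---------------------------------------------------------------------------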
class WebLinkedServiceTypeProperties(msrest.serialization.Model):
@@ -40833,6 +42259,9 @@ class WebSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param additional_columns: Specifies the additional columns to be added to source data. Type:
array of objects (or Expression with resultType array of objects).
:type additional_columns: list[~data_factory_management_client.models.AdditionalColumns]
@@ -40848,6 +42277,7 @@ class WebSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
}
@@ -40858,10 +42288,11 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
**kwargs
):
- super(WebSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(WebSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'WebSource' # type: str
self.additional_columns = additional_columns
@@ -41125,6 +42556,9 @@ class XeroSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -41146,6 +42580,7 @@ class XeroSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -41158,12 +42593,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
**kwargs
):
- super(XeroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(XeroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'XeroSource' # type: str
self.query = query
@@ -41336,6 +42772,9 @@ class XmlSource(CopySource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param store_settings: Xml store settings.
:type store_settings: ~data_factory_management_client.models.StoreReadSettings
:param format_settings: Xml format settings.
@@ -41355,6 +42794,7 @@ class XmlSource(CopySource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'},
'format_settings': {'key': 'formatSettings', 'type': 'XmlReadSettings'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
@@ -41367,12 +42807,13 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
store_settings: Optional["StoreReadSettings"] = None,
format_settings: Optional["XmlReadSettings"] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
**kwargs
):
- super(XmlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs)
+ super(XmlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, **kwargs)
self.type = 'XmlSource' # type: str
self.store_settings = store_settings
self.format_settings = format_settings
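
XmlSource combines the new flag with its store/format settings. A short sketch, assuming `XmlReadSettings()` with no arguments is valid (generated read-settings models typically make every field optional):

    from azext_datafactory.vendored_sdks.datafactory import models

    xml_source = models.XmlSource(
        disable_metrics_collection=True,
        format_settings=models.XmlReadSettings(),  # all fields optional in the generated model (assumption)
    )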
@@ -41592,6 +43033,9 @@ class ZohoSource(TabularSource):
:param max_concurrent_connections: The maximum concurrent connection count for the source data
store. Type: integer (or Expression with resultType integer).
:type max_concurrent_connections: object
+ :param disable_metrics_collection: If true, disable data store metrics collection. Default is
+ false. Type: boolean (or Expression with resultType boolean).
+ :type disable_metrics_collection: object
:param query_timeout: Query timeout. Type: string (or Expression with resultType string),
pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
:type query_timeout: object
@@ -41613,6 +43057,7 @@ class ZohoSource(TabularSource):
'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'},
'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'},
'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'},
+ 'disable_metrics_collection': {'key': 'disableMetricsCollection', 'type': 'object'},
'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
'query': {'key': 'query', 'type': 'object'},
@@ -41625,11 +43070,12 @@ def __init__(
source_retry_count: Optional[object] = None,
source_retry_wait: Optional[object] = None,
max_concurrent_connections: Optional[object] = None,
+ disable_metrics_collection: Optional[object] = None,
query_timeout: Optional[object] = None,
additional_columns: Optional[List["AdditionalColumns"]] = None,
query: Optional[object] = None,
**kwargs
):
- super(ZohoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
+ super(ZohoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, disable_metrics_collection=disable_metrics_collection, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs)
self.type = 'ZohoSource' # type: str
self.query = query
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_run_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_run_operations.py
deleted file mode 100644
index 192e09232ad..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_run_operations.py
+++ /dev/null
@@ -1,132 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-import datetime
-from typing import TYPE_CHECKING
-import warnings
-
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpRequest, HttpResponse
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from .. import models
-
-if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
- from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar
-
- T = TypeVar('T')
- ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
-
-class ActivityRunOperations(object):
- """ActivityRunOperations operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer):
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def query_by_pipeline_run(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- run_id, # type: str
- last_updated_after, # type: datetime.datetime
- last_updated_before, # type: datetime.datetime
- continuation_token_parameter=None, # type: Optional[str]
- filters=None, # type: Optional[List["models.RunQueryFilter"]]
- order_by=None, # type: Optional[List["models.RunQueryOrderBy"]]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.ActivityRunsQueryResponse"
- """Query activity runs based on input filter conditions.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param run_id: The pipeline run identifier.
- :type run_id: str
- :param last_updated_after: The time at or after which the run event was updated in 'ISO 8601'
- format.
- :type last_updated_after: ~datetime.datetime
- :param last_updated_before: The time at or before which the run event was updated in 'ISO 8601'
- format.
- :type last_updated_before: ~datetime.datetime
- :param continuation_token_parameter: The continuation token for getting the next page of
- results. Null for first page.
- :type continuation_token_parameter: str
- :param filters: List of filters.
- :type filters: list[~data_factory_management_client.models.RunQueryFilter]
- :param order_by: List of OrderBy options.
- :type order_by: list[~data_factory_management_client.models.RunQueryOrderBy]
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: ActivityRunsQueryResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.ActivityRunsQueryResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ActivityRunsQueryResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.query_by_pipeline_run.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'runId': self._serialize.url("run_id", run_id, 'str'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(filter_parameters, 'RunFilterParameters')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('ActivityRunsQueryResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- query_by_pipeline_run.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/queryActivityruns'} # type: ignore
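
For reference, the operation this deleted file implemented was invoked through the generated client roughly as follows. `client` stands in for a constructed management client, and the `activity_runs` attribute name is an assumption; the parameters are taken from the docstring above:

    import datetime

    client = ...  # a DataFactoryManagementClient-style client (construction omitted)

    runs = client.activity_runs.query_by_pipeline_run(
        resource_group_name="myResourceGroup",
        factory_name="myFactory",
        run_id="2f7fdb90-5df1-4b8e-ac2f-064cfa58202b",  # illustrative pipeline run identifier
        last_updated_after=datetime.datetime(2018, 6, 16, tzinfo=datetime.timezone.utc),
        last_updated_before=datetime.datetime(2018, 6, 17, tzinfo=datetime.timezone.utc),
    )
    for activity_run in runs.value:  # ActivityRunsQueryResponse exposes a 'value' list (assumption)
        print(activity_run)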
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_operations.py
deleted file mode 100644
index e0bd3be1783..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_operations.py
+++ /dev/null
@@ -1,317 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import TYPE_CHECKING
-import warnings
-
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.paging import ItemPaged
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpRequest, HttpResponse
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from .. import models
-
-if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
- from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
-
- T = TypeVar('T')
- ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
-
-class DataFlowOperations(object):
- """DataFlowOperations operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer):
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def create_or_update(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- data_flow_name, # type: str
- properties, # type: "models.DataFlow"
- if_match=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.DataFlowResource"
- """Creates or updates a data flow.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param data_flow_name: The data flow name.
- :type data_flow_name: str
- :param properties: Data flow properties.
- :type properties: ~data_factory_management_client.models.DataFlow
- :param if_match: ETag of the data flow entity. Should only be specified for update, for which
- it should match the existing entity, or can be * for unconditional update.
- :type if_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: DataFlowResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.DataFlowResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- data_flow = models.DataFlowResource(properties=properties)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_or_update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_match is not None:
- header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(data_flow, 'DataFlowResource')
- body_content_kwargs['content'] = body_content
- request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('DataFlowResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'} # type: ignore
-
- def get(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- data_flow_name, # type: str
- if_none_match=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.DataFlowResource"
- """Gets a data flow.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param data_flow_name: The data flow name.
- :type data_flow_name: str
- :param if_none_match: ETag of the data flow entity. Should only be specified for get. If the
- ETag matches the existing entity tag, or if * was provided, then no content will be returned.
- :type if_none_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: DataFlowResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.DataFlowResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_none_match is not None:
- header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('DataFlowResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'} # type: ignore
-
- def delete(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- data_flow_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> None
- """Deletes a data flow.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param data_flow_name: The data flow name.
- :type data_flow_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.delete.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.delete(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 204]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'} # type: ignore
-
- def list_by_factory(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> Iterable["models.DataFlowListResponse"]
- """Lists data flows.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: An iterator-like instance of either DataFlowListResponse or the result of cls(response)
- :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.DataFlowListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- def extract_data(pipeline_response):
- deserialized = self._deserialize('DataFlowListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, iter(list_of_elem)
-
- def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return ItemPaged(
- get_next, extract_data
- )
- list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows'} # type: ignore
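
The deleted `list_by_factory` returned an `azure.core` ItemPaged, so continuation links were followed transparently; callers simply iterated the pager. A sketch of that pattern (`client` and the `data_flows` attribute name are assumptions):

    client = ...  # constructed management client (construction omitted)

    for data_flow in client.data_flows.list_by_factory(
        resource_group_name="myResourceGroup",
        factory_name="myFactory",
    ):
        print(data_flow.name)  # DataFlowResource carries the resource name (assumption)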
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_dataset_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_dataset_operations.py
deleted file mode 100644
index 2f866416c74..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_dataset_operations.py
+++ /dev/null
@@ -1,319 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import TYPE_CHECKING
-import warnings
-
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.paging import ItemPaged
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpRequest, HttpResponse
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from .. import models
-
-if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
- from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
-
- T = TypeVar('T')
- ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
-
-class DatasetOperations(object):
- """DatasetOperations operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer):
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def list_by_factory(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> Iterable["models.DatasetListResponse"]
- """Lists datasets.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: An iterator-like instance of either DatasetListResponse or the result of cls(response)
- :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.DatasetListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- def extract_data(pipeline_response):
- deserialized = self._deserialize('DatasetListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, iter(list_of_elem)
-
- def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return ItemPaged(
- get_next, extract_data
- )
- list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets'} # type: ignore
-
- def create_or_update(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- dataset_name, # type: str
- properties, # type: "models.Dataset"
- if_match=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.DatasetResource"
- """Creates or updates a dataset.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param dataset_name: The dataset name.
- :type dataset_name: str
- :param properties: Dataset properties.
- :type properties: ~data_factory_management_client.models.Dataset
- :param if_match: ETag of the dataset entity. Should only be specified for update, for which it
- should match the existing entity, or can be * for unconditional update.
- :type if_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: DatasetResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.DatasetResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- dataset = models.DatasetResource(properties=properties)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_or_update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_match is not None:
- header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(dataset, 'DatasetResource')
- body_content_kwargs['content'] = body_content
- request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('DatasetResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'} # type: ignore
-
- def get(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- dataset_name, # type: str
- if_none_match=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> Optional["models.DatasetResource"]
- """Gets a dataset.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param dataset_name: The dataset name.
- :type dataset_name: str
- :param if_none_match: ETag of the dataset entity. Should only be specified for get. If the ETag
- matches the existing entity tag, or if * was provided, then no content will be returned.
- :type if_none_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: DatasetResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.DatasetResource or None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DatasetResource"]]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_none_match is not None:
- header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 304]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('DatasetResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'} # type: ignore
-
- def delete(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- dataset_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> None
- """Deletes a dataset.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param dataset_name: The dataset name.
- :type dataset_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.delete.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'datasetName': self._serialize.url("dataset_name", dataset_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.delete(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 204]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/datasets/{datasetName}'} # type: ignore
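
The `if_match`/`if_none_match` parameters in this deleted file implemented standard ETag concurrency control: fetch the entity, then update conditionally. A sketch of the pattern the code supported — `client`, the `datasets` attribute, and the `etag` attribute on the returned resource are assumptions; the `if_match` semantics are quoted from the docstring above:

    client = ...  # constructed management client (construction omitted)

    current = client.datasets.get("myResourceGroup", "myFactory", "myDataset")
    updated = client.datasets.create_or_update(
        "myResourceGroup",
        "myFactory",
        "myDataset",
        properties=current.properties,
        if_match=current.etag,  # rejected by the service if the entity changed since the GET
    )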
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factory_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factory_operations.py
deleted file mode 100644
index 5b8622e97f9..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factory_operations.py
+++ /dev/null
@@ -1,671 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import TYPE_CHECKING
-import warnings
-
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.paging import ItemPaged
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpRequest, HttpResponse
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from .. import models
-
-if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
- from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
-
- T = TypeVar('T')
- ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
-
-class FactoryOperations(object):
- """FactoryOperations operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer):
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def list(
- self,
- **kwargs # type: Any
- ):
- # type: (...) -> Iterable["models.FactoryListResponse"]
- """Lists factories under the specified subscription.
-
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: An iterator-like instance of either FactoryListResponse or the result of cls(response)
- :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.FactoryListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.FactoryListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- def extract_data(pipeline_response):
- deserialized = self._deserialize('FactoryListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, iter(list_of_elem)
-
- def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return ItemPaged(
- get_next, extract_data
- )
- list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/factories'} # type: ignore
-
- def configure_factory_repo(
- self,
- location_id, # type: str
- factory_resource_id=None, # type: Optional[str]
- repo_configuration=None, # type: Optional["models.FactoryRepoConfiguration"]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.Factory"
- """Updates a factory's repo information.
-
- :param location_id: The location identifier.
- :type location_id: str
- :param factory_resource_id: The factory resource id.
- :type factory_resource_id: str
- :param repo_configuration: Git repo information of the factory.
- :type repo_configuration: ~data_factory_management_client.models.FactoryRepoConfiguration
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: Factory, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.Factory
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- factory_repo_update = models.FactoryRepoUpdate(factory_resource_id=factory_resource_id, repo_configuration=repo_configuration)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.configure_factory_repo.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'locationId': self._serialize.url("location_id", location_id, 'str'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(factory_repo_update, 'FactoryRepoUpdate')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('Factory', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- configure_factory_repo.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataFactory/locations/{locationId}/configureFactoryRepo'} # type: ignore
-
- def list_by_resource_group(
- self,
- resource_group_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> Iterable["models.FactoryListResponse"]
- """Lists factories.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: An iterator-like instance of either FactoryListResponse or the result of cls(response)
- :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.FactoryListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.FactoryListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list_by_resource_group.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- def extract_data(pipeline_response):
- deserialized = self._deserialize('FactoryListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, iter(list_of_elem)
-
- def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return ItemPaged(
- get_next, extract_data
- )
- list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories'} # type: ignore
-
- def create_or_update(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- if_match=None, # type: Optional[str]
- location=None, # type: Optional[str]
- tags=None, # type: Optional[Dict[str, str]]
- identity=None, # type: Optional["models.FactoryIdentity"]
- repo_configuration=None, # type: Optional["models.FactoryRepoConfiguration"]
- global_parameters=None, # type: Optional[Dict[str, "models.GlobalParameterSpecification"]]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.Factory"
- """Creates or updates a factory.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param if_match: ETag of the factory entity. Should only be specified for update, for which it
- should match the existing entity, or can be * for unconditional update.
- :type if_match: str
- :param location: The resource location.
- :type location: str
- :param tags: The resource tags.
- :type tags: dict[str, str]
- :param identity: Managed service identity of the factory.
- :type identity: ~data_factory_management_client.models.FactoryIdentity
- :param repo_configuration: Git repo information of the factory.
- :type repo_configuration: ~data_factory_management_client.models.FactoryRepoConfiguration
- :param global_parameters: List of parameters for the factory.
- :type global_parameters: dict[str, ~data_factory_management_client.models.GlobalParameterSpecification]
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: Factory, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.Factory
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- factory = models.Factory(location=location, tags=tags, identity=identity, repo_configuration=repo_configuration, global_parameters=global_parameters)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_or_update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_match is not None:
- header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(factory, 'Factory')
- body_content_kwargs['content'] = body_content
- request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('Factory', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore
-
- def update(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- tags=None, # type: Optional[Dict[str, str]]
- identity=None, # type: Optional["models.FactoryIdentity"]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.Factory"
- """Updates a factory.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param tags: The resource tags.
- :type tags: dict[str, str]
- :param identity: Managed service identity of the factory.
- :type identity: ~data_factory_management_client.models.FactoryIdentity
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: Factory, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.Factory
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- factory_update_parameters = models.FactoryUpdateParameters(tags=tags, identity=identity)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(factory_update_parameters, 'FactoryUpdateParameters')
- body_content_kwargs['content'] = body_content
- request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('Factory', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore
-
- def get(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- if_none_match=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> Optional["models.Factory"]
- """Gets a factory.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param if_none_match: ETag of the factory entity. Should only be specified for get. If the ETag
- matches the existing entity tag, or if * was provided, then no content will be returned.
- :type if_none_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: Factory, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.Factory or None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.Factory"]]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_none_match is not None:
- header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 304]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('Factory', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore
-
- def delete(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> None
- """Deletes a factory.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.delete.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.delete(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 204]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}'} # type: ignore
-
- def get_git_hub_access_token(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- git_hub_access_code, # type: str
- git_hub_access_token_base_url, # type: str
- git_hub_client_id=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.GitHubAccessTokenResponse"
- """Get GitHub Access Token.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param git_hub_access_code: GitHub access code.
- :type git_hub_access_code: str
- :param git_hub_access_token_base_url: GitHub access token base URL.
- :type git_hub_access_token_base_url: str
- :param git_hub_client_id: GitHub application client ID.
- :type git_hub_client_id: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: GitHubAccessTokenResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.GitHubAccessTokenResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.GitHubAccessTokenResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- git_hub_access_token_request = models.GitHubAccessTokenRequest(git_hub_access_code=git_hub_access_code, git_hub_client_id=git_hub_client_id, git_hub_access_token_base_url=git_hub_access_token_base_url)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.get_git_hub_access_token.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(git_hub_access_token_request, 'GitHubAccessTokenRequest')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('GitHubAccessTokenResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get_git_hub_access_token.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getGitHubAccessToken'} # type: ignore
-
- def get_data_plane_access(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- permissions=None, # type: Optional[str]
- access_resource_path=None, # type: Optional[str]
- profile_name=None, # type: Optional[str]
- start_time=None, # type: Optional[str]
- expire_time=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.AccessPolicyResponse"
- """Get Data Plane access.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param permissions: The string with permissions for Data Plane access. Currently only 'r' is
- supported, which grants read-only access.
- :type permissions: str
- :param access_resource_path: The resource path to get access to, relative to the factory.
- Currently only the empty string is supported, which corresponds to the factory resource.
- :type access_resource_path: str
- :param profile_name: The name of the profile. Currently only the default is supported. The
- default value is DefaultProfile.
- :type profile_name: str
- :param start_time: Start time for the token. If not specified, the current time will be used.
- :type start_time: str
- :param expire_time: Expiration time for the token. The maximum duration for the token is
- eight hours; by default the token will expire in eight hours.
- :type expire_time: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: AccessPolicyResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.AccessPolicyResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.AccessPolicyResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- policy = models.UserAccessPolicy(permissions=permissions, access_resource_path=access_resource_path, profile_name=profile_name, start_time=start_time, expire_time=expire_time)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.get_data_plane_access.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(policy, 'UserAccessPolicy')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('AccessPolicyResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get_data_plane_access.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getDataPlaneAccess'} # type: ignore
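
For orientation, the factory operations deleted above were consumed through the vendored management client rather than called directly. The following is a minimal usage sketch, not code from this repository: the import path mirrors the vendored package, but the client constructor signature and the "factories" attribute name are assumptions.

    from azure.identity import DefaultAzureCredential
    from azext_datafactory.vendored_sdks.datafactory import DataFactoryManagementClient

    # Assumed constructor shape (credential, subscription_id); not confirmed by this diff.
    client = DataFactoryManagementClient(DefaultAzureCredential(), "<subscription-id>")

    # create_or_update builds models.Factory(...) from the flattened keyword
    # arguments, PUTs it with an optional If-Match header, and returns the
    # deserialized Factory on HTTP 200.
    factory = client.factories.create_or_update(
        resource_group_name="my-rg",
        factory_name="myfactory",
        location="eastus",
        tags={"env": "dev"},
    )

    # list_by_resource_group returns an ItemPaged; iterating it drives the
    # prepare_request/get_next/extract_data closures and follows next_link
    # until the service stops returning one.
    for f in client.factories.list_by_resource_group("my-rg"):
        print(f.name)
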
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_node_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_node_operations.py
deleted file mode 100644
index a7903633080..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_node_operations.py
+++ /dev/null
@@ -1,309 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import TYPE_CHECKING
-import warnings
-
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpRequest, HttpResponse
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from .. import models
-
-if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
- from typing import Any, Callable, Dict, Generic, Optional, TypeVar
-
- T = TypeVar('T')
- ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
-
-class IntegrationRuntimeNodeOperations(object):
- """IntegrationRuntimeNodeOperations operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer):
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def get(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- node_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> "models.SelfHostedIntegrationRuntimeNode"
- """Gets a self-hosted integration runtime node.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param node_name: The integration runtime node name.
- :type node_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: SelfHostedIntegrationRuntimeNode, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.SelfHostedIntegrationRuntimeNode
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.SelfHostedIntegrationRuntimeNode"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('SelfHostedIntegrationRuntimeNode', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} # type: ignore
-
- def delete(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- node_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> None
- """Deletes a self-hosted integration runtime node.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param node_name: The integration runtime node name.
- :type node_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.delete.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.delete(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 204]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} # type: ignore
-
- def update(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- node_name, # type: str
- concurrent_jobs_limit=None, # type: Optional[int]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.SelfHostedIntegrationRuntimeNode"
- """Updates a self-hosted integration runtime node.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param node_name: The integration runtime node name.
- :type node_name: str
- :param concurrent_jobs_limit: The number of concurrent jobs permitted to run on the integration
- runtime node. Values between 1 and maxConcurrentJobs (inclusive) are allowed.
- :type concurrent_jobs_limit: int
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: SelfHostedIntegrationRuntimeNode, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.SelfHostedIntegrationRuntimeNode
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.SelfHostedIntegrationRuntimeNode"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- update_integration_runtime_node_request = models.UpdateIntegrationRuntimeNodeRequest(concurrent_jobs_limit=concurrent_jobs_limit)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(update_integration_runtime_node_request, 'UpdateIntegrationRuntimeNodeRequest')
- body_content_kwargs['content'] = body_content
- request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('SelfHostedIntegrationRuntimeNode', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}'} # type: ignore
-
- def get_ip_address(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- node_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> "models.IntegrationRuntimeNodeIpAddress"
- """Get the IP address of self-hosted integration runtime node.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param node_name: The integration runtime node name.
- :type node_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: IntegrationRuntimeNodeIpAddress, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.IntegrationRuntimeNodeIpAddress
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeNodeIpAddress"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get_ip_address.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'nodeName': self._serialize.url("node_name", node_name, 'str', max_length=150, min_length=1, pattern=r'^[a-z0-9A-Z][a-z0-9A-Z_-]{0,149}$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('IntegrationRuntimeNodeIpAddress', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get_ip_address.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/nodes/{nodeName}/ipAddress'} # type: ignore
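
The integration runtime operations deleted below follow the same ETag contract seen throughout these generated files: get sends If-None-Match and returns None on HTTP 304, while create_or_update sends If-Match for conditional writes. A minimal sketch of that flow, assuming the client from the previous sketch, an "integration_runtimes" attribute name, and that the resource model exposes its ETag as .etag (all assumptions):

    # Read the current resource; the generated model is assumed to surface
    # the response ETag as `ir.etag`.
    ir = client.integration_runtimes.get("my-rg", "myfactory", "my-ir")

    # Cached read: with a matching ETag the service answers 304, the method
    # skips deserialization, and the caller gets None back.
    cached = client.integration_runtimes.get(
        "my-rg", "myfactory", "my-ir", if_none_match=ir.etag
    )
    assert cached is None  # unchanged since the ETag was issued

    # Conditional write: the PUT only succeeds while the entity still matches
    # the supplied ETag; passing "*" instead forces an unconditional update.
    updated = client.integration_runtimes.create_or_update(
        "my-rg", "myfactory", "my-ir",
        properties=ir.properties,
        if_match=ir.etag,
    )
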
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_operations.py
deleted file mode 100644
index 1fb5fc6b30d..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_operations.py
+++ /dev/null
@@ -1,1198 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import TYPE_CHECKING
-import warnings
-
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.paging import ItemPaged
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpRequest, HttpResponse
-from azure.core.polling import LROPoller, NoPolling, PollingMethod
-from azure.mgmt.core.exceptions import ARMErrorFormat
-from azure.mgmt.core.polling.arm_polling import ARMPolling
-
-from .. import models
-
-if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
- from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
-
- T = TypeVar('T')
- ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
-
-class IntegrationRuntimeOperations(object):
- """IntegrationRuntimeOperations operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer):
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def list_by_factory(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> Iterable["models.IntegrationRuntimeListResponse"]
- """Lists integration runtimes.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: An iterator-like instance of either IntegrationRuntimeListResponse or the result of cls(response)
- :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.IntegrationRuntimeListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- def extract_data(pipeline_response):
- deserialized = self._deserialize('IntegrationRuntimeListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, iter(list_of_elem)
-
- def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return ItemPaged(
- get_next, extract_data
- )
- list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes'} # type: ignore
-
- def create_or_update(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- properties, # type: "models.IntegrationRuntime"
- if_match=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.IntegrationRuntimeResource"
- """Creates or updates an integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param properties: Integration runtime properties.
- :type properties: ~data_factory_management_client.models.IntegrationRuntime
- :param if_match: ETag of the integration runtime entity. Should only be specified for update,
- for which it should match the existing entity or can be * for an unconditional update.
- :type if_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: IntegrationRuntimeResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- integration_runtime = models.IntegrationRuntimeResource(properties=properties)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_or_update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_match is not None:
- header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(integration_runtime, 'IntegrationRuntimeResource')
- body_content_kwargs['content'] = body_content
- request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore
-
- def get(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- if_none_match=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> Optional["models.IntegrationRuntimeResource"]
- """Gets an integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param if_none_match: ETag of the integration runtime entity. Should only be specified for get.
- If the ETag matches the existing entity tag, or if * was provided, then no content will be
- returned.
- :type if_none_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: IntegrationRuntimeResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource or None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.IntegrationRuntimeResource"]]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_none_match is not None:
- header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 304]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore
-
- def update(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- auto_update=None, # type: Optional[Union[str, "models.IntegrationRuntimeAutoUpdate"]]
- update_delay_offset=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.IntegrationRuntimeResource"
- """Updates an integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param auto_update: Enables or disables the auto-update feature of the self-hosted integration
- runtime. See https://go.microsoft.com/fwlink/?linkid=854189.
- :type auto_update: str or ~data_factory_management_client.models.IntegrationRuntimeAutoUpdate
- :param update_delay_offset: The time offset (in hours) in the day, e.g., PT03H is 3 hours. The
- integration runtime auto-update will happen at that time.
- :type update_delay_offset: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: IntegrationRuntimeResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- update_integration_runtime_request = models.UpdateIntegrationRuntimeRequest(auto_update=auto_update, update_delay_offset=update_delay_offset)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(update_integration_runtime_request, 'UpdateIntegrationRuntimeRequest')
- body_content_kwargs['content'] = body_content
- request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore
-
- def delete(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> None
- """Deletes an integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.delete.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.delete(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 204]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore
-
- def get_status(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> "models.IntegrationRuntimeStatusResponse"
- """Gets detailed status information for an integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: IntegrationRuntimeStatusResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.IntegrationRuntimeStatusResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get_status.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getStatus'} # type: ignore
-
- def get_connection_info(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> "models.IntegrationRuntimeConnectionInfo"
- """Gets the on-premises integration runtime connection information for encrypting the on-premises
- data source credentials.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: IntegrationRuntimeConnectionInfo, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.IntegrationRuntimeConnectionInfo
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeConnectionInfo"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get_connection_info.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('IntegrationRuntimeConnectionInfo', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get_connection_info.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getConnectionInfo'} # type: ignore
-
- def regenerate_auth_key(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- key_name=None, # type: Optional[Union[str, "models.IntegrationRuntimeAuthKeyName"]]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.IntegrationRuntimeAuthKeys"
- """Regenerates the authentication key for an integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param key_name: The name of the authentication key to regenerate.
- :type key_name: str or ~data_factory_management_client.models.IntegrationRuntimeAuthKeyName
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: IntegrationRuntimeAuthKeys, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.IntegrationRuntimeAuthKeys
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeAuthKeys"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- regenerate_key_parameters = models.IntegrationRuntimeRegenerateKeyParameters(key_name=key_name)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.regenerate_auth_key.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(regenerate_key_parameters, 'IntegrationRuntimeRegenerateKeyParameters')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('IntegrationRuntimeAuthKeys', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- regenerate_auth_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/regenerateAuthKey'} # type: ignore
-
- def list_auth_key(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> "models.IntegrationRuntimeAuthKeys"
- """Retrieves the authentication keys for an integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: IntegrationRuntimeAuthKeys, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.IntegrationRuntimeAuthKeys
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeAuthKeys"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.list_auth_key.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('IntegrationRuntimeAuthKeys', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- list_auth_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/listAuthKeys'} # type: ignore
-
- def _start_initial(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> Optional["models.IntegrationRuntimeStatusResponse"]
- cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.IntegrationRuntimeStatusResponse"]]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self._start_initial.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 202]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start'} # type: ignore
-
- def begin_start(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> LROPoller["models.IntegrationRuntimeStatusResponse"]
- """Starts a ManagedReserved type integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: True for ARMPolling, False for no polling, or a
- polling object for personal polling strategy
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
- :return: An instance of LROPoller that returns either IntegrationRuntimeStatusResponse or the result of cls(response)
- :rtype: ~azure.core.polling.LROPoller[~data_factory_management_client.models.IntegrationRuntimeStatusResponse]
- :raises ~azure.core.exceptions.HttpResponseError:
- """
- polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"]
- lro_delay = kwargs.pop(
- 'polling_interval',
- self._config.polling_interval
- )
- cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
- if cont_token is None:
- raw_result = self._start_initial(
- resource_group_name=resource_group_name,
- factory_name=factory_name,
- integration_runtime_name=integration_runtime_name,
- cls=lambda x,y,z: x,
- **kwargs
- )
-
- kwargs.pop('error_map', None)
- kwargs.pop('content_type', None)
-
- def get_long_running_output(pipeline_response):
- deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
- return deserialized
-
- if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
- elif polling is False: polling_method = NoPolling()
- else: polling_method = polling
- if cont_token:
- return LROPoller.from_continuation_token(
- polling_method=polling_method,
- continuation_token=cont_token,
- client=self._client,
- deserialization_callback=get_long_running_output
- )
- else:
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start'} # type: ignore
-
- def _stop_initial(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> None
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self._stop_initial.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 202]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop'} # type: ignore
-
- def begin_stop(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> LROPoller[None]
- """Stops a ManagedReserved type integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: True for ARMPolling, False for no polling, or a
- polling object for personal polling strategy
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
- :return: An instance of LROPoller that returns either None or the result of cls(response)
- :rtype: ~azure.core.polling.LROPoller[None]
- :raises ~azure.core.exceptions.HttpResponseError:
- """
- polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- lro_delay = kwargs.pop(
- 'polling_interval',
- self._config.polling_interval
- )
- cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
- if cont_token is None:
- raw_result = self._stop_initial(
- resource_group_name=resource_group_name,
- factory_name=factory_name,
- integration_runtime_name=integration_runtime_name,
- cls=lambda x,y,z: x,
- **kwargs
- )
-
- kwargs.pop('error_map', None)
- kwargs.pop('content_type', None)
-
- def get_long_running_output(pipeline_response):
- if cls:
- return cls(pipeline_response, None, {})
-
- if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
- elif polling is False: polling_method = NoPolling()
- else: polling_method = polling
- if cont_token:
- return LROPoller.from_continuation_token(
- polling_method=polling_method,
- continuation_token=cont_token,
- client=self._client,
- deserialization_callback=get_long_running_output
- )
- else:
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop'} # type: ignore
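
The two long-running operations above follow the standard azure-core LRO pattern: the initial POST is issued by `_start_initial`/`_stop_initial`, polling is delegated to `ARMPolling`, and a saved `continuation_token` lets a caller resume an in-flight operation. A minimal sketch of driving them follows; the `DataFactoryManagementClient` import path, the `integration_runtimes` attribute name, and the use of `DefaultAzureCredential` are illustrative assumptions, not taken from this diff.

```python
# Sketch only: the client class, module path, and operation-group attribute
# are assumptions; the vendored package may expose different names.
from azure.identity import DefaultAzureCredential
from azext_datafactory.vendored_sdks.datafactory import DataFactoryManagementClient

client = DataFactoryManagementClient(
    credential=DefaultAzureCredential(),
    subscription_id="<subscription-id>",
)

# begin_start issues the initial POST (200 or 202) and returns an LROPoller.
poller = client.integration_runtimes.begin_start(
    resource_group_name="my-rg",
    factory_name="my-factory",
    integration_runtime_name="my-ir",
)

# The continuation token captures the polling state, so a restarted process
# can resume the same operation instead of re-issuing the POST.
token = poller.continuation_token()
resumed = client.integration_runtimes.begin_start(
    resource_group_name="my-rg",
    factory_name="my-factory",
    integration_runtime_name="my-ir",
    continuation_token=token,
)
status = resumed.result()  # IntegrationRuntimeStatusResponse
```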
-
- def sync_credentials(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> None
- """Force the integration runtime to synchronize credentials across integration runtime nodes, and
- this will override the credentials across all worker nodes with those available on the
- dispatcher node. If you already have the latest credential backup file, you should manually
- import it (preferred) on any self-hosted integration runtime node than using this API directly.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.sync_credentials.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- sync_credentials.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/syncCredentials'} # type: ignore
-
- def get_monitoring_data(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> "models.IntegrationRuntimeMonitoringData"
- """Get the integration runtime monitoring data, which includes the monitor data for all the nodes
- under this integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: IntegrationRuntimeMonitoringData, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.IntegrationRuntimeMonitoringData
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeMonitoringData"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get_monitoring_data.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('IntegrationRuntimeMonitoringData', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get_monitoring_data.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/monitoringData'} # type: ignore
-
- def upgrade(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> None
- """Upgrade self-hosted integration runtime to latest version if availability.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.upgrade.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- upgrade.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/upgrade'} # type: ignore
-
- def remove_link(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- linked_factory_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> None
- """Remove all linked integration runtimes under specific data factory in a self-hosted integration
- runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param linked_factory_name: The data factory name for linked integration runtime.
- :type linked_factory_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- linked_integration_runtime_request = models.LinkedIntegrationRuntimeRequest(linked_factory_name=linked_factory_name)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.remove_link.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(linked_integration_runtime_request, 'LinkedIntegrationRuntimeRequest')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- remove_link.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/removeLinks'} # type: ignore
-
- def create_linked_integration_runtime(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- integration_runtime_name, # type: str
- name=None, # type: Optional[str]
- subscription_id=None, # type: Optional[str]
- data_factory_name=None, # type: Optional[str]
- data_factory_location=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.IntegrationRuntimeStatusResponse"
- """Create a linked integration runtime entry in a shared integration runtime.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param integration_runtime_name: The integration runtime name.
- :type integration_runtime_name: str
- :param name: The name of the linked integration runtime.
- :type name: str
- :param subscription_id: The ID of the subscription that the linked integration runtime belongs
- to.
- :type subscription_id: str
- :param data_factory_name: The name of the data factory that the linked integration runtime
- belongs to.
- :type data_factory_name: str
- :param data_factory_location: The location of the data factory that the linked integration
- runtime belongs to.
- :type data_factory_location: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: IntegrationRuntimeStatusResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.IntegrationRuntimeStatusResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- create_linked_integration_runtime_request = models.CreateLinkedIntegrationRuntimeRequest(name=name, subscription_id=subscription_id, data_factory_name=data_factory_name, data_factory_location=data_factory_location)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_linked_integration_runtime.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(create_linked_integration_runtime_request, 'CreateLinkedIntegrationRuntimeRequest')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_linked_integration_runtime.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/linkedIntegrationRuntime'} # type: ignore
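
Taken together, `create_linked_integration_runtime` and `remove_link` above implement self-hosted integration runtime sharing: a consumer factory is registered as a linked runtime on a shared IR, and all links owned by that factory can later be dropped in one call. A hedged sketch, reusing the assumed `client` from the earlier note; all resource names and the region are placeholders.

```python
# Register a linked IR for a consumer factory on a shared self-hosted IR.
# The flattened keyword arguments mirror CreateLinkedIntegrationRuntimeRequest,
# which the method constructs internally (see the body above).
client.integration_runtimes.create_linked_integration_runtime(
    resource_group_name="my-rg",
    factory_name="shared-factory",
    integration_runtime_name="shared-ir",
    name="linked-ir",
    subscription_id="<consumer-subscription-id>",
    data_factory_name="consumer-factory",
    data_factory_location="westus2",
)

# Later, remove every link that the consumer factory holds on the shared IR.
client.integration_runtimes.remove_link(
    resource_group_name="my-rg",
    factory_name="shared-factory",
    integration_runtime_name="shared-ir",
    linked_factory_name="consumer-factory",
)
```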
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_service_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_service_operations.py
deleted file mode 100644
index 7124cb588eb..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_service_operations.py
+++ /dev/null
@@ -1,320 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import TYPE_CHECKING
-import warnings
-
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.paging import ItemPaged
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpRequest, HttpResponse
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from .. import models
-
-if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
- from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
-
- T = TypeVar('T')
- ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
-
-class LinkedServiceOperations(object):
- """LinkedServiceOperations operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer):
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def list_by_factory(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> Iterable["models.LinkedServiceListResponse"]
- """Lists linked services.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: An iterator-like instance of either LinkedServiceListResponse or the result of cls(response)
- :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.LinkedServiceListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- def extract_data(pipeline_response):
- deserialized = self._deserialize('LinkedServiceListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, iter(list_of_elem)
-
- def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return ItemPaged(
- get_next, extract_data
- )
- list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices'} # type: ignore
-
- def create_or_update(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- linked_service_name, # type: str
- properties, # type: "models.LinkedService"
- if_match=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.LinkedServiceResource"
- """Creates or updates a linked service.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param linked_service_name: The linked service name.
- :type linked_service_name: str
- :param properties: Properties of linked service.
- :type properties: ~data_factory_management_client.models.LinkedService
- :param if_match: ETag of the linkedService entity. Should only be specified for update, for
- which it should match existing entity or can be * for unconditional update.
- :type if_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: LinkedServiceResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.LinkedServiceResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- linked_service = models.LinkedServiceResource(properties=properties)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_or_update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_match is not None:
- header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(linked_service, 'LinkedServiceResource')
- body_content_kwargs['content'] = body_content
- request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('LinkedServiceResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} # type: ignore
-
- def get(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- linked_service_name, # type: str
- if_none_match=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> Optional["models.LinkedServiceResource"]
- """Gets a linked service.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param linked_service_name: The linked service name.
- :type linked_service_name: str
- :param if_none_match: ETag of the linked service entity. Should only be specified for get. If
- the ETag matches the existing entity tag, or if * was provided, then no content will be
- returned.
- :type if_none_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: LinkedServiceResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.LinkedServiceResource or None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.LinkedServiceResource"]]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_none_match is not None:
- header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 304]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('LinkedServiceResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} # type: ignore
-
- def delete(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- linked_service_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> None
- """Deletes a linked service.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param linked_service_name: The linked service name.
- :type linked_service_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.delete.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.delete(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 204]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/linkedservices/{linkedServiceName}'} # type: ignore
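
Two patterns in the deleted `LinkedServiceOperations` are worth noting: `list_by_factory` returns an `ItemPaged` that follows `next_link` transparently, and `create_or_update` accepts the flattened `properties` model and wraps it in a `LinkedServiceResource` itself. A sketch under the same assumptions as above; the `linked_services` attribute and the `AzureStorageLinkedService` model name are unverified.

```python
# Assumes the vendored package re-exports its generated models, as the
# operations module's `from .. import models` suggests.
from azext_datafactory.vendored_sdks.datafactory import models

# ItemPaged lazily fetches pages; iteration follows next_link automatically.
for ls in client.linked_services.list_by_factory("my-rg", "my-factory"):
    print(ls.name, ls.properties.type)

# `properties` is the polymorphic LinkedService payload; the method wraps it
# in LinkedServiceResource before serializing the PUT body.
props = models.AzureStorageLinkedService(
    connection_string="DefaultEndpointsProtocol=https;AccountName=<name>;AccountKey=<key>",
)
client.linked_services.create_or_update(
    resource_group_name="my-rg",
    factory_name="my-factory",
    linked_service_name="MyStorageLinkedService",
    properties=props,
    if_match=None,  # supply an ETag (or "*") only for conditional updates
)
```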
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoint_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoint_operations.py
deleted file mode 100644
index 29be0bd0e6d..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoint_operations.py
+++ /dev/null
@@ -1,344 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import TYPE_CHECKING
-import warnings
-
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.paging import ItemPaged
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpRequest, HttpResponse
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from .. import models
-
-if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
- from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar
-
- T = TypeVar('T')
- ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
-
-class ManagedPrivateEndpointOperations(object):
- """ManagedPrivateEndpointOperations operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer):
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def list_by_factory(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- managed_virtual_network_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> Iterable["models.ManagedPrivateEndpointListResponse"]
- """Lists managed private endpoints.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param managed_virtual_network_name: Managed virtual network name.
- :type managed_virtual_network_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: An iterator-like instance of either ManagedPrivateEndpointListResponse or the result of cls(response)
- :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.ManagedPrivateEndpointListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- def extract_data(pipeline_response):
- deserialized = self._deserialize('ManagedPrivateEndpointListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, iter(list_of_elem)
-
- def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return ItemPaged(
- get_next, extract_data
- )
- list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints'} # type: ignore
-
- def create_or_update(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- managed_virtual_network_name, # type: str
- managed_private_endpoint_name, # type: str
- if_match=None, # type: Optional[str]
- connection_state=None, # type: Optional["models.ConnectionStateProperties"]
- fqdns=None, # type: Optional[List[str]]
- group_id=None, # type: Optional[str]
- private_link_resource_id=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.ManagedPrivateEndpointResource"
- """Creates or updates a managed private endpoint.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param managed_virtual_network_name: Managed virtual network name.
- :type managed_virtual_network_name: str
- :param managed_private_endpoint_name: Managed private endpoint name.
- :type managed_private_endpoint_name: str
- :param if_match: ETag of the managed private endpoint entity. Should only be specified for
- update, for which it should match existing entity or can be * for unconditional update.
- :type if_match: str
- :param connection_state: The managed private endpoint connection state.
- :type connection_state: ~data_factory_management_client.models.ConnectionStateProperties
- :param fqdns: Fully qualified domain names.
- :type fqdns: list[str]
-        :param group_id: The groupId of the resource to which the managed private endpoint
-        connects.
-        :type group_id: str
-        :param private_link_resource_id: The ARM resource ID of the resource to which the managed
-        private endpoint connects.
- :type private_link_resource_id: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: ManagedPrivateEndpointResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.ManagedPrivateEndpointResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- managed_private_endpoint = models.ManagedPrivateEndpointResource(connection_state=connection_state, fqdns=fqdns, group_id=group_id, private_link_resource_id=private_link_resource_id)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_or_update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'),
- 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_match is not None:
- header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(managed_private_endpoint, 'ManagedPrivateEndpointResource')
- body_content_kwargs['content'] = body_content
- request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('ManagedPrivateEndpointResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore
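
Note that this create_or_update flattens its body: the optional connection_state/fqdns/group_id/private_link_resource_id keywords are packed into a ManagedPrivateEndpointResource before the PUT, and if_match makes the update conditional on the current ETag. A hedged sketch under the same placeholder assumptions as above:

endpoint = mpe_ops.create_or_update(
    resource_group_name="example-rg",
    factory_name="exampleFactoryName",
    managed_virtual_network_name="exampleManagedVirtualNetworkName",
    managed_private_endpoint_name="exampleManagedPrivateEndpointName",
    group_id="blob",
    private_link_resource_id=(
        "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/"
        "example-rg/providers/Microsoft.Storage/storageAccounts/examplestorage"
    ),
)

# Conditional re-PUT: succeeds only while the ETag captured above still matches.
# (`etag` is the standard sub-resource attribute; assumed present on this model.)
mpe_ops.create_or_update(
    "example-rg", "exampleFactoryName", "exampleManagedVirtualNetworkName",
    "exampleManagedPrivateEndpointName",
    if_match=endpoint.etag,
    group_id="blob",
    private_link_resource_id=(
        "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/"
        "example-rg/providers/Microsoft.Storage/storageAccounts/examplestorage"
    ),
)
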
-
- def get(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- managed_virtual_network_name, # type: str
- managed_private_endpoint_name, # type: str
- if_none_match=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.ManagedPrivateEndpointResource"
- """Gets a managed private endpoint.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param managed_virtual_network_name: Managed virtual network name.
- :type managed_virtual_network_name: str
- :param managed_private_endpoint_name: Managed private endpoint name.
- :type managed_private_endpoint_name: str
- :param if_none_match: ETag of the managed private endpoint entity. Should only be specified for
- get. If the ETag matches the existing entity tag, or if * was provided, then no content will be
- returned.
- :type if_none_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: ManagedPrivateEndpointResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.ManagedPrivateEndpointResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'),
- 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_none_match is not None:
- header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('ManagedPrivateEndpointResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore
-
- def delete(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- managed_virtual_network_name, # type: str
- managed_private_endpoint_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> None
- """Deletes a managed private endpoint.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param managed_virtual_network_name: Managed virtual network name.
- :type managed_virtual_network_name: str
- :param managed_private_endpoint_name: Managed private endpoint name.
- :type managed_private_endpoint_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.delete.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'),
- 'managedPrivateEndpointName': self._serialize.url("managed_private_endpoint_name", managed_private_endpoint_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.delete(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 204]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}'} # type: ignore
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_network_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_network_operations.py
deleted file mode 100644
index fa043ca3e59..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_network_operations.py
+++ /dev/null
@@ -1,262 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import TYPE_CHECKING
-import warnings
-
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.paging import ItemPaged
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpRequest, HttpResponse
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from .. import models
-
-if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
- from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
-
- T = TypeVar('T')
- ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
-
-class ManagedVirtualNetworkOperations(object):
- """ManagedVirtualNetworkOperations operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer):
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def list_by_factory(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> Iterable["models.ManagedVirtualNetworkListResponse"]
- """Lists managed Virtual Networks.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: An iterator-like instance of either ManagedVirtualNetworkListResponse or the result of cls(response)
- :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.ManagedVirtualNetworkListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- def extract_data(pipeline_response):
- deserialized = self._deserialize('ManagedVirtualNetworkListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, iter(list_of_elem)
-
- def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return ItemPaged(
- get_next, extract_data
- )
- list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks'} # type: ignore
-
- def create_or_update(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- managed_virtual_network_name, # type: str
- properties, # type: "models.ManagedVirtualNetwork"
- if_match=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.ManagedVirtualNetworkResource"
- """Creates or updates a managed Virtual Network.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param managed_virtual_network_name: Managed virtual network name.
- :type managed_virtual_network_name: str
- :param properties: Managed Virtual Network properties.
- :type properties: ~data_factory_management_client.models.ManagedVirtualNetwork
- :param if_match: ETag of the managed Virtual Network entity. Should only be specified for
- update, for which it should match existing entity or can be * for unconditional update.
- :type if_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: ManagedVirtualNetworkResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.ManagedVirtualNetworkResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- managed_virtual_network = models.ManagedVirtualNetworkResource(properties=properties)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_or_update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_match is not None:
- header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(managed_virtual_network, 'ManagedVirtualNetworkResource')
- body_content_kwargs['content'] = body_content
- request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('ManagedVirtualNetworkResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}'} # type: ignore
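
By contrast, the managed Virtual Network create_or_update is not flattened: the caller supplies a ManagedVirtualNetwork as properties, which the method wraps in a ManagedVirtualNetworkResource. Because that model's service-populated fields are read-only, an empty instance should suffice; a sketch under that assumption, with mvn_ops as a placeholder handle and the import path taken from the docstrings above:

from data_factory_management_client import models  # package name per the docstrings

vnet = mvn_ops.create_or_update(
    resource_group_name="example-rg",
    factory_name="exampleFactoryName",
    managed_virtual_network_name="exampleManagedVirtualNetworkName",
    properties=models.ManagedVirtualNetwork(),  # assumed constructible with no arguments
)
print(vnet.name, vnet.etag)
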
-
- def get(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- managed_virtual_network_name, # type: str
- if_none_match=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.ManagedVirtualNetworkResource"
- """Gets a managed Virtual Network.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param managed_virtual_network_name: Managed virtual network name.
- :type managed_virtual_network_name: str
- :param if_none_match: ETag of the managed Virtual Network entity. Should only be specified for
- get. If the ETag matches the existing entity tag, or if * was provided, then no content will be
- returned.
- :type if_none_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: ManagedVirtualNetworkResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.ManagedVirtualNetworkResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'managedVirtualNetworkName': self._serialize.url("managed_virtual_network_name", managed_virtual_network_name, 'str', max_length=127, min_length=1, pattern=r'^([_A-Za-z0-9]|([_A-Za-z0-9][-_A-Za-z0-9]{0,125}[_A-Za-z0-9]))$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_none_match is not None:
- header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('ManagedVirtualNetworkResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/managedVirtualNetworks/{managedVirtualNetworkName}'} # type: ignore
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operation_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operation_operations.py
deleted file mode 100644
index c5cf3d43f6d..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operation_operations.py
+++ /dev/null
@@ -1,106 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import TYPE_CHECKING
-import warnings
-
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.paging import ItemPaged
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpRequest, HttpResponse
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from .. import models
-
-if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
- from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
-
- T = TypeVar('T')
- ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
-
-class OperationOperations(object):
- """OperationOperations operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer):
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def list(
- self,
- **kwargs # type: Any
- ):
- # type: (...) -> Iterable["models.OperationListResponse"]
- """Lists the available Azure Data Factory API operations.
-
- :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: An iterator-like instance of either OperationListResponse or the result of cls(response)
- :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.OperationListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.OperationListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list.metadata['url'] # type: ignore
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- def extract_data(pipeline_response):
- deserialized = self._deserialize('OperationListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, iter(list_of_elem)
-
- def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return ItemPaged(
- get_next, extract_data
- )
- list.metadata = {'url': '/providers/Microsoft.DataFactory/operations'} # type: ignore
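
OperationOperations.list walks the same pager over the provider-level endpoint, so enumerating the available Microsoft.DataFactory operations reduces to (op_ops being another placeholder handle):

for op in op_ops.list():
    print(op.name)  # e.g. "Microsoft.DataFactory/factories/read"
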
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_operations.py
deleted file mode 100644
index d82f423f2cb..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_operations.py
+++ /dev/null
@@ -1,414 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import TYPE_CHECKING
-import warnings
-
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.paging import ItemPaged
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpRequest, HttpResponse
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from .. import models
-
-if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
- from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
-
- T = TypeVar('T')
- ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
-
-class PipelineOperations(object):
- """PipelineOperations operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer):
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def list_by_factory(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> Iterable["models.PipelineListResponse"]
- """Lists pipelines.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: An iterator-like instance of either PipelineListResponse or the result of cls(response)
- :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.PipelineListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- def extract_data(pipeline_response):
- deserialized = self._deserialize('PipelineListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, iter(list_of_elem)
-
- def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return ItemPaged(
- get_next, extract_data
- )
- list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines'} # type: ignore
-
- def create_or_update(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- pipeline_name, # type: str
- pipeline, # type: "models.PipelineResource"
- if_match=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.PipelineResource"
- """Creates or updates a pipeline.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param pipeline_name: The pipeline name.
- :type pipeline_name: str
- :param pipeline: Pipeline resource definition.
- :type pipeline: ~data_factory_management_client.models.PipelineResource
- :param if_match: ETag of the pipeline entity. Should only be specified for update, for which
- it should match existing entity or can be * for unconditional update.
- :type if_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: PipelineResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.PipelineResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_or_update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_match is not None:
- header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(pipeline, 'PipelineResource')
- body_content_kwargs['content'] = body_content
- request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('PipelineResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} # type: ignore
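
Unlike the flattened managed private endpoint method earlier in this diff, the deleted pipeline create_or_update takes a fully-formed PipelineResource. A hedged sketch; ParameterSpecification and WaitActivity follow the public azure-mgmt-datafactory model surface that this vendored copy mirrors, and pipeline_ops is a placeholder handle:

from data_factory_management_client import models  # package name per the docstrings

pipeline = models.PipelineResource(
    parameters={"outputBlobName": models.ParameterSpecification(type="String")},
    activities=[
        models.WaitActivity(name="ExampleWait", wait_time_in_seconds=5),
    ],
)
created = pipeline_ops.create_or_update(
    "example-rg", "exampleFactoryName", "examplePipeline", pipeline
)
print(created.etag)
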
-
- def get(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- pipeline_name, # type: str
- if_none_match=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> Optional["models.PipelineResource"]
- """Gets a pipeline.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param pipeline_name: The pipeline name.
- :type pipeline_name: str
- :param if_none_match: ETag of the pipeline entity. Should only be specified for get. If the
- ETag matches the existing entity tag, or if * was provided, then no content will be returned.
- :type if_none_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: PipelineResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.PipelineResource or None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.PipelineResource"]]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_none_match is not None:
- header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 304]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('PipelineResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} # type: ignore
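
This is the only getter removed in this diff that whitelists 304: when if_none_match carries a still-current ETag, the method returns None instead of raising, which makes a simple client-side cache possible (same placeholders as above):

cached = pipeline_ops.get("example-rg", "exampleFactoryName", "examplePipeline")

maybe_newer = pipeline_ops.get(
    "example-rg", "exampleFactoryName", "examplePipeline",
    if_none_match=cached.etag,
)
if maybe_newer is None:
    maybe_newer = cached  # 304 Not Modified: the cached copy is still current
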
-
- def delete(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- pipeline_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> None
- """Deletes a pipeline.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param pipeline_name: The pipeline name.
- :type pipeline_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.delete.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.delete(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 204]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} # type: ignore
-
- def create_run(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- pipeline_name, # type: str
- reference_pipeline_run_id=None, # type: Optional[str]
- is_recovery=None, # type: Optional[bool]
- start_activity_name=None, # type: Optional[str]
- start_from_failure=None, # type: Optional[bool]
- parameters=None, # type: Optional[Dict[str, object]]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.CreateRunResponse"
- """Creates a run of a pipeline.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param pipeline_name: The pipeline name.
- :type pipeline_name: str
-        :param reference_pipeline_run_id: The pipeline run identifier. If the run ID is specified,
-        the parameters of the specified run will be used to create a new run.
- :type reference_pipeline_run_id: str
- :param is_recovery: Recovery mode flag. If recovery mode is set to true, the specified
- referenced pipeline run and the new run will be grouped under the same groupId.
- :type is_recovery: bool
- :param start_activity_name: In recovery mode, the rerun will start from this activity. If not
- specified, all activities will run.
- :type start_activity_name: str
- :param start_from_failure: In recovery mode, if set to true, the rerun will start from failed
- activities. The property will be used only if startActivityName is not specified.
- :type start_from_failure: bool
- :param parameters: Parameters of the pipeline run. These parameters will be used only if the
- runId is not specified.
- :type parameters: dict[str, object]
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: CreateRunResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.CreateRunResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.CreateRunResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_run.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'pipelineName': self._serialize.url("pipeline_name", pipeline_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
- if reference_pipeline_run_id is not None:
- query_parameters['referencePipelineRunId'] = self._serialize.query("reference_pipeline_run_id", reference_pipeline_run_id, 'str')
- if is_recovery is not None:
- query_parameters['isRecovery'] = self._serialize.query("is_recovery", is_recovery, 'bool')
- if start_activity_name is not None:
- query_parameters['startActivityName'] = self._serialize.query("start_activity_name", start_activity_name, 'str')
- if start_from_failure is not None:
- query_parameters['startFromFailure'] = self._serialize.query("start_from_failure", start_from_failure, 'bool')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- if parameters is not None:
- body_content = self._serialize.body(parameters, '{object}')
- else:
- body_content = None
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('CreateRunResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_run.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}/createRun'} # type: ignore
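
create_run encodes its precedence rules in the query string and body: parameters applies only to a fresh run, while reference_pipeline_run_id plus is_recovery reruns an earlier run under the same groupId, resuming at start_activity_name or, when that is omitted and start_from_failure is set, at the failed activities. A sketch of both modes (placeholders as above):

# Fresh run: the body carries the pipeline parameters.
run = pipeline_ops.create_run(
    "example-rg", "exampleFactoryName", "examplePipeline",
    parameters={"outputBlobName": "exampleoutput.csv"},
)

# Recovery rerun: grouped with the original run, resuming at failed activities.
rerun = pipeline_ops.create_run(
    "example-rg", "exampleFactoryName", "examplePipeline",
    reference_pipeline_run_id=run.run_id,
    is_recovery=True,
    start_from_failure=True,  # only honored because start_activity_name is omitted
)
print(rerun.run_id)
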
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_run_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_run_operations.py
deleted file mode 100644
index 75634fde5ac..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_run_operations.py
+++ /dev/null
@@ -1,250 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-import datetime
-from typing import TYPE_CHECKING
-import warnings
-
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpRequest, HttpResponse
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from .. import models
-
-if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
- from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar
-
- T = TypeVar('T')
- ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
-
-class PipelineRunOperations(object):
- """PipelineRunOperations operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer):
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def query_by_factory(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- last_updated_after, # type: datetime.datetime
- last_updated_before, # type: datetime.datetime
- continuation_token_parameter=None, # type: Optional[str]
- filters=None, # type: Optional[List["models.RunQueryFilter"]]
- order_by=None, # type: Optional[List["models.RunQueryOrderBy"]]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.PipelineRunsQueryResponse"
- """Query pipeline runs in the factory based on input filter conditions.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param last_updated_after: The time at or after which the run event was updated in 'ISO 8601'
- format.
- :type last_updated_after: ~datetime.datetime
- :param last_updated_before: The time at or before which the run event was updated in 'ISO 8601'
- format.
- :type last_updated_before: ~datetime.datetime
- :param continuation_token_parameter: The continuation token for getting the next page of
- results. Null for first page.
- :type continuation_token_parameter: str
- :param filters: List of filters.
- :type filters: list[~data_factory_management_client.models.RunQueryFilter]
-        :param order_by: List of OrderBy options.
- :type order_by: list[~data_factory_management_client.models.RunQueryOrderBy]
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: PipelineRunsQueryResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.PipelineRunsQueryResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineRunsQueryResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.query_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(filter_parameters, 'RunFilterParameters')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('PipelineRunsQueryResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryPipelineRuns'} # type: ignore
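
Here pagination is manual: query_by_factory bundles the time window, filters, and ordering into a RunFilterParameters body, and the next page is fetched by echoing the response's continuation token back as continuation_token_parameter. A sketch; the operand/operator/order literals follow the public ADF enums and are assumptions, and run_ops is a placeholder handle:

import datetime

from data_factory_management_client import models  # package name per the docstrings

window = dict(
    last_updated_after=datetime.datetime(2018, 6, 16, tzinfo=datetime.timezone.utc),
    last_updated_before=datetime.datetime(2018, 6, 17, tzinfo=datetime.timezone.utc),
)
resp = run_ops.query_by_factory(
    "example-rg", "exampleFactoryName",
    filters=[models.RunQueryFilter(
        operand="PipelineName", operator="Equals", values=["examplePipeline"],
    )],
    order_by=[models.RunQueryOrderBy(order_by="RunStart", order="DESC")],
    **window,
)
for run in resp.value:
    print(run.run_id, run.status)

if resp.continuation_token:  # echo it back to fetch the next page
    resp = run_ops.query_by_factory(
        "example-rg", "exampleFactoryName",
        continuation_token_parameter=resp.continuation_token,
        **window,
    )
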
-
- def get(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- run_id, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> "models.PipelineRun"
- """Get a pipeline run by its run ID.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param run_id: The pipeline run identifier.
- :type run_id: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: PipelineRun, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.PipelineRun
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineRun"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'runId': self._serialize.url("run_id", run_id, 'str'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('PipelineRun', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}'} # type: ignore
-
- def cancel(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- run_id, # type: str
- is_recursive=None, # type: Optional[bool]
- **kwargs # type: Any
- ):
- # type: (...) -> None
- """Cancel a pipeline run by its run ID.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param run_id: The pipeline run identifier.
- :type run_id: str
-        :param is_recursive: If true, cancel all the child pipelines that are triggered by the current
- pipeline.
- :type is_recursive: bool
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.cancel.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'runId': self._serialize.url("run_id", run_id, 'str'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- if is_recursive is not None:
- query_parameters['isRecursive'] = self._serialize.query("is_recursive", is_recursive, 'bool')
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- cancel.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelineruns/{runId}/cancel'} # type: ignore
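The three deleted methods above (queryPipelineRuns, get, cancel) were reached through the generated client's pipeline-run operation group. As a minimal sketch of that usage — the import path, the `pipeline_runs` attribute name, and every resource name/ID below are assumptions inferred from the deleted file's docstrings, not part of this diff:

```python
# Illustrative sketch only; module path and attribute names are assumed.
from azure.identity import DefaultAzureCredential
from data_factory_management_client import DataFactoryManagementClient

client = DataFactoryManagementClient(
    credential=DefaultAzureCredential(),
    subscription_id="00000000-0000-0000-0000-000000000000",
)

# GET .../pipelineruns/{runId}: fetch one run by its run ID.
run = client.pipeline_runs.get(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    run_id="2f7fdb90-5df1-4b8e-ac2f-064cfa58202b",
)

# POST .../pipelineruns/{runId}/cancel; is_recursive=True also cancels
# child pipeline runs triggered by this run.
client.pipeline_runs.cancel(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    run_id=run.run_id,
    is_recursive=True,
)
```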
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_operations.py
deleted file mode 100644
index 142f32f2c31..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_operations.py
+++ /dev/null
@@ -1,895 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-from typing import TYPE_CHECKING
-import warnings
-
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.paging import ItemPaged
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpRequest, HttpResponse
-from azure.core.polling import LROPoller, NoPolling, PollingMethod
-from azure.mgmt.core.exceptions import ARMErrorFormat
-from azure.mgmt.core.polling.arm_polling import ARMPolling
-
-from .. import models
-
-if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
- from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
-
- T = TypeVar('T')
- ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
-
-class TriggerOperations(object):
- """TriggerOperations operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer):
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def list_by_factory(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> Iterable["models.TriggerListResponse"]
- """Lists triggers.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: An iterator like instance of either TriggerListResponse or the result of cls(response)
- :rtype: ~azure.core.paging.ItemPaged[~data_factory_management_client.models.TriggerListResponse]
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerListResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- if not next_link:
- # Construct URL
- url = self.list_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
- else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
- return request
-
- def extract_data(pipeline_response):
- deserialized = self._deserialize('TriggerListResponse', pipeline_response)
- list_of_elem = deserialized.value
- if cls:
- list_of_elem = cls(list_of_elem)
- return deserialized.next_link or None, iter(list_of_elem)
-
- def get_next(next_link=None):
- request = prepare_request(next_link)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- return pipeline_response
-
- return ItemPaged(
- get_next, extract_data
- )
- list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers'} # type: ignore
-
- def query_by_factory(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- continuation_token_parameter=None, # type: Optional[str]
- parent_trigger_name=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.TriggerQueryResponse"
- """Query triggers.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param continuation_token_parameter: The continuation token for getting the next page of
- results. Null for first page.
- :type continuation_token_parameter: str
- :param parent_trigger_name: The name of the parent TumblingWindowTrigger to get the child rerun
- triggers.
- :type parent_trigger_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: TriggerQueryResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.TriggerQueryResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerQueryResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- filter_parameters = models.TriggerFilterParameters(continuation_token=continuation_token_parameter, parent_trigger_name=parent_trigger_name)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.query_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(filter_parameters, 'TriggerFilterParameters')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('TriggerQueryResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/querytriggers'} # type: ignore
-
- def create_or_update(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- trigger_name, # type: str
- properties, # type: "models.Trigger"
- if_match=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.TriggerResource"
- """Creates or updates a trigger.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param trigger_name: The trigger name.
- :type trigger_name: str
- :param properties: Properties of the trigger.
- :type properties: ~data_factory_management_client.models.Trigger
- :param if_match: ETag of the trigger entity. Should only be specified for update, for which it
-        should match the existing entity or can be * for an unconditional update.
- :type if_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: TriggerResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.TriggerResource
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerResource"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- trigger = models.TriggerResource(properties=properties)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.create_or_update.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_match is not None:
- header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(trigger, 'TriggerResource')
- body_content_kwargs['content'] = body_content
- request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('TriggerResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore
-
- def get(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- trigger_name, # type: str
- if_none_match=None, # type: Optional[str]
- **kwargs # type: Any
- ):
- # type: (...) -> Optional["models.TriggerResource"]
- """Gets a trigger.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param trigger_name: The trigger name.
- :type trigger_name: str
- :param if_none_match: ETag of the trigger entity. Should only be specified for get. If the ETag
- matches the existing entity tag, or if * was provided, then no content will be returned.
- :type if_none_match: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: TriggerResource, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.TriggerResource or None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerResource"]]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- if if_none_match is not None:
- header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.get(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 304]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('TriggerResource', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore
-
- def delete(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- trigger_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> None
- """Deletes a trigger.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param trigger_name: The trigger name.
- :type trigger_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.delete.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.delete(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 204]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore
-
- def _subscribe_to_event_initial(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- trigger_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> Optional["models.TriggerSubscriptionOperationStatus"]
- cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self._subscribe_to_event_initial.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 202]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- _subscribe_to_event_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore
-
- def begin_subscribe_to_event(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- trigger_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> LROPoller["models.TriggerSubscriptionOperationStatus"]
- """Subscribe event trigger to events.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param trigger_name: The trigger name.
- :type trigger_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: True for ARMPolling, False for no polling, or a
- polling object for personal polling strategy
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
- :return: An instance of LROPoller that returns either TriggerSubscriptionOperationStatus or the result of cls(response)
- :rtype: ~azure.core.polling.LROPoller[~data_factory_management_client.models.TriggerSubscriptionOperationStatus]
- :raises ~azure.core.exceptions.HttpResponseError:
- """
- polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
- cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"]
- lro_delay = kwargs.pop(
- 'polling_interval',
- self._config.polling_interval
- )
- cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
- if cont_token is None:
- raw_result = self._subscribe_to_event_initial(
- resource_group_name=resource_group_name,
- factory_name=factory_name,
- trigger_name=trigger_name,
- cls=lambda x,y,z: x,
- **kwargs
- )
-
- kwargs.pop('error_map', None)
- kwargs.pop('content_type', None)
-
- def get_long_running_output(pipeline_response):
- deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
- return deserialized
-
- if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
- elif polling is False: polling_method = NoPolling()
- else: polling_method = polling
- if cont_token:
- return LROPoller.from_continuation_token(
- polling_method=polling_method,
- continuation_token=cont_token,
- client=self._client,
- deserialization_callback=get_long_running_output
- )
- else:
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_subscribe_to_event.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore
-
- def get_event_subscription_status(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- trigger_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> "models.TriggerSubscriptionOperationStatus"
- """Get a trigger's event subscription status.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param trigger_name: The trigger name.
- :type trigger_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: TriggerSubscriptionOperationStatus, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.TriggerSubscriptionOperationStatus
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.get_event_subscription_status.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- get_event_subscription_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/getEventSubscriptionStatus'} # type: ignore
-
- def _unsubscribe_from_event_initial(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- trigger_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> Optional["models.TriggerSubscriptionOperationStatus"]
- cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self._unsubscribe_from_event_initial.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = 'application/json'
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200, 202]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = None
- if response.status_code == 200:
- deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- _unsubscribe_from_event_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore
-
- def begin_unsubscribe_from_event(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- trigger_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> LROPoller["models.TriggerSubscriptionOperationStatus"]
- """Unsubscribe event trigger from events.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param trigger_name: The trigger name.
- :type trigger_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: True for ARMPolling, False for no polling, or a
- polling object for personal polling strategy
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
- :return: An instance of LROPoller that returns either TriggerSubscriptionOperationStatus or the result of cls(response)
- :rtype: ~azure.core.polling.LROPoller[~data_factory_management_client.models.TriggerSubscriptionOperationStatus]
- :raises ~azure.core.exceptions.HttpResponseError:
- """
- polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
- cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"]
- lro_delay = kwargs.pop(
- 'polling_interval',
- self._config.polling_interval
- )
- cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
- if cont_token is None:
- raw_result = self._unsubscribe_from_event_initial(
- resource_group_name=resource_group_name,
- factory_name=factory_name,
- trigger_name=trigger_name,
- cls=lambda x,y,z: x,
- **kwargs
- )
-
- kwargs.pop('error_map', None)
- kwargs.pop('content_type', None)
-
- def get_long_running_output(pipeline_response):
- deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
- return deserialized
-
- if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
- elif polling is False: polling_method = NoPolling()
- else: polling_method = polling
- if cont_token:
- return LROPoller.from_continuation_token(
- polling_method=polling_method,
- continuation_token=cont_token,
- client=self._client,
- deserialization_callback=get_long_running_output
- )
- else:
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_unsubscribe_from_event.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore
-
- def _start_initial(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- trigger_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> None
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self._start_initial.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start'} # type: ignore
-
- def begin_start(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- trigger_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> LROPoller[None]
- """Starts a trigger.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param trigger_name: The trigger name.
- :type trigger_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: True for ARMPolling, False for no polling, or a
- polling object for personal polling strategy
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
- :return: An instance of LROPoller that returns either None or the result of cls(response)
- :rtype: ~azure.core.polling.LROPoller[None]
- :raises ~azure.core.exceptions.HttpResponseError:
- """
- polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- lro_delay = kwargs.pop(
- 'polling_interval',
- self._config.polling_interval
- )
- cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
- if cont_token is None:
- raw_result = self._start_initial(
- resource_group_name=resource_group_name,
- factory_name=factory_name,
- trigger_name=trigger_name,
- cls=lambda x,y,z: x,
- **kwargs
- )
-
- kwargs.pop('error_map', None)
- kwargs.pop('content_type', None)
-
- def get_long_running_output(pipeline_response):
- if cls:
- return cls(pipeline_response, None, {})
-
- if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
- elif polling is False: polling_method = NoPolling()
- else: polling_method = polling
- if cont_token:
- return LROPoller.from_continuation_token(
- polling_method=polling_method,
- continuation_token=cont_token,
- client=self._client,
- deserialization_callback=get_long_running_output
- )
- else:
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start'} # type: ignore
-
- def _stop_initial(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- trigger_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> None
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self._stop_initial.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop'} # type: ignore
-
- def begin_stop(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- trigger_name, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> LROPoller[None]
- """Stops a trigger.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param trigger_name: The trigger name.
- :type trigger_name: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :keyword str continuation_token: A continuation token to restart a poller from a saved state.
- :keyword polling: True for ARMPolling, False for no polling, or a
- polling object for personal polling strategy
- :paramtype polling: bool or ~azure.core.polling.PollingMethod
- :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
- :return: An instance of LROPoller that returns either None or the result of cls(response)
- :rtype: ~azure.core.polling.LROPoller[None]
- :raises ~azure.core.exceptions.HttpResponseError:
- """
- polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- lro_delay = kwargs.pop(
- 'polling_interval',
- self._config.polling_interval
- )
- cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
- if cont_token is None:
- raw_result = self._stop_initial(
- resource_group_name=resource_group_name,
- factory_name=factory_name,
- trigger_name=trigger_name,
- cls=lambda x,y,z: x,
- **kwargs
- )
-
- kwargs.pop('error_map', None)
- kwargs.pop('content_type', None)
-
- def get_long_running_output(pipeline_response):
- if cls:
- return cls(pipeline_response, None, {})
-
- if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
- elif polling is False: polling_method = NoPolling()
- else: polling_method = polling
- if cont_token:
- return LROPoller.from_continuation_token(
- polling_method=polling_method,
- continuation_token=cont_token,
- client=self._client,
- deserialization_callback=get_long_running_output
- )
- else:
- return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
- begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop'} # type: ignore
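The begin_subscribe_to_event, begin_unsubscribe_from_event, begin_start, and begin_stop methods deleted above all follow the same azure.core long-running-operation pattern: an `_initial` POST followed by an ARMPolling-driven LROPoller. A minimal sketch of driving them, reusing the assumed `client` object from the earlier sketch (the trigger name is invented):

```python
# Illustrative sketch only; `client` and its `triggers` attribute are assumed.
poller = client.triggers.begin_subscribe_to_event(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    trigger_name="exampleTrigger",
)
# result() polls until done and returns a TriggerSubscriptionOperationStatus.
subscription_status = poller.result()

# begin_start/begin_stop return LROPoller[None]; wait() simply blocks until
# the trigger has transitioned. Passing polling=False skips ARM polling.
client.triggers.begin_start(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    trigger_name="exampleTrigger",
).wait()
```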
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_run_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_run_operations.py
deleted file mode 100644
index 3290d8196ab..00000000000
--- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_run_operations.py
+++ /dev/null
@@ -1,248 +0,0 @@
-# coding=utf-8
-# --------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# Code generated by Microsoft (R) AutoRest Code Generator.
-# Changes may cause incorrect behavior and will be lost if the code is regenerated.
-# --------------------------------------------------------------------------
-import datetime
-from typing import TYPE_CHECKING
-import warnings
-
-from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
-from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import HttpRequest, HttpResponse
-from azure.mgmt.core.exceptions import ARMErrorFormat
-
-from .. import models
-
-if TYPE_CHECKING:
- # pylint: disable=unused-import,ungrouped-imports
- from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar
-
- T = TypeVar('T')
- ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
-
-class TriggerRunOperations(object):
- """TriggerRunOperations operations.
-
- You should not instantiate this class directly. Instead, you should create a Client instance that
- instantiates it for you and attaches it as an attribute.
-
- :ivar models: Alias to model classes used in this operation group.
- :type models: ~data_factory_management_client.models
- :param client: Client for service requests.
- :param config: Configuration of service client.
- :param serializer: An object model serializer.
- :param deserializer: An object model deserializer.
- """
-
- models = models
-
- def __init__(self, client, config, serializer, deserializer):
- self._client = client
- self._serialize = serializer
- self._deserialize = deserializer
- self._config = config
-
- def rerun(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- trigger_name, # type: str
- run_id, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> None
- """Rerun single trigger instance by runId.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param trigger_name: The trigger name.
- :type trigger_name: str
-        :param run_id: The trigger run identifier.
- :type run_id: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.rerun.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- 'runId': self._serialize.url("run_id", run_id, 'str'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- rerun.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/rerun'} # type: ignore
-
- def cancel(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- trigger_name, # type: str
- run_id, # type: str
- **kwargs # type: Any
- ):
- # type: (...) -> None
- """Cancel a single trigger instance by runId.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param trigger_name: The trigger name.
- :type trigger_name: str
-        :param run_id: The trigger run identifier.
- :type run_id: str
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: None, or the result of cls(response)
- :rtype: None
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType[None]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2018-06-01"
-
- # Construct URL
- url = self.cancel.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
- 'runId': self._serialize.url("run_id", run_id, 'str'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
-
- request = self._client.post(url, query_parameters, header_parameters)
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- if cls:
- return cls(pipeline_response, None, {})
-
- cancel.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/triggerRuns/{runId}/cancel'} # type: ignore
-
- def query_by_factory(
- self,
- resource_group_name, # type: str
- factory_name, # type: str
- last_updated_after, # type: datetime.datetime
- last_updated_before, # type: datetime.datetime
- continuation_token_parameter=None, # type: Optional[str]
- filters=None, # type: Optional[List["models.RunQueryFilter"]]
- order_by=None, # type: Optional[List["models.RunQueryOrderBy"]]
- **kwargs # type: Any
- ):
- # type: (...) -> "models.TriggerRunsQueryResponse"
- """Query trigger runs.
-
- :param resource_group_name: The resource group name.
- :type resource_group_name: str
- :param factory_name: The factory name.
- :type factory_name: str
- :param last_updated_after: The time at or after which the run event was updated in 'ISO 8601'
- format.
- :type last_updated_after: ~datetime.datetime
- :param last_updated_before: The time at or before which the run event was updated in 'ISO 8601'
- format.
- :type last_updated_before: ~datetime.datetime
- :param continuation_token_parameter: The continuation token for getting the next page of
- results. Null for first page.
- :type continuation_token_parameter: str
- :param filters: List of filters.
- :type filters: list[~data_factory_management_client.models.RunQueryFilter]
-        :param order_by: List of OrderBy options.
- :type order_by: list[~data_factory_management_client.models.RunQueryOrderBy]
- :keyword callable cls: A custom type or function that will be passed the direct response
- :return: TriggerRunsQueryResponse, or the result of cls(response)
- :rtype: ~data_factory_management_client.models.TriggerRunsQueryResponse
- :raises: ~azure.core.exceptions.HttpResponseError
- """
- cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerRunsQueryResponse"]
- error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
- error_map.update(kwargs.pop('error_map', {}))
-
- filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by)
- api_version = "2018-06-01"
- content_type = kwargs.pop("content_type", "application/json")
-
- # Construct URL
- url = self.query_by_factory.metadata['url'] # type: ignore
- path_format_arguments = {
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
- }
- url = self._client.format_url(url, **path_format_arguments)
-
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
- header_parameters['Accept'] = 'application/json'
-
- body_content_kwargs = {} # type: Dict[str, Any]
- body_content = self._serialize.body(filter_parameters, 'RunFilterParameters')
- body_content_kwargs['content'] = body_content
- request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
-
- pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
- response = pipeline_response.http_response
-
- if response.status_code not in [200]:
- map_error(status_code=response.status_code, response=response, error_map=error_map)
- raise HttpResponseError(response=response, error_format=ARMErrorFormat)
-
- deserialized = self._deserialize('TriggerRunsQueryResponse', pipeline_response)
-
- if cls:
- return cls(pipeline_response, deserialized, {})
-
- return deserialized
- query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryTriggerRuns'} # type: ignore
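query_by_factory above folds its keyword arguments into a RunFilterParameters body before POSTing to queryTriggerRuns. A minimal sketch of a filtered query — the `models` import path and the RunQueryFilter operand/operator literals are assumptions based on this vendored SDK's model names, and `client` is the same assumed object as in the first sketch:

```python
# Illustrative sketch only; import path and filter literals are assumed.
import datetime

from data_factory_management_client import models

# `client` as constructed in the first sketch.
runs = client.trigger_runs.query_by_factory(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    last_updated_after=datetime.datetime(2018, 6, 16, tzinfo=datetime.timezone.utc),
    last_updated_before=datetime.datetime(2018, 6, 17, tzinfo=datetime.timezone.utc),
    filters=[
        models.RunQueryFilter(
            operand="TriggerName",
            operator="Equals",
            values=["exampleTrigger"],
        )
    ],
)
for trigger_run in runs.value:
    print(trigger_run.trigger_run_id, trigger_run.status)
```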
diff --git a/src/datafactory/gen.zip b/src/datafactory/gen.zip
deleted file mode 100644
index 296cd2dfd07..00000000000
Binary files a/src/datafactory/gen.zip and /dev/null differ
diff --git a/src/datafactory/linter_exclusions.yml b/src/datafactory/linter_exclusions.yml
deleted file mode 100644
index cdfa831be54..00000000000
--- a/src/datafactory/linter_exclusions.yml
+++ /dev/null
@@ -1,5 +0,0 @@
-datafactory get-git-hub-access-token:
- parameters:
- git_hub_access_token_base_url:
- rule_exclusions:
- - option_length_too_long
diff --git a/src/datafactory/report.md b/src/datafactory/report.md
index 1d9bdfb4cf1..4db67edb6ea 100644
--- a/src/datafactory/report.md
+++ b/src/datafactory/report.md
@@ -19,6 +19,9 @@
|az datafactory activity-run|ActivityRuns|[commands](#CommandsInActivityRuns)|
|az datafactory trigger|Triggers|[commands](#CommandsInTriggers)|
|az datafactory trigger-run|TriggerRuns|[commands](#CommandsInTriggerRuns)|
+|az datafactory private-end-point-connection|privateEndPointConnections|[commands](#CommandsInprivateEndPointConnections)|
+|az datafactory private-endpoint-connection|PrivateEndpointConnection|[commands](#CommandsInPrivateEndpointConnection)|
+|az datafactory private-link-resource|privateLinkResources|[commands](#CommandsInprivateLinkResources)|
## COMMANDS
### Commands in `az datafactory` group
@@ -103,6 +106,24 @@
|[az datafactory pipeline-run cancel](#PipelineRunsCancel)|Cancel|[Parameters](#ParametersPipelineRunsCancel)|[Example](#ExamplesPipelineRunsCancel)|
|[az datafactory pipeline-run query-by-factory](#PipelineRunsQueryByFactory)|QueryByFactory|[Parameters](#ParametersPipelineRunsQueryByFactory)|[Example](#ExamplesPipelineRunsQueryByFactory)|
+### Commands in `az datafactory private-end-point-connection` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az datafactory private-end-point-connection list](#privateEndPointConnectionsListByFactory)|ListByFactory|[Parameters](#ParametersprivateEndPointConnectionsListByFactory)|[Example](#ExamplesprivateEndPointConnectionsListByFactory)|
+
+### Commands in `az datafactory private-endpoint-connection` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az datafactory private-endpoint-connection show](#PrivateEndpointConnectionGet)|Get|[Parameters](#ParametersPrivateEndpointConnectionGet)|[Example](#ExamplesPrivateEndpointConnectionGet)|
+|[az datafactory private-endpoint-connection create](#PrivateEndpointConnectionCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersPrivateEndpointConnectionCreateOrUpdate#Create)|[Example](#ExamplesPrivateEndpointConnectionCreateOrUpdate#Create)|
+|[az datafactory private-endpoint-connection update](#PrivateEndpointConnectionCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersPrivateEndpointConnectionCreateOrUpdate#Update)|Not Found|
+|[az datafactory private-endpoint-connection delete](#PrivateEndpointConnectionDelete)|Delete|[Parameters](#ParametersPrivateEndpointConnectionDelete)|[Example](#ExamplesPrivateEndpointConnectionDelete)|
+
+### Commands in `az datafactory private-link-resource` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az datafactory private-link-resource show](#privateLinkResourcesGet)|Get|[Parameters](#ParametersprivateLinkResourcesGet)|[Example](#ExamplesprivateLinkResourcesGet)|
+
### Commands in `az datafactory trigger` group
|CLI Command|Operation Swagger name|Parameters|Examples|
|---------|------------|--------|-----------|
@@ -179,6 +200,13 @@ az datafactory create --location "East US" --name "exampleFactoryName" --resourc
|**--factory-vsts-configuration**|object|Factory's VSTS repo information.|factory_vsts_configuration|FactoryVSTSConfiguration|
|**--factory-git-hub-configuration**|object|Factory's GitHub repo information.|factory_git_hub_configuration|FactoryGitHubConfiguration|
|**--global-parameters**|dictionary|List of parameters for factory.|global_parameters|globalParameters|
+|**--public-network-access**|choice|Whether or not public network access is allowed for the data factory.|public_network_access|publicNetworkAccess|
+|**--key-name**|string|The name of the key in Azure Key Vault to use as Customer Managed Key.|key_name|keyName|
+|**--vault-base-url**|string|The url of the Azure Key Vault used for CMK.|vault_base_url|vaultBaseUrl|
+|**--key-version**|string|The version of the key used for CMK. If not provided, the latest version will be used.|key_version|keyVersion|
+|**--identity**|object|User assigned identity to use to authenticate to customer's key vault. If not provided, Managed Service Identity will be used.|identity|identity|
+|**--type**|choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|List of user assigned identities for the factory.|user_assigned_identities|userAssignedIdentities|
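+
+A sketch of how the new CMK and identity flags might combine on create; the key name, vault URL, identity resource id, and the "Disabled" choice value below are placeholders and assumptions, not values taken from this change:
+```
+az datafactory create --location "East US" --name "exampleFactoryName" --resource-group "exampleResourceGroup" \
+--public-network-access "Disabled" --key-name "exampleKey" --vault-base-url "https://exampleVault.vault.azure.net" \
+--key-version "exampleVersion" --identity \
+user-assigned-identity="/subscriptions/XX/resourceGroups/XX/providers/Microsoft.ManagedIdentity/userAssignedIdentities/exampleUami"
+```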
#### Command `az datafactory update`
@@ -193,6 +221,8 @@ az datafactory update --name "exampleFactoryName" --tags exampleTag="exampleValu
|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
|**--factory-name**|string|The factory name.|factory_name|factoryName|
|**--tags**|dictionary|The resource tags.|tags|tags|
+|**--type**|choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|List of user assigned identities for the factory.|user_assigned_identities|userAssignedIdentities|
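+
+A hypothetical update call exercising the new identity parameters; the identity resource id is a placeholder, and the "SystemAssigned,UserAssigned" choice value is assumed rather than confirmed by this change:
+```
+az datafactory update --name "exampleFactoryName" --resource-group "exampleResourceGroup" --tags \
+exampleTag="exampleValue" --type "SystemAssigned,UserAssigned" --user-assigned-identities \
+"{\"/subscriptions/XX/resourceGroups/XX/providers/Microsoft.ManagedIdentity/userAssignedIdentities/exampleUami\":{}}"
+```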
#### Command `az datafactory delete`
@@ -257,6 +287,7 @@ az datafactory get-git-hub-access-token --name "exampleFactoryName" --git-hub-ac
|**--git-hub-access-code**|string|GitHub access code.|git_hub_access_code|gitHubAccessCode|
|**--git-hub-access-token-base-url**|string|GitHub access token base URL.|git_hub_access_token_base_url|gitHubAccessTokenBaseUrl|
|**--git-hub-client-id**|string|GitHub application client ID.|git_hub_client_id|gitHubClientId|
+|**--git-hub-client-secret**|object|GitHub bring-your-own-app client secret information.|git_hub_client_secret|gitHubClientSecret|
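+
+A sketch of passing the new secret object alongside the existing flags; the access code, base URL, and the XX values are placeholders:
+```
+az datafactory get-git-hub-access-token --name "exampleFactoryName" --git-hub-access-code "someCode" \
+--git-hub-access-token-base-url "someURL" --git-hub-client-secret byoa-secret-akv-url=XX byoa-secret-name=XX \
+--resource-group "exampleResourceGroup"
+```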
### group `az datafactory activity-run`
#### Command `az datafactory activity-run query-by-pipeline-run`
@@ -423,6 +454,7 @@ az datafactory integration-runtime linked-integration-runtime create --name "bfa
|**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName|
|**--if-match**|string|ETag of the integration runtime entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match|
|**--description**|string|Integration runtime description.|managed_description|description|
+|**--managed-virtual-network**|object|Managed Virtual Network reference.|managed_managed_virtual_network|managedVirtualNetwork|
|**--compute-properties**|object|The compute resource for managed integration runtime.|managed_compute_properties|computeProperties|
|**--ssis-properties**|object|SSIS properties for managed integration runtime.|managed_ssis_properties|ssisProperties|
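+
+A sketch of wiring the new reference object into a managed integration runtime create; the type= and reference-name= keys are assumed from the ManagedVirtualNetworkReference shape and may differ:
+```
+az datafactory integration-runtime managed create --factory-name "exampleFactoryName" --name \
+"exampleIntegrationRuntime" --resource-group "exampleResourceGroup" --managed-virtual-network \
+type="ManagedVirtualNetworkReference" reference-name="exampleManagedVirtualNetworkName"
+```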
@@ -823,7 +855,7 @@ et\\"}],\\"outputs\\":[{\\"type\\":\\"DatasetReference\\",\\"parameters\\":{\\"M
"typeProperties\\":{\\"dataIntegrationUnits\\":32,\\"sink\\":{\\"type\\":\\"BlobSink\\"},\\"source\\":{\\"type\\":\\"Bl\
obSource\\"}}}],\\"isSequential\\":true,\\"items\\":{\\"type\\":\\"Expression\\",\\"value\\":\\"@pipeline().parameters.\
OutputBlobNameList\\"}}}]" --parameters "{\\"OutputBlobNameList\\":{\\"type\\":\\"Array\\"}}" --duration "0.00:10:00" \
---name "examplePipeline" --resource-group "exampleResourceGroup"
+--pipeline-name "examplePipeline" --resource-group "exampleResourceGroup"
```
##### Parameters
|Option|Type|Description|Path (SDK)|Swagger name|
@@ -840,7 +872,7 @@ OutputBlobNameList\\"}}}]" --parameters "{\\"OutputBlobNameList\\":{\\"type\\":\
|**--annotations**|array|List of tags that can be used for describing the Pipeline.|annotations|annotations|
|**--run-dimensions**|dictionary|Dimensions emitted by Pipeline.|run_dimensions|runDimensions|
|**--duration**|any|TimeSpan value, after which an Azure Monitoring Metric is fired.|duration|duration|
-|**--folder-name**|string|The name of the folder that this Pipeline is in.|folder_name|name|
+|**--name**|string|The name of the folder that this Pipeline is in.|name|name|
#### Command `az datafactory pipeline delete`
@@ -924,6 +956,91 @@ operator="Equals" values="examplePipeline" --last-updated-after "2018-06-16T00:3
|**--filters**|array|List of filters.|filters|filters|
|**--order-by**|array|List of OrderBy option.|order_by|orderBy|
+### group `az datafactory private-end-point-connection`
+#### Command `az datafactory private-end-point-connection list`
+
+##### Example
+```
+az datafactory private-end-point-connection list --factory-name "exampleFactoryName" --resource-group \
+"exampleResourceGroup"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--factory-name**|string|The factory name.|factory_name|factoryName|
+
+### group `az datafactory private-endpoint-connection`
+#### Command `az datafactory private-endpoint-connection show`
+
+##### Example
+```
+az datafactory private-endpoint-connection show --factory-name "exampleFactoryName" --name "connection" \
+--resource-group "exampleResourceGroup"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--factory-name**|string|The factory name.|factory_name|factoryName|
+|**--private-endpoint-connection-name**|string|The private endpoint connection name.|private_endpoint_connection_name|privateEndpointConnectionName|
+|**--if-none-match**|string|ETag of the private endpoint connection entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match|
+
+#### Command `az datafactory private-endpoint-connection create`
+
+##### Example
+```
+az datafactory private-endpoint-connection create --factory-name "exampleFactoryName" --name "connection" \
+--private-link-service-connection-state description="Approved by admin." actions-required="" status="Approved" \
+--resource-group "exampleResourceGroup"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--factory-name**|string|The factory name.|factory_name|factoryName|
+|**--private-endpoint-connection-name**|string|The private endpoint connection name.|private_endpoint_connection_name|privateEndpointConnectionName|
+|**--if-match**|string|ETag of the private endpoint connection entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match|
+|**--private-link-service-connection-state**|object|The state of a private link connection.|private_link_service_connection_state|privateLinkServiceConnectionState|
+
+#### Command `az datafactory private-endpoint-connection update`
+
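+##### Example
+A plausible invocation, mirroring the create example above rather than a recorded test run:
+```
+az datafactory private-endpoint-connection update --factory-name "exampleFactoryName" --name "connection" \
+--private-link-service-connection-state description="Approved by admin." actions-required="" status="Approved" \
+--resource-group "exampleResourceGroup"
+```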
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--factory-name**|string|The factory name.|factory_name|factoryName|
+|**--private-endpoint-connection-name**|string|The private endpoint connection name.|private_endpoint_connection_name|privateEndpointConnectionName|
+|**--if-match**|string|ETag of the private endpoint connection entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match|
+|**--private-link-service-connection-state**|object|The state of a private link connection.|private_link_service_connection_state|privateLinkServiceConnectionState|
+
+#### Command `az datafactory private-endpoint-connection delete`
+
+##### Example
+```
+az datafactory private-endpoint-connection delete --factory-name "exampleFactoryName" --name "connection" \
+--resource-group "exampleResourceGroup"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--factory-name**|string|The factory name.|factory_name|factoryName|
+|**--private-endpoint-connection-name**|string|The private endpoint connection name.|private_endpoint_connection_name|privateEndpointConnectionName|
+
+### group `az datafactory private-link-resource`
+#### Command `az datafactory private-link-resource show`
+
+##### Example
+```
+az datafactory private-link-resource show --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName|
+|**--factory-name**|string|The factory name.|factory_name|factoryName|
+
### group `az datafactory trigger`
#### Command `az datafactory trigger list`