diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py index 14cee0777347..ec8185523fbd 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py @@ -27,6 +27,8 @@ from .operations import TriggersOperations from .operations import TriggerRunsOperations from .operations import RerunTriggersOperations +from .operations import DataFlowsOperations +from .operations import DataFlowDebugSessionOperations from . import models @@ -64,6 +66,10 @@ class DataFactoryManagementClient(SDKClient): :vartype trigger_runs: azure.mgmt.datafactory.operations.TriggerRunsOperations :ivar rerun_triggers: RerunTriggers operations :vartype rerun_triggers: azure.mgmt.datafactory.operations.RerunTriggersOperations + :ivar data_flows: DataFlows operations + :vartype data_flows: azure.mgmt.datafactory.operations.DataFlowsOperations + :ivar data_flow_debug_session: DataFlowDebugSession operations + :vartype data_flow_debug_session: azure.mgmt.datafactory.operations.DataFlowDebugSessionOperations :param credentials: Credentials needed for the client to connect to Azure. :type credentials: :mod:`A msrestazure Credentials @@ -112,3 +118,7 @@ def __init__( self._client, self.config, self._serialize, self._deserialize) self.rerun_triggers = RerunTriggersOperations( self._client, self.config, self._serialize, self._deserialize) + self.data_flows = DataFlowsOperations( + self._client, self.config, self._serialize, self._deserialize) + self.data_flow_debug_session = DataFlowDebugSessionOperations( + self._client, self.config, self._serialize, self._deserialize) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py index 15697b527360..d947b8055c7b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py @@ -16,6 +16,7 @@ from ._models_py3 import ActivityPolicy from ._models_py3 import ActivityRun from ._models_py3 import ActivityRunsQueryResponse + from ._models_py3 import AddDataFlowToDebugSessionResponse from ._models_py3 import AmazonMWSLinkedService from ._models_py3 import AmazonMWSObjectDataset from ._models_py3 import AmazonMWSSource @@ -124,9 +125,14 @@ from ._models_py3 import CosmosDbMongoDbApiLinkedService from ._models_py3 import CosmosDbMongoDbApiSink from ._models_py3 import CosmosDbMongoDbApiSource + from ._models_py3 import CosmosDbSqlApiCollectionDataset + from ._models_py3 import CosmosDbSqlApiSink + from ._models_py3 import CosmosDbSqlApiSource from ._models_py3 import CouchbaseLinkedService from ._models_py3 import CouchbaseSource from ._models_py3 import CouchbaseTableDataset + from ._models_py3 import CreateDataFlowDebugSessionRequest + from ._models_py3 import CreateDataFlowDebugSessionResponse from ._models_py3 import CreateLinkedIntegrationRuntimeRequest from ._models_py3 import CreateRunResponse from ._models_py3 import CustomActivity @@ -137,6 +143,20 @@ from ._models_py3 import DatabricksNotebookActivity from ._models_py3 import DatabricksSparkJarActivity from ._models_py3 import DatabricksSparkPythonActivity + from ._models_py3 import DataFlow + from 
._models_py3 import DataFlowDebugCommandPayload + from ._models_py3 import DataFlowDebugCommandRequest + from ._models_py3 import DataFlowDebugCommandResponse + from ._models_py3 import DataFlowDebugPackage + from ._models_py3 import DataFlowDebugPackageDebugSettings + from ._models_py3 import DataFlowDebugSessionInfo + from ._models_py3 import DataFlowFolder + from ._models_py3 import DataFlowReference + from ._models_py3 import DataFlowResource + from ._models_py3 import DataFlowSink + from ._models_py3 import DataFlowSource + from ._models_py3 import DataFlowSourceSetting + from ._models_py3 import DataFlowStagingInfo from ._models_py3 import DataLakeAnalyticsUSQLActivity from ._models_py3 import Dataset from ._models_py3 import DatasetBZip2Compression @@ -153,6 +173,7 @@ from ._models_py3 import Db2Source from ._models_py3 import Db2TableDataset from ._models_py3 import DeleteActivity + from ._models_py3 import DeleteDataFlowDebugSessionRequest from ._models_py3 import DelimitedTextDataset from ._models_py3 import DelimitedTextReadSettings from ._models_py3 import DelimitedTextSink @@ -182,6 +203,7 @@ from ._models_py3 import EloquaSource from ._models_py3 import EntityReference from ._models_py3 import EnvironmentVariableSetup + from ._models_py3 import ExecuteDataFlowActivity from ._models_py3 import ExecutePipelineActivity from ._models_py3 import ExecuteSSISPackageActivity from ._models_py3 import ExecutionActivity @@ -209,6 +231,7 @@ from ._models_py3 import FtpReadSettings from ._models_py3 import FtpServerLinkedService from ._models_py3 import FtpServerLocation + from ._models_py3 import GetDataFactoryOperationStatusResponse from ._models_py3 import GetMetadataActivity from ._models_py3 import GetSsisObjectMetadataRequest from ._models_py3 import GitHubAccessTokenRequest @@ -299,6 +322,7 @@ from ._models_py3 import ManagedIntegrationRuntimeNode from ._models_py3 import ManagedIntegrationRuntimeOperationResult from ._models_py3 import ManagedIntegrationRuntimeStatus + from ._models_py3 import MappingDataFlow from ._models_py3 import MariaDBLinkedService from ._models_py3 import MariaDBSource from ._models_py3 import MariaDBTableDataset @@ -491,11 +515,13 @@ from ._models_py3 import SybaseLinkedService from ._models_py3 import SybaseSource from ._models_py3 import SybaseTableDataset + from ._models_py3 import TabularSource from ._models_py3 import TeradataLinkedService from ._models_py3 import TeradataPartitionSettings from ._models_py3 import TeradataSource from ._models_py3 import TeradataTableDataset from ._models_py3 import TextFormat + from ._models_py3 import Transformation from ._models_py3 import Trigger from ._models_py3 import TriggerDependencyReference from ._models_py3 import TriggerPipelineReference @@ -540,6 +566,7 @@ from ._models import ActivityPolicy from ._models import ActivityRun from ._models import ActivityRunsQueryResponse + from ._models import AddDataFlowToDebugSessionResponse from ._models import AmazonMWSLinkedService from ._models import AmazonMWSObjectDataset from ._models import AmazonMWSSource @@ -648,9 +675,14 @@ from ._models import CosmosDbMongoDbApiLinkedService from ._models import CosmosDbMongoDbApiSink from ._models import CosmosDbMongoDbApiSource + from ._models import CosmosDbSqlApiCollectionDataset + from ._models import CosmosDbSqlApiSink + from ._models import CosmosDbSqlApiSource from ._models import CouchbaseLinkedService from ._models import CouchbaseSource from ._models import CouchbaseTableDataset + from ._models import 
CreateDataFlowDebugSessionRequest + from ._models import CreateDataFlowDebugSessionResponse from ._models import CreateLinkedIntegrationRuntimeRequest from ._models import CreateRunResponse from ._models import CustomActivity @@ -661,6 +693,20 @@ from ._models import DatabricksNotebookActivity from ._models import DatabricksSparkJarActivity from ._models import DatabricksSparkPythonActivity + from ._models import DataFlow + from ._models import DataFlowDebugCommandPayload + from ._models import DataFlowDebugCommandRequest + from ._models import DataFlowDebugCommandResponse + from ._models import DataFlowDebugPackage + from ._models import DataFlowDebugPackageDebugSettings + from ._models import DataFlowDebugSessionInfo + from ._models import DataFlowFolder + from ._models import DataFlowReference + from ._models import DataFlowResource + from ._models import DataFlowSink + from ._models import DataFlowSource + from ._models import DataFlowSourceSetting + from ._models import DataFlowStagingInfo from ._models import DataLakeAnalyticsUSQLActivity from ._models import Dataset from ._models import DatasetBZip2Compression @@ -677,6 +723,7 @@ from ._models import Db2Source from ._models import Db2TableDataset from ._models import DeleteActivity + from ._models import DeleteDataFlowDebugSessionRequest from ._models import DelimitedTextDataset from ._models import DelimitedTextReadSettings from ._models import DelimitedTextSink @@ -706,6 +753,7 @@ from ._models import EloquaSource from ._models import EntityReference from ._models import EnvironmentVariableSetup + from ._models import ExecuteDataFlowActivity from ._models import ExecutePipelineActivity from ._models import ExecuteSSISPackageActivity from ._models import ExecutionActivity @@ -733,6 +781,7 @@ from ._models import FtpReadSettings from ._models import FtpServerLinkedService from ._models import FtpServerLocation + from ._models import GetDataFactoryOperationStatusResponse from ._models import GetMetadataActivity from ._models import GetSsisObjectMetadataRequest from ._models import GitHubAccessTokenRequest @@ -823,6 +872,7 @@ from ._models import ManagedIntegrationRuntimeNode from ._models import ManagedIntegrationRuntimeOperationResult from ._models import ManagedIntegrationRuntimeStatus + from ._models import MappingDataFlow from ._models import MariaDBLinkedService from ._models import MariaDBSource from ._models import MariaDBTableDataset @@ -1015,11 +1065,13 @@ from ._models import SybaseLinkedService from ._models import SybaseSource from ._models import SybaseTableDataset + from ._models import TabularSource from ._models import TeradataLinkedService from ._models import TeradataPartitionSettings from ._models import TeradataSource from ._models import TeradataTableDataset from ._models import TextFormat + from ._models import Transformation from ._models import Trigger from ._models import TriggerDependencyReference from ._models import TriggerPipelineReference @@ -1057,6 +1109,8 @@ from ._models import ZohoLinkedService from ._models import ZohoObjectDataset from ._models import ZohoSource +from ._paged_models import DataFlowDebugSessionInfoPaged +from ._paged_models import DataFlowResourcePaged from ._paged_models import DatasetResourcePaged from ._paged_models import FactoryPaged from ._paged_models import IntegrationRuntimeResourcePaged @@ -1078,6 +1132,7 @@ RunQueryOrderByField, RunQueryOrder, TriggerRunStatus, + DataFlowDebugCommandType, TumblingWindowFrequency, BlobEventTypes, DayOfWeek, @@ -1112,13 +1167,13 @@ 
AvroCompressionCodec, AzureFunctionActivityMethod, WebActivityMethod, + OraclePartitionOption, + SalesforceSourceReadBehavior, NetezzaPartitionOption, CassandraSourceReadConsistencyLevels, TeradataPartitionOption, - OraclePartitionOption, StoredProcedureParameterType, SapTablePartitionOption, - SalesforceSourceReadBehavior, SsisPackageLocationType, HDInsightActivityDebugInfoOption, SalesforceSinkWriteBehavior, @@ -1147,6 +1202,7 @@ 'ActivityPolicy', 'ActivityRun', 'ActivityRunsQueryResponse', + 'AddDataFlowToDebugSessionResponse', 'AmazonMWSLinkedService', 'AmazonMWSObjectDataset', 'AmazonMWSSource', @@ -1255,9 +1311,14 @@ 'CosmosDbMongoDbApiLinkedService', 'CosmosDbMongoDbApiSink', 'CosmosDbMongoDbApiSource', + 'CosmosDbSqlApiCollectionDataset', + 'CosmosDbSqlApiSink', + 'CosmosDbSqlApiSource', 'CouchbaseLinkedService', 'CouchbaseSource', 'CouchbaseTableDataset', + 'CreateDataFlowDebugSessionRequest', + 'CreateDataFlowDebugSessionResponse', 'CreateLinkedIntegrationRuntimeRequest', 'CreateRunResponse', 'CustomActivity', @@ -1268,6 +1329,20 @@ 'DatabricksNotebookActivity', 'DatabricksSparkJarActivity', 'DatabricksSparkPythonActivity', + 'DataFlow', + 'DataFlowDebugCommandPayload', + 'DataFlowDebugCommandRequest', + 'DataFlowDebugCommandResponse', + 'DataFlowDebugPackage', + 'DataFlowDebugPackageDebugSettings', + 'DataFlowDebugSessionInfo', + 'DataFlowFolder', + 'DataFlowReference', + 'DataFlowResource', + 'DataFlowSink', + 'DataFlowSource', + 'DataFlowSourceSetting', + 'DataFlowStagingInfo', 'DataLakeAnalyticsUSQLActivity', 'Dataset', 'DatasetBZip2Compression', @@ -1284,6 +1359,7 @@ 'Db2Source', 'Db2TableDataset', 'DeleteActivity', + 'DeleteDataFlowDebugSessionRequest', 'DelimitedTextDataset', 'DelimitedTextReadSettings', 'DelimitedTextSink', @@ -1313,6 +1389,7 @@ 'EloquaSource', 'EntityReference', 'EnvironmentVariableSetup', + 'ExecuteDataFlowActivity', 'ExecutePipelineActivity', 'ExecuteSSISPackageActivity', 'ExecutionActivity', @@ -1340,6 +1417,7 @@ 'FtpReadSettings', 'FtpServerLinkedService', 'FtpServerLocation', + 'GetDataFactoryOperationStatusResponse', 'GetMetadataActivity', 'GetSsisObjectMetadataRequest', 'GitHubAccessTokenRequest', @@ -1430,6 +1508,7 @@ 'ManagedIntegrationRuntimeNode', 'ManagedIntegrationRuntimeOperationResult', 'ManagedIntegrationRuntimeStatus', + 'MappingDataFlow', 'MariaDBLinkedService', 'MariaDBSource', 'MariaDBTableDataset', @@ -1622,11 +1701,13 @@ 'SybaseLinkedService', 'SybaseSource', 'SybaseTableDataset', + 'TabularSource', 'TeradataLinkedService', 'TeradataPartitionSettings', 'TeradataSource', 'TeradataTableDataset', 'TextFormat', + 'Transformation', 'Trigger', 'TriggerDependencyReference', 'TriggerPipelineReference', @@ -1672,6 +1753,8 @@ 'PipelineResourcePaged', 'TriggerResourcePaged', 'RerunTriggerResourcePaged', + 'DataFlowResourcePaged', + 'DataFlowDebugSessionInfoPaged', 'IntegrationRuntimeState', 'IntegrationRuntimeAutoUpdate', 'ParameterType', @@ -1684,6 +1767,7 @@ 'RunQueryOrderByField', 'RunQueryOrder', 'TriggerRunStatus', + 'DataFlowDebugCommandType', 'TumblingWindowFrequency', 'BlobEventTypes', 'DayOfWeek', @@ -1718,13 +1802,13 @@ 'AvroCompressionCodec', 'AzureFunctionActivityMethod', 'WebActivityMethod', + 'OraclePartitionOption', + 'SalesforceSourceReadBehavior', 'NetezzaPartitionOption', 'CassandraSourceReadConsistencyLevels', 'TeradataPartitionOption', - 'OraclePartitionOption', 'StoredProcedureParameterType', 'SapTablePartitionOption', - 'SalesforceSourceReadBehavior', 'SsisPackageLocationType', 'HDInsightActivityDebugInfoOption', 
    'SalesforceSinkWriteBehavior',
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py
index 8b3e08f32768..052712638b66 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py
@@ -124,6 +124,13 @@ class TriggerRunStatus(str, Enum):
     inprogress = "Inprogress"
 
 
+class DataFlowDebugCommandType(str, Enum):
+
+    execute_preview_query = "executePreviewQuery"
+    execute_statistics_query = "executeStatisticsQuery"
+    execute_expression_query = "executeExpressionQuery"
+
+
 class TumblingWindowFrequency(str, Enum):
 
     minute = "Minute"
@@ -370,6 +377,19 @@ class WebActivityMethod(str, Enum):
     delete = "DELETE"
 
 
+class OraclePartitionOption(str, Enum):
+
+    none = "None"
+    physical_partitions_of_table = "PhysicalPartitionsOfTable"
+    dynamic_range = "DynamicRange"
+
+
+class SalesforceSourceReadBehavior(str, Enum):
+
+    query = "Query"
+    query_all = "QueryAll"
+
+
 class NetezzaPartitionOption(str, Enum):
 
     none = "None"
@@ -398,13 +418,6 @@ class TeradataPartitionOption(str, Enum):
     dynamic_range = "DynamicRange"
 
 
-class OraclePartitionOption(str, Enum):
-
-    none = "None"
-    physical_partitions_of_table = "PhysicalPartitionsOfTable"
-    dynamic_range = "DynamicRange"
-
-
 class StoredProcedureParameterType(str, Enum):
 
     string = "String"
@@ -426,12 +439,6 @@ class SapTablePartitionOption(str, Enum):
     partition_on_time = "PartitionOnTime"
 
 
-class SalesforceSourceReadBehavior(str, Enum):
-
-    query = "Query"
-    query_all = "QueryAll"
-
-
 class SsisPackageLocationType(str, Enum):
 
     ssisdb = "SSISDB"
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py
index a94e19b202ea..b7d729eb84dc 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py
@@ -286,6 +286,22 @@ def __init__(self, **kwargs):
         self.continuation_token = kwargs.get('continuation_token', None)
 
 
+class AddDataFlowToDebugSessionResponse(Model):
+    """Response body structure for starting data flow debug session.
+
+    :param job_version: The ID of data flow debug job version.
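Review note: the new DataFlowDebugSessionOperations group wired up in _data_factory_management_client.py, the request/response models exported from models/__init__.py, and the DataFlowDebugCommandType enum above combine into a debug-session round trip. A minimal sketch, assuming the conventional msrest operation method names (create, add_data_flow, execute_command, delete) and the request-model field names, none of which are shown in this excerpt; 'rg' and 'factory' are placeholder resource group and factory names:

    from azure.mgmt.datafactory import DataFactoryManagementClient
    from azure.mgmt.datafactory import models

    # credentials and subscription_id are supplied by the caller
    client = DataFactoryManagementClient(credentials, subscription_id)

    # Start a session, attach a debug package, run a preview query, clean up.
    session = client.data_flow_debug_session.create(
        'rg', 'factory',
        models.CreateDataFlowDebugSessionRequest(time_to_live=60)  # assumed field
    ).result()  # create is assumed to be a long-running operation
    client.data_flow_debug_session.add_data_flow(
        'rg', 'factory',
        models.DataFlowDebugPackage(session_id=session.session_id))  # assumed response field
    client.data_flow_debug_session.execute_command(
        'rg', 'factory',
        models.DataFlowDebugCommandRequest(
            session_id=session.session_id,
            command=models.DataFlowDebugCommandType.execute_preview_query))
    client.data_flow_debug_session.delete(
        'rg', 'factory',
        models.DeleteDataFlowDebugSessionRequest(session_id=session.session_id))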
+ :type job_version: str + """ + + _attribute_map = { + 'job_version': {'key': 'jobVersion', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(AddDataFlowToDebugSessionResponse, self).__init__(**kwargs) + self.job_version = kwargs.get('job_version', None) + + class LinkedService(Model): """The Azure Data Factory nested object which contains the information and credential which can be used to connect with related store or compute @@ -501,10 +517,11 @@ class Dataset(Model): MongoDbCollectionDataset, FileShareDataset, Office365Dataset, AzureBlobFSDataset, AzureDataLakeStoreDataset, CommonDataServiceForAppsEntityDataset, DynamicsCrmEntityDataset, - DynamicsEntityDataset, DocumentDbCollectionDataset, CustomDataset, - CassandraTableDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, - AzureSqlTableDataset, AzureTableDataset, AzureBlobDataset, BinaryDataset, - OrcDataset, JsonDataset, DelimitedTextDataset, ParquetDataset, AvroDataset, + DynamicsEntityDataset, DocumentDbCollectionDataset, + CosmosDbSqlApiCollectionDataset, CustomDataset, CassandraTableDataset, + AzureSqlDWTableDataset, AzureSqlMITableDataset, AzureSqlTableDataset, + AzureTableDataset, AzureBlobDataset, BinaryDataset, OrcDataset, + JsonDataset, DelimitedTextDataset, ParquetDataset, AvroDataset, AmazonS3Dataset All required parameters must be populated in order to send to Azure. @@ -555,7 +572,7 @@ class Dataset(Model): } _subtype_map = { - 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 
'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'Db2Table': 'Db2TableDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'Orc': 'OrcDataset', 'Json': 'JsonDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 
'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'Db2Table': 'Db2TableDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDbSqlApiCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'Orc': 'OrcDataset', 'Json': 'JsonDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'} } def __init__(self, **kwargs): @@ -634,6 +651,64 @@ def __init__(self, **kwargs): class CopySource(Model): """A copy activity source. + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: HttpSource, AzureBlobFSSource, AzureDataLakeStoreSource, + Office365Source, CosmosDbMongoDbApiSource, MongoDbV2Source, MongoDbSource, + WebSource, OracleSource, AzureDataExplorerSource, HdfsSource, + FileSystemSource, RestSource, SalesforceServiceCloudSource, ODataSource, + MicrosoftAccessSource, RelationalSource, CommonDataServiceForAppsSource, + DynamicsCrmSource, DynamicsSource, CosmosDbSqlApiSource, + DocumentDbCollectionSource, BlobSource, TabularSource, BinarySource, + OrcSource, JsonSource, DelimitedTextSource, ParquetSource, AvroSource + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'ODataSource': 'ODataSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'CosmosDbSqlApiSource': 'CosmosDbSqlApiSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'TabularSource': 'TabularSource', 'BinarySource': 'BinarySource', 'OrcSource': 'OrcSource', 'JsonSource': 'JsonSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource', 'AvroSource': 'AvroSource'} + } + + def __init__(self, **kwargs): + super(CopySource, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.source_retry_count = kwargs.get('source_retry_count', None) + self.source_retry_wait = kwargs.get('source_retry_wait', None) + self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) + self.type = None + + +class TabularSource(CopySource): + """Copy activity sources of tabular type. + You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: AmazonRedshiftSource, GoogleAdWordsSource, OracleServiceCloudSource, DynamicsAXSource, ResponsysSource, @@ -643,19 +718,12 @@ class CopySource(Model): AzureMariaDBSource, MariaDBSource, MagentoSource, JiraSource, ImpalaSource, HubspotSource, HiveSource, HBaseSource, GreenplumSource, GoogleBigQuerySource, EloquaSource, DrillSource, CouchbaseSource, - ConcurSource, AzurePostgreSqlSource, AmazonMWSSource, HttpSource, - AzureBlobFSSource, AzureDataLakeStoreSource, Office365Source, - CosmosDbMongoDbApiSource, MongoDbV2Source, MongoDbSource, CassandraSource, - WebSource, TeradataSource, OracleSource, AzureDataExplorerSource, - AzureMySqlSource, HdfsSource, FileSystemSource, SqlDWSource, SqlMISource, - AzureSqlSource, SqlServerSource, SqlSource, RestSource, SapTableSource, - SapOpenHubSource, SapHanaSource, SapEccSource, SapCloudForCustomerSource, - SalesforceServiceCloudSource, SalesforceSource, ODataSource, SapBwSource, - SybaseSource, PostgreSqlSource, MySqlSource, OdbcSource, Db2Source, - MicrosoftAccessSource, InformixSource, RelationalSource, - CommonDataServiceForAppsSource, DynamicsCrmSource, DynamicsSource, - DocumentDbCollectionSource, BlobSource, AzureTableSource, BinarySource, - OrcSource, JsonSource, DelimitedTextSource, ParquetSource, AvroSource + ConcurSource, AzurePostgreSqlSource, AmazonMWSSource, CassandraSource, + TeradataSource, AzureMySqlSource, SqlDWSource, SqlMISource, AzureSqlSource, + SqlServerSource, SqlSource, SapTableSource, SapOpenHubSource, + SapHanaSource, SapEccSource, SapCloudForCustomerSource, SalesforceSource, + SapBwSource, SybaseSource, PostgreSqlSource, MySqlSource, OdbcSource, + Db2Source, InformixSource, AzureTableSource All required parameters must be populated in order to send to Azure. @@ -675,6 +743,10 @@ class CopySource(Model): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type query_timeout: object """ _validation = { @@ -687,22 +759,20 @@ class CopySource(Model): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, } _subtype_map = { - 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'AzureMariaDBSource': 'AzureMariaDBSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SalesforceSource': 'SalesforceSource', 'ODataSource': 'ODataSource', 'SapBwSource': 'SapBwSource', 'SybaseSource': 'SybaseSource', 'PostgreSqlSource': 'PostgreSqlSource', 'MySqlSource': 'MySqlSource', 'OdbcSource': 'OdbcSource', 'Db2Source': 'Db2Source', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'InformixSource': 'InformixSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'BinarySource': 'BinarySource', 'OrcSource': 'OrcSource', 'JsonSource': 'JsonSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource', 'AvroSource': 
'AvroSource'} + 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'AzureMariaDBSource': 'AzureMariaDBSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'CassandraSource': 'CassandraSource', 'TeradataSource': 'TeradataSource', 'AzureMySqlSource': 'AzureMySqlSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'SapBwSource': 'SapBwSource', 'SybaseSource': 'SybaseSource', 'PostgreSqlSource': 'PostgreSqlSource', 'MySqlSource': 'MySqlSource', 'OdbcSource': 'OdbcSource', 'Db2Source': 'Db2Source', 'InformixSource': 'InformixSource', 'AzureTableSource': 'AzureTableSource'} } def __init__(self, **kwargs): - super(CopySource, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.source_retry_count = kwargs.get('source_retry_count', None) - self.source_retry_wait = kwargs.get('source_retry_wait', None) - self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) - self.type = None + super(TabularSource, self).__init__(**kwargs) + self.query_timeout = kwargs.get('query_timeout', None) + self.type = 'TabularSource' -class AmazonMWSSource(CopySource): +class AmazonMWSSource(TabularSource): """A copy activity Amazon Marketplace Web Service source. All required parameters must be populated in order to send to Azure. @@ -723,6 +793,10 @@ class AmazonMWSSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
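Review note: the refactor above pulls every relational/tabular source under the new TabularSource base, so queryTimeout is declared once and inherited instead of being repeated per source. A minimal sketch of the resulting model surface using AmazonMWSSource, whose fields appear in this hunk; the query text and timeout value are arbitrary examples:

    from azure.mgmt.datafactory import models

    src = models.AmazonMWSSource(
        query='SELECT * FROM Orders',   # example query
        query_timeout='02:00:00')       # inherited from TabularSource
    assert isinstance(src, models.TabularSource)
    assert isinstance(src, models.CopySource)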
:type query: object @@ -738,6 +812,7 @@ class AmazonMWSSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -821,7 +896,7 @@ def __init__(self, **kwargs): self.type = 'AmazonRedshift' -class AmazonRedshiftSource(CopySource): +class AmazonRedshiftSource(TabularSource): """A copy activity source for Amazon Redshift Source. All required parameters must be populated in order to send to Azure. @@ -842,6 +917,10 @@ class AmazonRedshiftSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object @@ -863,6 +942,7 @@ class AmazonRedshiftSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'redshift_unload_settings': {'key': 'redshiftUnloadSettings', 'type': 'RedshiftUnloadSettings'}, } @@ -1538,8 +1618,8 @@ class CopySink(Model): DynamicsCrmSink, DynamicsSink, MicrosoftAccessSink, InformixSink, OdbcSink, AzureSearchIndexSink, AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, SqlDWSink, SqlMISink, AzureSqlSink, SqlServerSink, SqlSink, - DocumentDbCollectionSink, FileSystemSink, BlobSink, BinarySink, - ParquetSink, AvroSink, AzureTableSink, AzureQueueSink, + CosmosDbSqlApiSink, DocumentDbCollectionSink, FileSystemSink, BlobSink, + BinarySink, ParquetSink, AvroSink, AzureTableSink, AzureQueueSink, SapCloudForCustomerSink, AzureMySqlSink, AzurePostgreSqlSink, OrcSink, JsonSink, DelimitedTextSink @@ -1585,7 +1665,7 @@ class CopySink(Model): } _subtype_map = { - 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'OrcSink': 'OrcSink', 'JsonSink': 'JsonSink', 'DelimitedTextSink': 'DelimitedTextSink'} + 'type': {'CosmosDbMongoDbApiSink': 
'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'OrcSink': 'OrcSink', 'JsonSink': 'JsonSink', 'DelimitedTextSink': 'DelimitedTextSink'} } def __init__(self, **kwargs): @@ -2730,11 +2810,12 @@ class ExecutionActivity(Activity): """Base class for all execution activities. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureFunctionActivity, DatabricksSparkPythonActivity, - DatabricksSparkJarActivity, DatabricksNotebookActivity, - DataLakeAnalyticsUSQLActivity, AzureMLUpdateResourceActivity, - AzureMLBatchExecutionActivity, GetMetadataActivity, WebActivity, - LookupActivity, AzureDataExplorerCommandActivity, DeleteActivity, + sub-classes are: ExecuteDataFlowActivity, AzureFunctionActivity, + DatabricksSparkPythonActivity, DatabricksSparkJarActivity, + DatabricksNotebookActivity, DataLakeAnalyticsUSQLActivity, + AzureMLUpdateResourceActivity, AzureMLBatchExecutionActivity, + GetMetadataActivity, WebActivity, LookupActivity, + AzureDataExplorerCommandActivity, DeleteActivity, SqlServerStoredProcedureActivity, CustomActivity, ExecuteSSISPackageActivity, HDInsightSparkActivity, HDInsightStreamingActivity, HDInsightMapReduceActivity, @@ -2779,7 +2860,7 @@ class ExecutionActivity(Activity): } _subtype_map = { - 'type': {'AzureFunctionActivity': 'AzureFunctionActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'GetMetadata': 'GetMetadataActivity', 'WebActivity': 'WebActivity', 'Lookup': 'LookupActivity', 'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'Delete': 'DeleteActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'Custom': 'CustomActivity', 'ExecuteSSISPackage': 'ExecuteSSISPackageActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'Copy': 'CopyActivity'} + 'type': {'ExecuteDataFlow': 'ExecuteDataFlowActivity', 'AzureFunctionActivity': 'AzureFunctionActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'DatabricksSparkJar': 
'DatabricksSparkJarActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'GetMetadata': 'GetMetadataActivity', 'WebActivity': 'WebActivity', 'Lookup': 'LookupActivity', 'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'Delete': 'DeleteActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'Custom': 'CustomActivity', 'ExecuteSSISPackage': 'ExecuteSSISPackageActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'Copy': 'CopyActivity'} } def __init__(self, **kwargs): @@ -3885,7 +3966,7 @@ def __init__(self, **kwargs): self.type = 'AzureMariaDB' -class AzureMariaDBSource(CopySource): +class AzureMariaDBSource(TabularSource): """A copy activity Azure MariaDB source. All required parameters must be populated in order to send to Azure. @@ -3906,6 +3987,10 @@ class AzureMariaDBSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -3921,6 +4006,7 @@ class AzureMariaDBSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -4355,7 +4441,7 @@ def __init__(self, **kwargs): self.type = 'AzureMySqlSink' -class AzureMySqlSource(CopySource): +class AzureMySqlSource(TabularSource): """A copy activity Azure MySQL source. All required parameters must be populated in order to send to Azure. @@ -4376,6 +4462,10 @@ class AzureMySqlSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object @@ -4391,6 +4481,7 @@ class AzureMySqlSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -4572,7 +4663,7 @@ def __init__(self, **kwargs): self.type = 'AzurePostgreSqlSink' -class AzurePostgreSqlSource(CopySource): +class AzurePostgreSqlSource(TabularSource): """A copy activity Azure PostgreSQL source. All required parameters must be populated in order to send to Azure. @@ -4593,6 +4684,10 @@ class AzurePostgreSqlSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. 
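Review note: the 'ExecuteDataFlow' entry added to the ExecutionActivity subtype map above dispatches deserialization to the new ExecuteDataFlowActivity. A minimal construction sketch; the data_flow parameter name is assumed from the DataFlowReference model exported by this PR, since the activity's own definition is not part of this excerpt:

    from azure.mgmt.datafactory import models

    activity = models.ExecuteDataFlowActivity(
        name='RunMappingDataFlow',                                 # placeholder activity name
        data_flow=models.DataFlowReference(reference_name='df1'))  # assumed parameter name
    assert activity.type == 'ExecuteDataFlow'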
:type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -4608,6 +4703,7 @@ class AzurePostgreSqlSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -5358,7 +5454,7 @@ def __init__(self, **kwargs): self.type = 'AzureSqlSink' -class AzureSqlSource(CopySource): +class AzureSqlSource(TabularSource): """A copy activity Azure SQL source. All required parameters must be populated in order to send to Azure. @@ -5379,6 +5475,10 @@ class AzureSqlSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). :type sql_reader_query: object @@ -5404,6 +5504,7 @@ class AzureSqlSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, @@ -5690,7 +5791,7 @@ def __init__(self, **kwargs): self.type = 'AzureTableSink' -class AzureTableSource(CopySource): +class AzureTableSource(TabularSource): """A copy activity Azure Table source. All required parameters must be populated in order to send to Azure. @@ -5711,6 +5812,10 @@ class AzureTableSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param azure_table_source_query: Azure Table source query. Type: string (or Expression with resultType string). :type azure_table_source_query: object @@ -5730,6 +5835,7 @@ class AzureTableSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'}, 'azure_table_source_ignore_table_not_found': {'key': 'azureTableSourceIgnoreTableNotFound', 'type': 'object'}, } @@ -6421,7 +6527,7 @@ def __init__(self, **kwargs): self.type = 'Cassandra' -class CassandraSource(CopySource): +class CassandraSource(TabularSource): """A copy activity source for a Cassandra database. 
All required parameters must be populated in order to send to Azure. @@ -6442,6 +6548,10 @@ class CassandraSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: Database query. Should be a SQL-92 query expression or Cassandra Query Language (CQL) command. Type: string (or Expression with resultType string). @@ -6468,6 +6578,7 @@ class CassandraSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'consistency_level': {'key': 'consistencyLevel', 'type': 'str'}, } @@ -7174,7 +7285,7 @@ def __init__(self, **kwargs): self.type = 'ConcurObject' -class ConcurSource(CopySource): +class ConcurSource(TabularSource): """A copy activity Concur Service source. All required parameters must be populated in order to send to Azure. @@ -7195,6 +7306,10 @@ class ConcurSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -7210,6 +7325,7 @@ class ConcurSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -7352,13 +7468,18 @@ class CosmosDbLinkedService(LinkedService): :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str - :param connection_string: Required. The connection string. Type: string, + :param connection_string: The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object - :param account_key: The Azure key vault secret reference of accountKey in - connection string. - :type account_key: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param account_endpoint: The endpoint of the Azure CosmosDB account. Type: + string (or Expression with resultType string) + :type account_endpoint: object + :param database: The name of the database. Type: string (or Expression + with resultType string) + :type database: object + :param account_key: The account key of the Azure CosmosDB account. Type: + SecureString or AzureKeyVaultSecretReference. + :type account_key: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
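Review note: the CosmosDbLinkedService docstring changes above (and the validation/attribute-map hunks that follow) drop the required flag on connectionString and add an accountEndpoint/database pair, so the linked service can now be defined from an endpoint plus account key instead of a full connection string. A minimal sketch; the endpoint and key values are placeholders, and SecureString stands in as one concrete SecretBase:

    from azure.mgmt.datafactory import models

    cosmos_ls = models.CosmosDbLinkedService(
        account_endpoint='https://myaccount.documents.azure.com:443/',  # placeholder
        database='mydb',                                                # placeholder
        account_key=models.SecureString(value='<account-key>'))        # SecretBase now accepted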
@@ -7367,7 +7488,6 @@
 
     _validation = {
         'type': {'required': True},
-        'connection_string': {'required': True},
     }
 
     _attribute_map = {
@@ -7378,13 +7498,17 @@ class CosmosDbLinkedService(LinkedService):
         'annotations': {'key': 'annotations', 'type': '[object]'},
         'type': {'key': 'type', 'type': 'str'},
         'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
-        'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'},
+        'account_endpoint': {'key': 'typeProperties.accountEndpoint', 'type': 'object'},
+        'database': {'key': 'typeProperties.database', 'type': 'object'},
+        'account_key': {'key': 'typeProperties.accountKey', 'type': 'SecretBase'},
         'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
     }
 
     def __init__(self, **kwargs):
         super(CosmosDbLinkedService, self).__init__(**kwargs)
         self.connection_string = kwargs.get('connection_string', None)
+        self.account_endpoint = kwargs.get('account_endpoint', None)
+        self.database = kwargs.get('database', None)
         self.account_key = kwargs.get('account_key', None)
         self.encrypted_credential = kwargs.get('encrypted_credential', None)
         self.type = 'CosmosDb'
@@ -7596,6 +7720,10 @@
     main purpose is to avoid hit the limitation of response size. Type:
     integer (or Expression with resultType integer).
     :type batch_size: object
+    :param query_timeout: Query timeout. Type: string (or Expression with
+     resultType string), pattern:
+     ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type query_timeout: object
     """
 
     _validation = {
@@ -7611,6 +7739,7 @@
         'filter': {'key': 'filter', 'type': 'object'},
         'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'},
         'batch_size': {'key': 'batchSize', 'type': 'object'},
+        'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
     }
 
     def __init__(self, **kwargs):
@@ -7618,69 +7747,128 @@
         self.filter = kwargs.get('filter', None)
         self.cursor_methods = kwargs.get('cursor_methods', None)
         self.batch_size = kwargs.get('batch_size', None)
+        self.query_timeout = kwargs.get('query_timeout', None)
         self.type = 'CosmosDbMongoDbApiSource'
 
 
-class CouchbaseLinkedService(LinkedService):
-    """Couchbase server linked service.
+class CosmosDbSqlApiCollectionDataset(Dataset):
+    """Microsoft Azure CosmosDB (SQL API) Collection dataset.
 
     All required parameters must be populated in order to send to Azure.
 
     :param additional_properties: Unmatched properties from the message are
      deserialized this collection
     :type additional_properties: dict[str, object]
-    :param connect_via: The integration runtime reference.
-    :type connect_via:
-     ~azure.mgmt.datafactory.models.IntegrationRuntimeReference
-    :param description: Linked service description.
+    :param description: Dataset description.
     :type description: str
-    :param parameters: Parameters for linked service.
+    :param structure: Columns that define the structure of the dataset. Type:
+     array (or Expression with resultType array), itemType: DatasetDataElement.
+    :type structure: object
+    :param schema: Columns that define the physical type schema of the
+     dataset. Type: array (or Expression with resultType array), itemType:
+     DatasetSchemaDataElement.
+    :type schema: object
+    :param linked_service_name: Required. Linked service reference.
+ :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - linked service. + Dataset. :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param connection_string: An ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param cred_string: The Azure key vault secret reference of credString in - connection string. - :type cred_string: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :param collection_name: Required. CosmosDB (SQL API) collection name. + Type: string (or Expression with resultType string). + :type collection_name: object """ _validation = { + 'linked_service_name': {'required': True}, 'type': {'required': True}, + 'collection_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'cred_string': {'key': 'typeProperties.credString', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, } def __init__(self, **kwargs): - super(CouchbaseLinkedService, self).__init__(**kwargs) - self.connection_string = kwargs.get('connection_string', None) - self.cred_string = kwargs.get('cred_string', None) - self.encrypted_credential = kwargs.get('encrypted_credential', None) - self.type = 'Couchbase' + super(CosmosDbSqlApiCollectionDataset, self).__init__(**kwargs) + self.collection_name = kwargs.get('collection_name', None) + self.type = 'CosmosDbSqlApiCollection' -class CouchbaseSource(CopySource): - """A copy activity Couchbase server source. +class CosmosDbSqlApiSink(CopySink): + """A copy activity Azure CosmosDB (SQL API) Collection sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. 
Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param write_behavior: Describes how to write data to Azure Cosmos DB. + Type: string (or Expression with resultType string). Allowed values: + insert and upsert. + :type write_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CosmosDbSqlApiSink, self).__init__(**kwargs) + self.write_behavior = kwargs.get('write_behavior', None) + self.type = 'CosmosDbSqlApiSink' + + +class CosmosDbSqlApiSource(CopySource): + """A copy activity Azure CosmosDB (SQL API) Collection source. All required parameters must be populated in order to send to Azure. @@ -7700,9 +7888,15 @@ class CouchbaseSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). + :param query: SQL API query. Type: string (or Expression with resultType + string). :type query: object + :param page_size: Page size of the result. Type: integer (or Expression + with resultType integer). + :type page_size: object + :param preferred_regions: Preferred regions. Type: array of strings (or + Expression with resultType array of strings). + :type preferred_regions: object """ _validation = { @@ -7716,29 +7910,141 @@ class CouchbaseSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, + 'page_size': {'key': 'pageSize', 'type': 'object'}, + 'preferred_regions': {'key': 'preferredRegions', 'type': 'object'}, } def __init__(self, **kwargs): - super(CouchbaseSource, self).__init__(**kwargs) + super(CosmosDbSqlApiSource, self).__init__(**kwargs) self.query = kwargs.get('query', None) - self.type = 'CouchbaseSource' + self.page_size = kwargs.get('page_size', None) + self.preferred_regions = kwargs.get('preferred_regions', None) + self.type = 'CosmosDbSqlApiSource' -class CouchbaseTableDataset(Dataset): - """Couchbase server dataset. +class CouchbaseLinkedService(LinkedService): + """Couchbase server linked service. 
All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param description: Dataset description. + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. :type description: str - :param structure: Columns that define the structure of the dataset. Type: - array (or Expression with resultType array), itemType: DatasetDataElement. - :type structure: object - :param schema: Columns that define the physical type schema of the - dataset. Type: array (or Expression with resultType array), itemType: + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param cred_string: The Azure key vault secret reference of credString in + connection string. + :type cred_string: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'cred_string': {'key': 'typeProperties.credString', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CouchbaseLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.cred_string = kwargs.get('cred_string', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Couchbase' + + +class CouchbaseSource(TabularSource): + """A copy activity Couchbase server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. 
Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(CouchbaseSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.type = 'CouchbaseSource' + + +class CouchbaseTableDataset(Dataset): + """Couchbase server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. :type schema: object :param linked_service_name: Required. Linked service reference. @@ -7784,6 +8090,58 @@ def __init__(self, **kwargs): self.type = 'CouchbaseTable' +class CreateDataFlowDebugSessionRequest(Model): + """Request body structure for creating data flow debug session. + + :param compute_type: Compute type of the cluster. The value will be + overwritten by the same setting in integration runtime if provided. + :type compute_type: str + :param core_count: Core count of the cluster. The value will be + overwritten by the same setting in integration runtime if provided. + :type core_count: int + :param time_to_live: Time to live setting of the cluster in minutes. + :type time_to_live: int + :param integration_runtime: Set to use integration runtime setting for + data flow debug session. + :type integration_runtime: + ~azure.mgmt.datafactory.models.IntegrationRuntimeResource + """ + + _attribute_map = { + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'core_count': {'key': 'coreCount', 'type': 'int'}, + 'time_to_live': {'key': 'timeToLive', 'type': 'int'}, + 'integration_runtime': {'key': 'integrationRuntime', 'type': 'IntegrationRuntimeResource'}, + } + + def __init__(self, **kwargs): + super(CreateDataFlowDebugSessionRequest, self).__init__(**kwargs) + self.compute_type = kwargs.get('compute_type', None) + self.core_count = kwargs.get('core_count', None) + self.time_to_live = kwargs.get('time_to_live', None) + self.integration_runtime = kwargs.get('integration_runtime', None) + + +class CreateDataFlowDebugSessionResponse(Model): + """Response body structure for creating data flow debug session. 
+ + :param status: The state of the debug session. + :type status: str + :param session_id: The ID of data flow debug session. + :type session_id: str + """ + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'session_id': {'key': 'sessionId', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(CreateDataFlowDebugSessionResponse, self).__init__(**kwargs) + self.status = kwargs.get('status', None) + self.session_id = kwargs.get('session_id', None) + + class CreateLinkedIntegrationRuntimeRequest(Model): """The linked integration runtime information. @@ -8106,130 +8464,660 @@ def __init__(self, **kwargs): self.type = 'DatabricksNotebook' -class DatabricksSparkJarActivity(ExecutionActivity): - """DatabricksSparkJar activity. +class DatabricksSparkJarActivity(ExecutionActivity): + """DatabricksSparkJar activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param main_class_name: Required. The full name of the class containing + the main method to be executed. This class must be contained in a JAR + provided as a library. Type: string (or Expression with resultType + string). + :type main_class_name: object + :param parameters: Parameters that will be passed to the main method. + :type parameters: list[object] + :param libraries: A list of libraries to be installed on the cluster that + will execute the job. + :type libraries: list[dict[str, object]] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'main_class_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'main_class_name': {'key': 'typeProperties.mainClassName', 'type': 'object'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, + 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, + } + + def __init__(self, **kwargs): + super(DatabricksSparkJarActivity, self).__init__(**kwargs) + self.main_class_name = kwargs.get('main_class_name', None) + self.parameters = kwargs.get('parameters', None) + self.libraries = kwargs.get('libraries', None) + self.type = 'DatabricksSparkJar' + + +class DatabricksSparkPythonActivity(ExecutionActivity): + """DatabricksSparkPython activity. 
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param python_file: Required. The URI of the Python file to be executed. + DBFS paths are supported. Type: string (or Expression with resultType + string). + :type python_file: object + :param parameters: Command line parameters that will be passed to the + Python file. + :type parameters: list[object] + :param libraries: A list of libraries to be installed on the cluster that + will execute the job. + :type libraries: list[dict[str, object]] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'python_file': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'python_file': {'key': 'typeProperties.pythonFile', 'type': 'object'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, + 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, + } + + def __init__(self, **kwargs): + super(DatabricksSparkPythonActivity, self).__init__(**kwargs) + self.python_file = kwargs.get('python_file', None) + self.parameters = kwargs.get('parameters', None) + self.libraries = kwargs.get('libraries', None) + self.type = 'DatabricksSparkPython' + + +class DataFlow(Model): + """Azure Data Factory nested object which contains a flow with data movements + and transformations. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: MappingDataFlow + + All required parameters must be populated in order to send to Azure. + + :param description: The description of the data flow. + :type description: str + :param annotations: List of tags that can be used for describing the data + flow. + :type annotations: list[object] + :param folder: The folder that this data flow is in. If not specified, + Data flow will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DataFlowFolder + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DataFlowFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'MappingDataFlow': 'MappingDataFlow'} + } + + def __init__(self, **kwargs): + super(DataFlow, self).__init__(**kwargs) + self.description = kwargs.get('description', None) + self.annotations = kwargs.get('annotations', None) + self.folder = kwargs.get('folder', None) + self.type = None + + +class DataFlowDebugCommandPayload(Model): + """Structure of command payload. + + All required parameters must be populated in order to send to Azure. + + :param stream_name: Required. The stream name which is used for preview. + :type stream_name: str + :param row_limits: Row limits for preview response. + :type row_limits: int + :param columns: Array of column names. + :type columns: list[str] + :param expression: The expression which is used for preview. + :type expression: str + """ + + _validation = { + 'stream_name': {'required': True}, + } + + _attribute_map = { + 'stream_name': {'key': 'streamName', 'type': 'str'}, + 'row_limits': {'key': 'rowLimits', 'type': 'int'}, + 'columns': {'key': 'columns', 'type': '[str]'}, + 'expression': {'key': 'expression', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(DataFlowDebugCommandPayload, self).__init__(**kwargs) + self.stream_name = kwargs.get('stream_name', None) + self.row_limits = kwargs.get('row_limits', None) + self.columns = kwargs.get('columns', None) + self.expression = kwargs.get('expression', None) + + +class DataFlowDebugCommandRequest(Model): + """Request body structure for data flow debug command. + + :param session_id: The ID of data flow debug session. + :type session_id: str + :param command: The command type. Possible values include: + 'executePreviewQuery', 'executeStatisticsQuery', 'executeExpressionQuery' + :type command: str or + ~azure.mgmt.datafactory.models.DataFlowDebugCommandType + :param command_payload: The command payload object. + :type command_payload: + ~azure.mgmt.datafactory.models.DataFlowDebugCommandPayload + """ + + _attribute_map = { + 'session_id': {'key': 'sessionId', 'type': 'str'}, + 'command': {'key': 'command', 'type': 'str'}, + 'command_payload': {'key': 'commandPayload', 'type': 'DataFlowDebugCommandPayload'}, + } + + def __init__(self, **kwargs): + super(DataFlowDebugCommandRequest, self).__init__(**kwargs) + self.session_id = kwargs.get('session_id', None) + self.command = kwargs.get('command', None) + self.command_payload = kwargs.get('command_payload', None) + + +class DataFlowDebugCommandResponse(Model): + """Response body structure of data flow result for data preview, statistics or + expression preview. + + :param status: The run status of data preview, statistics or expression + preview. + :type status: str + :param data: The result data of data preview, statistics or expression + preview. + :type data: str + """ + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'data': {'key': 'data', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(DataFlowDebugCommandResponse, self).__init__(**kwargs) + self.status = kwargs.get('status', None) + self.data = kwargs.get('data', None) + + +class DataFlowDebugPackage(Model): + """Request body structure for starting data flow debug session. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param session_id: The ID of data flow debug session. + :type session_id: str + :param data_flow: Data flow instance. + :type data_flow: ~azure.mgmt.datafactory.models.DataFlowResource + :param datasets: List of datasets. + :type datasets: list[~azure.mgmt.datafactory.models.DatasetResource] + :param linked_services: List of linked services. + :type linked_services: + list[~azure.mgmt.datafactory.models.LinkedServiceResource] + :param staging: Staging info for debug session. + :type staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo + :param debug_settings: Data flow debug settings. + :type debug_settings: + ~azure.mgmt.datafactory.models.DataFlowDebugPackageDebugSettings + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'session_id': {'key': 'sessionId', 'type': 'str'}, + 'data_flow': {'key': 'dataFlow', 'type': 'DataFlowResource'}, + 'datasets': {'key': 'datasets', 'type': '[DatasetResource]'}, + 'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceResource]'}, + 'staging': {'key': 'staging', 'type': 'DataFlowStagingInfo'}, + 'debug_settings': {'key': 'debugSettings', 'type': 'DataFlowDebugPackageDebugSettings'}, + } + + def __init__(self, **kwargs): + super(DataFlowDebugPackage, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.session_id = kwargs.get('session_id', None) + self.data_flow = kwargs.get('data_flow', None) + self.datasets = kwargs.get('datasets', None) + self.linked_services = kwargs.get('linked_services', None) + self.staging = kwargs.get('staging', None) + self.debug_settings = kwargs.get('debug_settings', None) + + +class DataFlowDebugPackageDebugSettings(Model): + """Data flow debug settings. + + :param source_settings: Source setting for data flow debug. + :type source_settings: + list[~azure.mgmt.datafactory.models.DataFlowSourceSetting] + :param parameters: Data flow parameters. + :type parameters: dict[str, object] + :param dataset_parameters: Parameters for dataset. + :type dataset_parameters: object + """ + + _attribute_map = { + 'source_settings': {'key': 'sourceSettings', 'type': '[DataFlowSourceSetting]'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + 'dataset_parameters': {'key': 'datasetParameters', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DataFlowDebugPackageDebugSettings, self).__init__(**kwargs) + self.source_settings = kwargs.get('source_settings', None) + self.parameters = kwargs.get('parameters', None) + self.dataset_parameters = kwargs.get('dataset_parameters', None) + + +class DataFlowDebugSessionInfo(Model): + """Data flow debug session info. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param data_flow_name: The name of the data flow. + :type data_flow_name: str + :param compute_type: Compute type of the cluster. + :type compute_type: str + :param core_count: Core count of the cluster. + :type core_count: int + :param node_count: Node count of the cluster. (deprecated property) + :type node_count: int + :param integration_runtime_name: Attached integration runtime name of data + flow debug session. + :type integration_runtime_name: str + :param session_id: The ID of data flow debug session. 
+ :type session_id: str
+ :param start_time: Start time of data flow debug session.
+ :type start_time: str
+ :param time_to_live_in_minutes: Time to live setting of the cluster in minutes.
+ :type time_to_live_in_minutes: int
+ :param last_activity_time: Last activity time of data flow debug session.
+ :type last_activity_time: str
+ """
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'},
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'core_count': {'key': 'coreCount', 'type': 'int'},
+ 'node_count': {'key': 'nodeCount', 'type': 'int'},
+ 'integration_runtime_name': {'key': 'integrationRuntimeName', 'type': 'str'},
+ 'session_id': {'key': 'sessionId', 'type': 'str'},
+ 'start_time': {'key': 'startTime', 'type': 'str'},
+ 'time_to_live_in_minutes': {'key': 'timeToLiveInMinutes', 'type': 'int'},
+ 'last_activity_time': {'key': 'lastActivityTime', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(DataFlowDebugSessionInfo, self).__init__(**kwargs)
+ self.additional_properties = kwargs.get('additional_properties', None)
+ self.data_flow_name = kwargs.get('data_flow_name', None)
+ self.compute_type = kwargs.get('compute_type', None)
+ self.core_count = kwargs.get('core_count', None)
+ self.node_count = kwargs.get('node_count', None)
+ self.integration_runtime_name = kwargs.get('integration_runtime_name', None)
+ self.session_id = kwargs.get('session_id', None)
+ self.start_time = kwargs.get('start_time', None)
+ self.time_to_live_in_minutes = kwargs.get('time_to_live_in_minutes', None)
+ self.last_activity_time = kwargs.get('last_activity_time', None)
+
+
+class DataFlowFolder(Model):
+ """The folder that this data flow is in. If not specified, Data flow will
+ appear at the root level.
+
+ :param name: The name of the folder that this data flow is in.
+ :type name: str
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(DataFlowFolder, self).__init__(**kwargs)
+ self.name = kwargs.get('name', None)
+
+
+class DataFlowReference(Model):
+ """Data flow reference type.
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are
+ deserialized this collection
+ :type additional_properties: dict[str, object]
+ :ivar type: Required. Data flow reference type. Default value:
+ "DataFlowReference".
+ :vartype type: str
+ :param reference_name: Required. Reference data flow name.
+ :type reference_name: str
+ :param dataset_parameters: Reference data flow parameters from dataset.
+ :type dataset_parameters: object + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'dataset_parameters': {'key': 'datasetParameters', 'type': 'object'}, + } + + type = "DataFlowReference" + + def __init__(self, **kwargs): + super(DataFlowReference, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.reference_name = kwargs.get('reference_name', None) + self.dataset_parameters = kwargs.get('dataset_parameters', None) + + +class SubResource(Model): + """Azure Data Factory nested resource, which belongs to a factory. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SubResource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None + self.etag = None + + +class DataFlowResource(SubResource): + """Data flow resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Data flow properties. + :type properties: ~azure.mgmt.datafactory.models.DataFlow + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'DataFlow'}, + } + + def __init__(self, **kwargs): + super(DataFlowResource, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) + + +class Transformation(Model): + """A data flow transformation. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Transformation name. + :type name: str + :param description: Transformation description. 
+ :type description: str + """ + + _validation = { + 'name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(Transformation, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.name = kwargs.get('name', None) + self.description = kwargs.get('description', None) + + +class DataFlowSink(Transformation): + """Transformation for data flow sink. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Required. Activity name. + :param name: Required. Transformation name. :type name: str - :param description: Activity description. + :param description: Transformation description. :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param main_class_name: Required. The full name of the class containing - the main method to be executed. This class must be contained in a JAR - provided as a library. Type: string (or Expression with resultType - string). - :type main_class_name: object - :param parameters: Parameters that will be passed to the main method. - :type parameters: list[object] - :param libraries: A list of libraries to be installed on the cluster that - will execute the job. - :type libraries: list[dict[str, object]] + :param dataset: Dataset reference. 
+ :type dataset: ~azure.mgmt.datafactory.models.DatasetReference """ _validation = { 'name': {'required': True}, - 'type': {'required': True}, - 'main_class_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'main_class_name': {'key': 'typeProperties.mainClassName', 'type': 'object'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, - 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, + 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, } def __init__(self, **kwargs): - super(DatabricksSparkJarActivity, self).__init__(**kwargs) - self.main_class_name = kwargs.get('main_class_name', None) - self.parameters = kwargs.get('parameters', None) - self.libraries = kwargs.get('libraries', None) - self.type = 'DatabricksSparkJar' + super(DataFlowSink, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.dataset = kwargs.get('dataset', None) -class DatabricksSparkPythonActivity(ExecutionActivity): - """DatabricksSparkPython activity. +class DataFlowSource(Transformation): + """Transformation for data flow source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Required. Activity name. + :param name: Required. Transformation name. :type name: str - :param description: Activity description. + :param description: Transformation description. :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param python_file: Required. The URI of the Python file to be executed. - DBFS paths are supported. Type: string (or Expression with resultType - string). - :type python_file: object - :param parameters: Command line parameters that will be passed to the - Python file. - :type parameters: list[object] - :param libraries: A list of libraries to be installed on the cluster that - will execute the job. - :type libraries: list[dict[str, object]] + :param dataset: Dataset reference. 
+ :type dataset: ~azure.mgmt.datafactory.models.DatasetReference """ _validation = { 'name': {'required': True}, - 'type': {'required': True}, - 'python_file': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'python_file': {'key': 'typeProperties.pythonFile', 'type': 'object'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, - 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, + 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, } def __init__(self, **kwargs): - super(DatabricksSparkPythonActivity, self).__init__(**kwargs) - self.python_file = kwargs.get('python_file', None) - self.parameters = kwargs.get('parameters', None) - self.libraries = kwargs.get('libraries', None) - self.type = 'DatabricksSparkPython' + super(DataFlowSource, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.dataset = kwargs.get('dataset', None) + + +class DataFlowSourceSetting(Model): + """Definition of data flow source setting for debug. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_name: The data flow source name. + :type source_name: str + :param row_limit: Defines the row limit of data flow source in debug. + :type row_limit: int + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_name': {'key': 'sourceName', 'type': 'str'}, + 'row_limit': {'key': 'rowLimit', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(DataFlowSourceSetting, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.source_name = kwargs.get('source_name', None) + self.row_limit = kwargs.get('row_limit', None) + + +class DataFlowStagingInfo(Model): + """Staging info for execute data flow activity. + + :param linked_service: Staging linked service reference. + :type linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param folder_path: Folder path for staging blob. + :type folder_path: str + """ + + _attribute_map = { + 'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'}, + 'folder_path': {'key': 'folderPath', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(DataFlowStagingInfo, self).__init__(**kwargs) + self.linked_service = kwargs.get('linked_service', None) + self.folder_path = kwargs.get('folder_path', None) class DataLakeAnalyticsUSQLActivity(ExecutionActivity): @@ -8492,44 +9380,6 @@ def __init__(self, **kwargs): self.parameters = kwargs.get('parameters', None) -class SubResource(Model): - """Azure Data Factory nested resource, which belongs to a factory. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. 
- :vartype etag: str - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - } - - def __init__(self, **kwargs): - super(SubResource, self).__init__(**kwargs) - self.id = None - self.name = None - self.type = None - self.etag = None - - class DatasetResource(SubResource): """Dataset resource type. @@ -8675,7 +9525,7 @@ def __init__(self, **kwargs): self.type = 'Db2' -class Db2Source(CopySource): +class Db2Source(TabularSource): """A copy activity source for Db2 databases. All required parameters must be populated in order to send to Azure. @@ -8696,6 +9546,10 @@ class Db2Source(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object @@ -8711,6 +9565,7 @@ class Db2Source(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -8865,6 +9720,22 @@ def __init__(self, **kwargs): self.type = 'Delete' +class DeleteDataFlowDebugSessionRequest(Model): + """Request body structure for deleting data flow debug session. + + :param session_id: The ID of data flow debug session. + :type session_id: str + """ + + _attribute_map = { + 'session_id': {'key': 'sessionId', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(DeleteDataFlowDebugSessionRequest, self).__init__(**kwargs) + self.session_id = kwargs.get('session_id', None) + + class DelimitedTextDataset(Dataset): """Delimited text dataset. @@ -9337,7 +10208,8 @@ class DocumentDbCollectionSink(CopySink): Type: string (or Expression with resultType string). :type nesting_separator: object :param write_behavior: Describes how to write data to Azure Cosmos DB. - Allowed values: insert and upsert. + Type: string (or Expression with resultType string). Allowed values: + insert and upsert. :type write_behavior: object """ @@ -9391,6 +10263,10 @@ class DocumentDbCollectionSource(CopySource): :param nesting_separator: Nested properties separator. Type: string (or Expression with resultType string). :type nesting_separator: object + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type query_timeout: object """ _validation = { @@ -9405,12 +10281,14 @@ class DocumentDbCollectionSource(CopySource): 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, } def __init__(self, **kwargs): super(DocumentDbCollectionSource, self).__init__(**kwargs) self.query = kwargs.get('query', None) self.nesting_separator = kwargs.get('nesting_separator', None) + self.query_timeout = kwargs.get('query_timeout', None) self.type = 'DocumentDbCollectionSource' @@ -9471,7 +10349,7 @@ def __init__(self, **kwargs): self.type = 'Drill' -class DrillSource(CopySource): +class DrillSource(TabularSource): """A copy activity Drill server source. All required parameters must be populated in order to send to Azure. @@ -9492,6 +10370,10 @@ class DrillSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -9507,6 +10389,7 @@ class DrillSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -9728,7 +10611,7 @@ def __init__(self, **kwargs): self.type = 'DynamicsAXResource' -class DynamicsAXSource(CopySource): +class DynamicsAXSource(TabularSource): """A copy activity Dynamics AX source. All required parameters must be populated in order to send to Azure. @@ -9749,6 +10632,10 @@ class DynamicsAXSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -9764,6 +10651,7 @@ class DynamicsAXSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -10463,7 +11351,7 @@ def __init__(self, **kwargs): self.type = 'EloquaObject' -class EloquaSource(CopySource): +class EloquaSource(TabularSource): """A copy activity Eloqua server source. All required parameters must be populated in order to send to Azure. @@ -10484,6 +11372,10 @@ class EloquaSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -10499,6 +11391,7 @@ class EloquaSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -10562,6 +11455,66 @@ def __init__(self, **kwargs): self.type = 'EnvironmentVariableSetup' +class ExecuteDataFlowActivity(ExecutionActivity): + """Execute data flow activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param data_flow: Required. Data flow reference. + :type data_flow: ~azure.mgmt.datafactory.models.DataFlowReference + :param staging: Staging info for execute data flow activity. + :type staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo + :param integration_runtime: The integration runtime reference. + :type integration_runtime: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'data_flow': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'data_flow': {'key': 'typeProperties.dataFlow', 'type': 'DataFlowReference'}, + 'staging': {'key': 'typeProperties.staging', 'type': 'DataFlowStagingInfo'}, + 'integration_runtime': {'key': 'typeProperties.integrationRuntime', 'type': 'IntegrationRuntimeReference'}, + } + + def __init__(self, **kwargs): + super(ExecuteDataFlowActivity, self).__init__(**kwargs) + self.data_flow = kwargs.get('data_flow', None) + self.staging = kwargs.get('staging', None) + self.integration_runtime = kwargs.get('integration_runtime', None) + self.type = 'ExecuteDataFlow' + + class ExecutePipelineActivity(ControlActivity): """Execute pipeline activity. 
@@ -11791,19 +12744,40 @@ class FtpServerLocation(DatasetLocation): :type file_name: object """ - _validation = { - 'type': {'required': True}, - } - + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(FtpServerLocation, self).__init__(**kwargs) + + +class GetDataFactoryOperationStatusResponse(Model): + """Response body structure for get data factory operation status. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param status: Status of the operation. + :type status: str + """ + _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, + 'status': {'key': 'status', 'type': 'str'}, } def __init__(self, **kwargs): - super(FtpServerLocation, self).__init__(**kwargs) + super(GetDataFactoryOperationStatusResponse, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.status = kwargs.get('status', None) class GetMetadataActivity(ExecutionActivity): @@ -12092,7 +13066,7 @@ def __init__(self, **kwargs): self.type = 'GoogleAdWordsObject' -class GoogleAdWordsSource(CopySource): +class GoogleAdWordsSource(TabularSource): """A copy activity Google AdWords service source. All required parameters must be populated in order to send to Azure. @@ -12113,6 +13087,10 @@ class GoogleAdWordsSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -12128,6 +13106,7 @@ class GoogleAdWordsSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -12319,7 +13298,7 @@ def __init__(self, **kwargs): self.type = 'GoogleBigQueryObject' -class GoogleBigQuerySource(CopySource): +class GoogleBigQuerySource(TabularSource): """A copy activity Google BigQuery service source. All required parameters must be populated in order to send to Azure. @@ -12340,6 +13319,10 @@ class GoogleBigQuerySource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -12355,6 +13338,7 @@ class GoogleBigQuerySource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -12421,7 +13405,7 @@ def __init__(self, **kwargs): self.type = 'Greenplum' -class GreenplumSource(CopySource): +class GreenplumSource(TabularSource): """A copy activity Greenplum Database source. All required parameters must be populated in order to send to Azure. @@ -12442,6 +13426,10 @@ class GreenplumSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -12457,6 +13445,7 @@ class GreenplumSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -12698,7 +13687,7 @@ def __init__(self, **kwargs): self.type = 'HBaseObject' -class HBaseSource(CopySource): +class HBaseSource(TabularSource): """A copy activity HBase server source. All required parameters must be populated in order to send to Azure. @@ -12719,6 +13708,10 @@ class HBaseSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -12734,6 +13727,7 @@ class HBaseSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -13918,7 +14912,7 @@ def __init__(self, **kwargs): self.type = 'HiveObject' -class HiveSource(CopySource): +class HiveSource(TabularSource): """A copy activity Hive Server source. All required parameters must be populated in order to send to Azure. @@ -13939,6 +14933,10 @@ class HiveSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -13954,6 +14952,7 @@ class HiveSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -14424,7 +15423,7 @@ def __init__(self, **kwargs): self.type = 'HubspotObject' -class HubspotSource(CopySource): +class HubspotSource(TabularSource): """A copy activity Hubspot Service source. All required parameters must be populated in order to send to Azure. @@ -14445,6 +15444,10 @@ class HubspotSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -14460,6 +15463,7 @@ class HubspotSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -14704,7 +15708,7 @@ def __init__(self, **kwargs): self.type = 'ImpalaObject' -class ImpalaSource(CopySource): +class ImpalaSource(TabularSource): """A copy activity Impala server source. All required parameters must be populated in order to send to Azure. @@ -14725,6 +15729,10 @@ class ImpalaSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -14740,6 +15748,7 @@ class ImpalaSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -14877,7 +15886,7 @@ def __init__(self, **kwargs): self.type = 'InformixSink' -class InformixSource(CopySource): +class InformixSource(TabularSource): """A copy activity source for Informix. All required parameters must be populated in order to send to Azure. @@ -14898,6 +15907,10 @@ class InformixSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: Database query. Type: string (or Expression with resultType string). 
:type query: object @@ -14913,6 +15926,7 @@ class InformixSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -15777,7 +16791,7 @@ def __init__(self, **kwargs): self.type = 'JiraObject' -class JiraSource(CopySource): +class JiraSource(TabularSource): """A copy activity Jira Service source. All required parameters must be populated in order to send to Azure. @@ -15798,6 +16812,10 @@ class JiraSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -15813,6 +16831,7 @@ class JiraSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -16560,7 +17579,7 @@ def __init__(self, **kwargs): self.type = 'MagentoObject' -class MagentoSource(CopySource): +class MagentoSource(TabularSource): """A copy activity Magento server source. All required parameters must be populated in order to send to Azure. @@ -16581,6 +17600,10 @@ class MagentoSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -16596,6 +17619,7 @@ class MagentoSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -16860,6 +17884,55 @@ def __init__(self, **kwargs): self.type = 'Managed' +class MappingDataFlow(DataFlow): + """Mapping data flow. + + All required parameters must be populated in order to send to Azure. + + :param description: The description of the data flow. + :type description: str + :param annotations: List of tags that can be used for describing the data + flow. + :type annotations: list[object] + :param folder: The folder that this data flow is in. If not specified, + the data flow will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DataFlowFolder + :param type: Required. Constant filled by server. + :type type: str + :param sources: List of sources in data flow. + :type sources: list[~azure.mgmt.datafactory.models.DataFlowSource] + :param sinks: List of sinks in data flow. + :type sinks: list[~azure.mgmt.datafactory.models.DataFlowSink] + :param transformations: List of transformations in data flow.
+ :type transformations: list[~azure.mgmt.datafactory.models.Transformation] + :param script: DataFlow script. + :type script: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DataFlowFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sources': {'key': 'typeProperties.sources', 'type': '[DataFlowSource]'}, + 'sinks': {'key': 'typeProperties.sinks', 'type': '[DataFlowSink]'}, + 'transformations': {'key': 'typeProperties.transformations', 'type': '[Transformation]'}, + 'script': {'key': 'typeProperties.script', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(MappingDataFlow, self).__init__(**kwargs) + self.sources = kwargs.get('sources', None) + self.sinks = kwargs.get('sinks', None) + self.transformations = kwargs.get('transformations', None) + self.script = kwargs.get('script', None) + self.type = 'MappingDataFlow' + + class MariaDBLinkedService(LinkedService): """MariaDB server linked service. @@ -16917,7 +17990,7 @@ def __init__(self, **kwargs): self.type = 'MariaDB' -class MariaDBSource(CopySource): +class MariaDBSource(TabularSource): """A copy activity MariaDB server source. All required parameters must be populated in order to send to Azure. @@ -16938,6 +18011,10 @@ class MariaDBSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -16953,6 +18030,7 @@ class MariaDBSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -17160,7 +18238,7 @@ def __init__(self, **kwargs): self.type = 'MarketoObject' -class MarketoSource(CopySource): +class MarketoSource(TabularSource): """A copy activity Marketo server source. All required parameters must be populated in order to send to Azure. @@ -17181,6 +18259,10 @@ class MarketoSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -17196,6 +18278,7 @@ class MarketoSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -17831,6 +18914,10 @@ class MongoDbV2Source(CopySource): main purpose is to avoid hit the limitation of response size. Type: integer (or Expression with resultType integer). 
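# The MappingDataFlow model introduced above maps sources, sinks,
# transformations and script under typeProperties. A minimal construction
# sketch; the stream names and the script body are hypothetical, and it
# assumes the Transformation-derived models accept a name keyword:
from azure.mgmt.datafactory.models import (
    DataFlowSink, DataFlowSource, MappingDataFlow)

data_flow = MappingDataFlow(
    description='Copies one stream to one sink',
    sources=[DataFlowSource(name='src1')],
    sinks=[DataFlowSink(name='sink1')],
    script='src1 ~> sink1',  # serialized as typeProperties.script
)
assert data_flow.type == 'MappingDataFlow'  # constant filled by the model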
:type batch_size: object + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object """ _validation = { @@ -17846,6 +18933,7 @@ class MongoDbV2Source(CopySource): 'filter': {'key': 'filter', 'type': 'object'}, 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, } def __init__(self, **kwargs): @@ -17853,6 +18941,7 @@ def __init__(self, **kwargs): self.filter = kwargs.get('filter', None) self.cursor_methods = kwargs.get('cursor_methods', None) self.batch_size = kwargs.get('batch_size', None) + self.query_timeout = kwargs.get('query_timeout', None) self.type = 'MongoDbV2Source' @@ -17914,7 +19003,7 @@ def __init__(self, **kwargs): self.type = 'MySql' -class MySqlSource(CopySource): +class MySqlSource(TabularSource): """A copy activity source for MySQL databases. All required parameters must be populated in order to send to Azure. @@ -17935,6 +19024,10 @@ class MySqlSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object @@ -17950,6 +19043,7 @@ class MySqlSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -18106,7 +19200,7 @@ def __init__(self, **kwargs): self.partition_lower_bound = kwargs.get('partition_lower_bound', None) -class NetezzaSource(CopySource): +class NetezzaSource(TabularSource): """A copy activity Netezza source. All required parameters must be populated in order to send to Azure. @@ -18127,6 +19221,10 @@ class NetezzaSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -18151,6 +19249,7 @@ class NetezzaSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'NetezzaPartitionSettings'}, @@ -18582,7 +19681,7 @@ def __init__(self, **kwargs): self.type = 'OdbcSink' -class OdbcSource(CopySource): +class OdbcSource(TabularSource): """A copy activity source for ODBC databases. All required parameters must be populated in order to send to Azure. 
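# Note that MongoDbV2Source, unlike the sources re-based onto
# TabularSource in this change, stays on CopySource and has query_timeout
# added directly alongside its existing cursor options. A sketch with
# hypothetical filter and batching values:
from azure.mgmt.datafactory.models import (
    MongoDbCursorMethodsProperties, MongoDbV2Source)

source = MongoDbV2Source(
    filter='{"status": "active"}',
    cursor_methods=MongoDbCursorMethodsProperties(limit=1000),
    batch_size=100,            # serialized as batchSize
    query_timeout='00:05:00',  # serialized as queryTimeout
)
assert source.type == 'MongoDbV2Source'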
@@ -18603,6 +19702,10 @@ class OdbcSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object @@ -18618,6 +19721,7 @@ class OdbcSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -19340,7 +20444,7 @@ def __init__(self, **kwargs): self.type = 'OracleServiceCloudObject' -class OracleServiceCloudSource(CopySource): +class OracleServiceCloudSource(TabularSource): """A copy activity Oracle Service Cloud source. All required parameters must be populated in order to send to Azure. @@ -19361,6 +20465,10 @@ class OracleServiceCloudSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -19376,6 +20484,7 @@ class OracleServiceCloudSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -20126,7 +21235,7 @@ def __init__(self, **kwargs): self.type = 'PaypalObject' -class PaypalSource(CopySource): +class PaypalSource(TabularSource): """A copy activity Paypal Service source. All required parameters must be populated in order to send to Azure. @@ -20147,6 +21256,10 @@ class PaypalSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -20162,6 +21275,7 @@ class PaypalSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -20350,7 +21464,7 @@ def __init__(self, **kwargs): self.type = 'PhoenixObject' -class PhoenixSource(CopySource): +class PhoenixSource(TabularSource): """A copy activity Phoenix server source. All required parameters must be populated in order to send to Azure. @@ -20371,6 +21485,10 @@ class PhoenixSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. 
Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -20386,6 +21504,7 @@ class PhoenixSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -20775,7 +21894,7 @@ def __init__(self, **kwargs): self.type = 'PostgreSql' -class PostgreSqlSource(CopySource): +class PostgreSqlSource(TabularSource): """A copy activity source for PostgreSQL databases. All required parameters must be populated in order to send to Azure. @@ -20796,6 +21915,10 @@ class PostgreSqlSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object @@ -20811,6 +21934,7 @@ class PostgreSqlSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -21080,7 +22204,7 @@ def __init__(self, **kwargs): self.type = 'PrestoObject' -class PrestoSource(CopySource): +class PrestoSource(TabularSource): """A copy activity Presto server source. All required parameters must be populated in order to send to Azure. @@ -21101,6 +22225,10 @@ class PrestoSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -21116,6 +22244,7 @@ class PrestoSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -21273,7 +22402,7 @@ def __init__(self, **kwargs): self.type = 'QuickBooksObject' -class QuickBooksSource(CopySource): +class QuickBooksSource(TabularSource): """A copy activity QuickBooks server source. All required parameters must be populated in order to send to Azure. @@ -21294,6 +22423,10 @@ class QuickBooksSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. 
Type: string (or Expression with resultType string). :type query: object @@ -21309,6 +22442,7 @@ class QuickBooksSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -21843,7 +22977,7 @@ def __init__(self, **kwargs): self.type = 'ResponsysObject' -class ResponsysSource(CopySource): +class ResponsysSource(TabularSource): """A copy activity Responsys source. All required parameters must be populated in order to send to Azure. @@ -21864,6 +22998,10 @@ class ResponsysSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -21879,6 +23017,7 @@ class ResponsysSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -22490,7 +23629,7 @@ def __init__(self, **kwargs): self.type = 'SalesforceMarketingCloudObject' -class SalesforceMarketingCloudSource(CopySource): +class SalesforceMarketingCloudSource(TabularSource): """A copy activity Salesforce Marketing Cloud source. All required parameters must be populated in order to send to Azure. @@ -22511,6 +23650,10 @@ class SalesforceMarketingCloudSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -22526,6 +23669,7 @@ class SalesforceMarketingCloudSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -22925,7 +24069,7 @@ def __init__(self, **kwargs): self.type = 'SalesforceSink' -class SalesforceSource(CopySource): +class SalesforceSource(TabularSource): """A copy activity Salesforce source. All required parameters must be populated in order to send to Azure. @@ -22946,6 +24090,10 @@ class SalesforceSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: Database query. Type: string (or Expression with resultType string). 
:type query: object @@ -22965,6 +24113,7 @@ class SalesforceSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, } @@ -23107,7 +24256,7 @@ def __init__(self, **kwargs): self.type = 'SapBW' -class SapBwSource(CopySource): +class SapBwSource(TabularSource): """A copy activity source for SapBW server via MDX. All required parameters must be populated in order to send to Azure. @@ -23128,6 +24277,10 @@ class SapBwSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: MDX query. Type: string (or Expression with resultType string). :type query: object @@ -23143,6 +24296,7 @@ class SapBwSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -23332,7 +24486,7 @@ def __init__(self, **kwargs): self.type = 'SapCloudForCustomerSink' -class SapCloudForCustomerSource(CopySource): +class SapCloudForCustomerSource(TabularSource): """A copy activity source for SAP Cloud for Customer source. All required parameters must be populated in order to send to Azure. @@ -23353,6 +24507,10 @@ class SapCloudForCustomerSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or Expression with resultType string). :type query: object @@ -23368,6 +24526,7 @@ class SapCloudForCustomerSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -23502,7 +24661,7 @@ def __init__(self, **kwargs): self.type = 'SapEccResource' -class SapEccSource(CopySource): +class SapEccSource(TabularSource): """A copy activity source for SAP ECC source. All required parameters must be populated in order to send to Azure. @@ -23523,6 +24682,10 @@ class SapEccSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: SAP ECC OData query. For example, "$top=1". Type: string (or Expression with resultType string). 
:type query: object @@ -23538,6 +24701,7 @@ class SapEccSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -23620,7 +24784,7 @@ def __init__(self, **kwargs): self.type = 'SapHana' -class SapHanaSource(CopySource): +class SapHanaSource(TabularSource): """A copy activity source for SAP HANA source. All required parameters must be populated in order to send to Azure. @@ -23641,6 +24805,10 @@ class SapHanaSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: SAP HANA Sql query. Type: string (or Expression with resultType string). :type query: object @@ -23659,6 +24827,7 @@ class SapHanaSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'packet_size': {'key': 'packetSize', 'type': 'object'}, } @@ -23822,7 +24991,7 @@ def __init__(self, **kwargs): self.type = 'SapOpenHub' -class SapOpenHubSource(CopySource): +class SapOpenHubSource(TabularSource): """A copy activity source for SAP Business Warehouse Open Hub Destination source. @@ -23844,6 +25013,10 @@ class SapOpenHubSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param exclude_last_request: Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with resultType boolean). @@ -23865,6 +25038,7 @@ class SapOpenHubSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'exclude_last_request': {'key': 'excludeLastRequest', 'type': 'object'}, 'base_request_id': {'key': 'baseRequestId', 'type': 'object'}, } @@ -24175,7 +25349,7 @@ def __init__(self, **kwargs): self.type = 'SapTableResource' -class SapTableSource(CopySource): +class SapTableSource(TabularSource): """A copy activity source for SAP Table source. All required parameters must be populated in order to send to Azure. @@ -24196,6 +25370,10 @@ class SapTableSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param row_count: The number of rows to be retrieved. Type: integer(or Expression with resultType integer). 
:type row_count: object @@ -24240,6 +25418,7 @@ class SapTableSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'row_count': {'key': 'rowCount', 'type': 'object'}, 'row_skips': {'key': 'rowSkips', 'type': 'object'}, 'rfc_table_fields': {'key': 'rfcTableFields', 'type': 'object'}, @@ -24905,7 +26084,7 @@ def __init__(self, **kwargs): self.type = 'ServiceNowObject' -class ServiceNowSource(CopySource): +class ServiceNowSource(TabularSource): """A copy activity ServiceNow server source. All required parameters must be populated in order to send to Azure. @@ -24926,6 +26105,10 @@ class ServiceNowSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -24941,6 +26124,7 @@ class ServiceNowSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -25327,7 +26511,7 @@ def __init__(self, **kwargs): self.type = 'ShopifyObject' -class ShopifySource(CopySource): +class ShopifySource(TabularSource): """A copy activity Shopify Service source. All required parameters must be populated in order to send to Azure. @@ -25348,6 +26532,10 @@ class ShopifySource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -25363,6 +26551,7 @@ class ShopifySource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -25561,7 +26750,7 @@ def __init__(self, **kwargs): self.type = 'SparkObject' -class SparkSource(CopySource): +class SparkSource(TabularSource): """A copy activity Spark Server source. All required parameters must be populated in order to send to Azure. @@ -25582,6 +26771,10 @@ class SparkSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -25597,6 +26790,7 @@ class SparkSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -25677,7 +26871,7 @@ def __init__(self, **kwargs): self.type = 'SqlDWSink' -class SqlDWSource(CopySource): +class SqlDWSource(TabularSource): """A copy activity SQL Data Warehouse source. All required parameters must be populated in order to send to Azure. @@ -25698,6 +26892,10 @@ class SqlDWSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param sql_reader_query: SQL Data Warehouse reader query. Type: string (or Expression with resultType string). :type sql_reader_query: object @@ -25722,6 +26920,7 @@ class SqlDWSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'}, @@ -25816,7 +27015,7 @@ def __init__(self, **kwargs): self.type = 'SqlMISink' -class SqlMISource(CopySource): +class SqlMISource(TabularSource): """A copy activity Azure SQL Managed Instance source. All required parameters must be populated in order to send to Azure. @@ -25837,6 +27036,10 @@ class SqlMISource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). :type sql_reader_query: object @@ -25862,6 +27065,7 @@ class SqlMISource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, @@ -26020,7 +27224,7 @@ def __init__(self, **kwargs): self.type = 'SqlServerSink' -class SqlServerSource(CopySource): +class SqlServerSource(TabularSource): """A copy activity SQL server source. All required parameters must be populated in order to send to Azure. @@ -26041,6 +27245,10 @@ class SqlServerSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. 
Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). :type sql_reader_query: object @@ -26066,6 +27274,7 @@ class SqlServerSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, @@ -26290,7 +27499,7 @@ def __init__(self, **kwargs): self.type = 'SqlSink' -class SqlSource(CopySource): +class SqlSource(TabularSource): """A copy activity SQL source. All required parameters must be populated in order to send to Azure. @@ -26311,6 +27520,10 @@ class SqlSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). :type sql_reader_query: object @@ -26334,6 +27547,7 @@ class SqlSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, @@ -26493,7 +27707,7 @@ def __init__(self, **kwargs): self.type = 'SquareObject' -class SquareSource(CopySource): +class SquareSource(TabularSource): """A copy activity Square Service source. All required parameters must be populated in order to send to Azure. @@ -26514,6 +27728,10 @@ class SquareSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -26529,6 +27747,7 @@ class SquareSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -27264,7 +28483,7 @@ def __init__(self, **kwargs): self.type = 'Sybase' -class SybaseSource(CopySource): +class SybaseSource(TabularSource): """A copy activity source for Sybase databases. All required parameters must be populated in order to send to Azure. 
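# The SQL-family sources above (SqlDWSource, SqlMISource, SqlServerSource,
# SqlSource) keep their reader-query and stored-procedure fields and gain
# query_timeout from TabularSource. A sketch of a SqlSource using both;
# the procedure name and its parameter are hypothetical:
from azure.mgmt.datafactory.models import SqlSource, StoredProcedureParameter

source = SqlSource(
    query_timeout='01:30:00',  # serialized as queryTimeout
    sql_reader_stored_procedure_name='usp_export_rows',
    stored_procedure_parameters={
        'batchId': StoredProcedureParameter(value='42'),
    },
)
assert source.type == 'SqlSource'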
@@ -27285,6 +28504,10 @@ class SybaseSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object @@ -27300,6 +28523,7 @@ class SybaseSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -27471,7 +28695,7 @@ def __init__(self, **kwargs): self.partition_lower_bound = kwargs.get('partition_lower_bound', None) -class TeradataSource(CopySource): +class TeradataSource(TabularSource): """A copy activity Teradata source. All required parameters must be populated in order to send to Azure. @@ -27492,6 +28716,10 @@ class TeradataSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: Teradata query. Type: string (or Expression with resultType string). :type query: object @@ -27516,6 +28744,7 @@ class TeradataSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'}, @@ -28407,7 +29636,7 @@ def __init__(self, **kwargs): self.type = 'Vertica' -class VerticaSource(CopySource): +class VerticaSource(TabularSource): """A copy activity Vertica source. All required parameters must be populated in order to send to Azure. @@ -28428,6 +29657,10 @@ class VerticaSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -28443,6 +29676,7 @@ class VerticaSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -29215,7 +30449,7 @@ def __init__(self, **kwargs): self.type = 'XeroObject' -class XeroSource(CopySource): +class XeroSource(TabularSource): """A copy activity Xero Service source. All required parameters must be populated in order to send to Azure. @@ -29236,6 +30470,10 @@ class XeroSource(CopySource): :type max_concurrent_connections: object :param type: Required. 
Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -29251,6 +30489,7 @@ class XeroSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -29393,7 +30632,7 @@ def __init__(self, **kwargs): self.type = 'ZohoObject' -class ZohoSource(CopySource): +class ZohoSource(TabularSource): """A copy activity Zoho server source. All required parameters must be populated in order to send to Azure. @@ -29414,6 +30653,10 @@ class ZohoSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -29429,6 +30672,7 @@ class ZohoSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py index 810501b3a5a2..c72789617866 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py @@ -286,6 +286,22 @@ def __init__(self, *, value, continuation_token: str=None, **kwargs) -> None: self.continuation_token = continuation_token +class AddDataFlowToDebugSessionResponse(Model): + """Response body structure for starting a data flow debug session. + + :param job_version: The ID of the data flow debug job version.
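# From here on, the same models are defined again for _models_py3.py,
# where arguments are typed and keyword-only rather than read from
# **kwargs; both variants populate identical attributes. A tiny sketch
# with a hypothetical version string:
from azure.mgmt.datafactory.models import AddDataFlowToDebugSessionResponse

resp = AddDataFlowToDebugSessionResponse(job_version='debug-job-version-1')
print(resp.job_version)  # debug-job-version-1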
+ :type job_version: str + """ + + _attribute_map = { + 'job_version': {'key': 'jobVersion', 'type': 'str'}, + } + + def __init__(self, *, job_version: str=None, **kwargs) -> None: + super(AddDataFlowToDebugSessionResponse, self).__init__(**kwargs) + self.job_version = job_version + + class LinkedService(Model): """The Azure Data Factory nested object which contains the information and credential which can be used to connect with related store or compute @@ -501,10 +517,11 @@ class Dataset(Model): MongoDbCollectionDataset, FileShareDataset, Office365Dataset, AzureBlobFSDataset, AzureDataLakeStoreDataset, CommonDataServiceForAppsEntityDataset, DynamicsCrmEntityDataset, - DynamicsEntityDataset, DocumentDbCollectionDataset, CustomDataset, - CassandraTableDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, - AzureSqlTableDataset, AzureTableDataset, AzureBlobDataset, BinaryDataset, - OrcDataset, JsonDataset, DelimitedTextDataset, ParquetDataset, AvroDataset, + DynamicsEntityDataset, DocumentDbCollectionDataset, + CosmosDbSqlApiCollectionDataset, CustomDataset, CassandraTableDataset, + AzureSqlDWTableDataset, AzureSqlMITableDataset, AzureSqlTableDataset, + AzureTableDataset, AzureBlobDataset, BinaryDataset, OrcDataset, + JsonDataset, DelimitedTextDataset, ParquetDataset, AvroDataset, AmazonS3Dataset All required parameters must be populated in order to send to Azure. @@ -555,7 +572,7 @@ class Dataset(Model): } _subtype_map = { - 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 
'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'Db2Table': 'Db2TableDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'Orc': 'OrcDataset', 'Json': 'JsonDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 
'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'Db2Table': 'Db2TableDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDbSqlApiCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'Orc': 'OrcDataset', 'Json': 'JsonDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'} } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: @@ -635,26 +652,13 @@ class CopySource(Model): """A copy activity source. You probably want to use the sub-classes and not this class directly. 
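# The Dataset._subtype_map update above is what lets msrest resolve the
# 'type' discriminator to the new CosmosDbSqlApiCollectionDataset. A
# rough sketch of that dispatch; the payload is hypothetical and it
# assumes msrest's Deserializer accepts an already-parsed dict:
from msrest import Deserializer
from azure.mgmt.datafactory import models

classes = {name: obj for name, obj in vars(models).items() if isinstance(obj, type)}
deserialize = Deserializer(classes)

payload = {
    'type': 'CosmosDbSqlApiCollection',
    'linkedServiceName': {'referenceName': 'cosmosLs', 'type': 'LinkedServiceReference'},
    'typeProperties': {'collectionName': 'items'},
}
dataset = deserialize('Dataset', payload)
print(type(dataset).__name__)  # CosmosDbSqlApiCollectionDataset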
Known - sub-classes are: AmazonRedshiftSource, GoogleAdWordsSource, - OracleServiceCloudSource, DynamicsAXSource, ResponsysSource, - SalesforceMarketingCloudSource, VerticaSource, NetezzaSource, ZohoSource, - XeroSource, SquareSource, SparkSource, ShopifySource, ServiceNowSource, - QuickBooksSource, PrestoSource, PhoenixSource, PaypalSource, MarketoSource, - AzureMariaDBSource, MariaDBSource, MagentoSource, JiraSource, ImpalaSource, - HubspotSource, HiveSource, HBaseSource, GreenplumSource, - GoogleBigQuerySource, EloquaSource, DrillSource, CouchbaseSource, - ConcurSource, AzurePostgreSqlSource, AmazonMWSSource, HttpSource, - AzureBlobFSSource, AzureDataLakeStoreSource, Office365Source, - CosmosDbMongoDbApiSource, MongoDbV2Source, MongoDbSource, CassandraSource, - WebSource, TeradataSource, OracleSource, AzureDataExplorerSource, - AzureMySqlSource, HdfsSource, FileSystemSource, SqlDWSource, SqlMISource, - AzureSqlSource, SqlServerSource, SqlSource, RestSource, SapTableSource, - SapOpenHubSource, SapHanaSource, SapEccSource, SapCloudForCustomerSource, - SalesforceServiceCloudSource, SalesforceSource, ODataSource, SapBwSource, - SybaseSource, PostgreSqlSource, MySqlSource, OdbcSource, Db2Source, - MicrosoftAccessSource, InformixSource, RelationalSource, - CommonDataServiceForAppsSource, DynamicsCrmSource, DynamicsSource, - DocumentDbCollectionSource, BlobSource, AzureTableSource, BinarySource, + sub-classes are: HttpSource, AzureBlobFSSource, AzureDataLakeStoreSource, + Office365Source, CosmosDbMongoDbApiSource, MongoDbV2Source, MongoDbSource, + WebSource, OracleSource, AzureDataExplorerSource, HdfsSource, + FileSystemSource, RestSource, SalesforceServiceCloudSource, ODataSource, + MicrosoftAccessSource, RelationalSource, CommonDataServiceForAppsSource, + DynamicsCrmSource, DynamicsSource, CosmosDbSqlApiSource, + DocumentDbCollectionSource, BlobSource, TabularSource, BinarySource, OrcSource, JsonSource, DelimitedTextSource, ParquetSource, AvroSource All required parameters must be populated in order to send to Azure. 
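# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the patch): the docstring change above
# reflects a reshaped hierarchy -- most tabular copy sources now hang off a
# new intermediate base, TabularSource, defined later in this file. Because
# msrest flattens nested _subtype_map entries when resolving the 'type'
# discriminator, deserializing against the CopySource base should still yield
# the concrete subclass. The payload below is made up for illustration.
from azure.mgmt.datafactory.models import AzureSqlSource, CopySource, TabularSource

payload = {
    'type': 'AzureSqlSource',        # discriminator value
    'sqlReaderQuery': 'SELECT 1',    # illustrative query
    'queryTimeout': '02:00:00',      # new property contributed by TabularSource
}

source = CopySource.deserialize(payload)
assert isinstance(source, AzureSqlSource)   # concrete type resolved
assert isinstance(source, TabularSource)    # via the new intermediate base
assert source.query_timeout == '02:00:00'
# ---------------------------------------------------------------------------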
@@ -690,7 +694,7 @@ class CopySource(Model): } _subtype_map = { - 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'AzureMariaDBSource': 'AzureMariaDBSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'CassandraSource': 'CassandraSource', 'WebSource': 'WebSource', 'TeradataSource': 'TeradataSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureMySqlSource': 'AzureMySqlSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'RestSource': 'RestSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SalesforceSource': 'SalesforceSource', 'ODataSource': 'ODataSource', 'SapBwSource': 'SapBwSource', 'SybaseSource': 'SybaseSource', 'PostgreSqlSource': 'PostgreSqlSource', 'MySqlSource': 'MySqlSource', 'OdbcSource': 'OdbcSource', 'Db2Source': 'Db2Source', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'InformixSource': 'InformixSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'AzureTableSource': 'AzureTableSource', 'BinarySource': 'BinarySource', 'OrcSource': 'OrcSource', 'JsonSource': 'JsonSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource', 'AvroSource': 'AvroSource'} + 'type': {'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 
'MongoDbSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'ODataSource': 'ODataSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'CosmosDbSqlApiSource': 'CosmosDbSqlApiSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'TabularSource': 'TabularSource', 'BinarySource': 'BinarySource', 'OrcSource': 'OrcSource', 'JsonSource': 'JsonSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource', 'AvroSource': 'AvroSource'} } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: @@ -702,7 +706,73 @@ def __init__(self, *, additional_properties=None, source_retry_count=None, sourc self.type = None -class AmazonMWSSource(CopySource): +class TabularSource(CopySource): + """Copy activity sources of tabular type. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AmazonRedshiftSource, GoogleAdWordsSource, + OracleServiceCloudSource, DynamicsAXSource, ResponsysSource, + SalesforceMarketingCloudSource, VerticaSource, NetezzaSource, ZohoSource, + XeroSource, SquareSource, SparkSource, ShopifySource, ServiceNowSource, + QuickBooksSource, PrestoSource, PhoenixSource, PaypalSource, MarketoSource, + AzureMariaDBSource, MariaDBSource, MagentoSource, JiraSource, ImpalaSource, + HubspotSource, HiveSource, HBaseSource, GreenplumSource, + GoogleBigQuerySource, EloquaSource, DrillSource, CouchbaseSource, + ConcurSource, AzurePostgreSqlSource, AmazonMWSSource, CassandraSource, + TeradataSource, AzureMySqlSource, SqlDWSource, SqlMISource, AzureSqlSource, + SqlServerSource, SqlSource, SapTableSource, SapOpenHubSource, + SapHanaSource, SapEccSource, SapCloudForCustomerSource, SalesforceSource, + SapBwSource, SybaseSource, PostgreSqlSource, MySqlSource, OdbcSource, + Db2Source, InformixSource, AzureTableSource + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type query_timeout: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + } + + _subtype_map = { + 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'AzureMariaDBSource': 'AzureMariaDBSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'CassandraSource': 'CassandraSource', 'TeradataSource': 'TeradataSource', 'AzureMySqlSource': 'AzureMySqlSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'SapBwSource': 'SapBwSource', 'SybaseSource': 'SybaseSource', 'PostgreSqlSource': 'PostgreSqlSource', 'MySqlSource': 'MySqlSource', 'OdbcSource': 'OdbcSource', 'Db2Source': 'Db2Source', 'InformixSource': 'InformixSource', 'AzureTableSource': 'AzureTableSource'} + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, **kwargs) -> None: + super(TabularSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query_timeout = query_timeout + self.type = 'TabularSource' + + +class AmazonMWSSource(TabularSource): """A copy activity Amazon Marketplace Web Service source. All required parameters must be populated in order to send to Azure. @@ -723,6 +793,10 @@ class AmazonMWSSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. 
Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -738,11 +812,12 @@ class AmazonMWSSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(AmazonMWSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: + super(AmazonMWSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.type = 'AmazonMWSSource' @@ -821,7 +896,7 @@ def __init__(self, *, server, database, additional_properties=None, connect_via= self.type = 'AmazonRedshift' -class AmazonRedshiftSource(CopySource): +class AmazonRedshiftSource(TabularSource): """A copy activity source for Amazon Redshift Source. All required parameters must be populated in order to send to Azure. @@ -842,6 +917,10 @@ class AmazonRedshiftSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: Database query. Type: string (or Expression with resultType string). 
:type query: object @@ -863,12 +942,13 @@ class AmazonRedshiftSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'redshift_unload_settings': {'key': 'redshiftUnloadSettings', 'type': 'RedshiftUnloadSettings'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, redshift_unload_settings=None, **kwargs) -> None: - super(AmazonRedshiftSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, redshift_unload_settings=None, **kwargs) -> None: + super(AmazonRedshiftSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.redshift_unload_settings = redshift_unload_settings self.type = 'AmazonRedshiftSource' @@ -1538,8 +1618,8 @@ class CopySink(Model): DynamicsCrmSink, DynamicsSink, MicrosoftAccessSink, InformixSink, OdbcSink, AzureSearchIndexSink, AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, SqlDWSink, SqlMISink, AzureSqlSink, SqlServerSink, SqlSink, - DocumentDbCollectionSink, FileSystemSink, BlobSink, BinarySink, - ParquetSink, AvroSink, AzureTableSink, AzureQueueSink, + CosmosDbSqlApiSink, DocumentDbCollectionSink, FileSystemSink, BlobSink, + BinarySink, ParquetSink, AvroSink, AzureTableSink, AzureQueueSink, SapCloudForCustomerSink, AzureMySqlSink, AzurePostgreSqlSink, OrcSink, JsonSink, DelimitedTextSink @@ -1585,7 +1665,7 @@ class CopySink(Model): } _subtype_map = { - 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'OrcSink': 'OrcSink', 'JsonSink': 'JsonSink', 'DelimitedTextSink': 'DelimitedTextSink'} + 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 
'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'OrcSink': 'OrcSink', 'JsonSink': 'JsonSink', 'DelimitedTextSink': 'DelimitedTextSink'} } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: @@ -2730,11 +2810,12 @@ class ExecutionActivity(Activity): """Base class for all execution activities. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureFunctionActivity, DatabricksSparkPythonActivity, - DatabricksSparkJarActivity, DatabricksNotebookActivity, - DataLakeAnalyticsUSQLActivity, AzureMLUpdateResourceActivity, - AzureMLBatchExecutionActivity, GetMetadataActivity, WebActivity, - LookupActivity, AzureDataExplorerCommandActivity, DeleteActivity, + sub-classes are: ExecuteDataFlowActivity, AzureFunctionActivity, + DatabricksSparkPythonActivity, DatabricksSparkJarActivity, + DatabricksNotebookActivity, DataLakeAnalyticsUSQLActivity, + AzureMLUpdateResourceActivity, AzureMLBatchExecutionActivity, + GetMetadataActivity, WebActivity, LookupActivity, + AzureDataExplorerCommandActivity, DeleteActivity, SqlServerStoredProcedureActivity, CustomActivity, ExecuteSSISPackageActivity, HDInsightSparkActivity, HDInsightStreamingActivity, HDInsightMapReduceActivity, @@ -2779,7 +2860,7 @@ class ExecutionActivity(Activity): } _subtype_map = { - 'type': {'AzureFunctionActivity': 'AzureFunctionActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'GetMetadata': 'GetMetadataActivity', 'WebActivity': 'WebActivity', 'Lookup': 'LookupActivity', 'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'Delete': 'DeleteActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'Custom': 'CustomActivity', 'ExecuteSSISPackage': 'ExecuteSSISPackageActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'Copy': 'CopyActivity'} + 'type': {'ExecuteDataFlow': 'ExecuteDataFlowActivity', 'AzureFunctionActivity': 'AzureFunctionActivity', 
'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'GetMetadata': 'GetMetadataActivity', 'WebActivity': 'WebActivity', 'Lookup': 'LookupActivity', 'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'Delete': 'DeleteActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'Custom': 'CustomActivity', 'ExecuteSSISPackage': 'ExecuteSSISPackageActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'Copy': 'CopyActivity'} } def __init__(self, *, name: str, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, **kwargs) -> None: @@ -3885,7 +3966,7 @@ def __init__(self, *, additional_properties=None, connect_via=None, description: self.type = 'AzureMariaDB' -class AzureMariaDBSource(CopySource): +class AzureMariaDBSource(TabularSource): """A copy activity Azure MariaDB source. All required parameters must be populated in order to send to Azure. @@ -3906,6 +3987,10 @@ class AzureMariaDBSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -3921,11 +4006,12 @@ class AzureMariaDBSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(AzureMariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: + super(AzureMariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.type = 'AzureMariaDBSource' @@ -4355,7 +4441,7 @@ def __init__(self, *, additional_properties=None, write_batch_size=None, write_b self.type = 'AzureMySqlSink' -class AzureMySqlSource(CopySource): +class AzureMySqlSource(TabularSource): """A copy activity Azure MySQL source. All required parameters must be populated in order to send to Azure. 
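# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the patch): the new 'ExecuteDataFlow' entry
# in the discriminator map above means an activity payload of that type now
# resolves to the new ExecuteDataFlowActivity model. The payload is made up;
# the ExecuteDataFlow typeProperties are defined elsewhere in this change and
# omitted here.
from azure.mgmt.datafactory.models import ExecuteDataFlowActivity, ExecutionActivity

activity = ExecutionActivity.deserialize({
    'name': 'RunMappingDataFlow',
    'type': 'ExecuteDataFlow',
})
assert isinstance(activity, ExecuteDataFlowActivity)
# ---------------------------------------------------------------------------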
@@ -4376,6 +4462,10 @@ class AzureMySqlSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object @@ -4391,11 +4481,12 @@ class AzureMySqlSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(AzureMySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: + super(AzureMySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.type = 'AzureMySqlSource' @@ -4572,7 +4663,7 @@ def __init__(self, *, additional_properties=None, write_batch_size=None, write_b self.type = 'AzurePostgreSqlSink' -class AzurePostgreSqlSource(CopySource): +class AzurePostgreSqlSource(TabularSource): """A copy activity Azure PostgreSQL source. All required parameters must be populated in order to send to Azure. @@ -4593,6 +4684,10 @@ class AzurePostgreSqlSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -4608,11 +4703,12 @@ class AzurePostgreSqlSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(AzurePostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: + super(AzurePostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.type = 'AzurePostgreSqlSource' @@ -5358,7 +5454,7 @@ def __init__(self, *, additional_properties=None, write_batch_size=None, write_b self.type = 'AzureSqlSink' -class AzureSqlSource(CopySource): +class AzureSqlSource(TabularSource): """A copy activity Azure SQL source. All required parameters must be populated in order to send to Azure. @@ -5379,6 +5475,10 @@ class AzureSqlSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). 
:type sql_reader_query: object @@ -5404,14 +5504,15 @@ class AzureSqlSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: - super(AzureSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: + super(AzureSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters @@ -5690,7 +5791,7 @@ def __init__(self, *, additional_properties=None, write_batch_size=None, write_b self.type = 'AzureTableSink' -class AzureTableSource(CopySource): +class AzureTableSource(TabularSource): """A copy activity Azure Table source. All required parameters must be populated in order to send to Azure. @@ -5711,6 +5812,10 @@ class AzureTableSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param azure_table_source_query: Azure Table source query. Type: string (or Expression with resultType string). 
:type azure_table_source_query: object @@ -5730,12 +5835,13 @@ class AzureTableSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'}, 'azure_table_source_ignore_table_not_found': {'key': 'azureTableSourceIgnoreTableNotFound', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, azure_table_source_query=None, azure_table_source_ignore_table_not_found=None, **kwargs) -> None: - super(AzureTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, azure_table_source_query=None, azure_table_source_ignore_table_not_found=None, **kwargs) -> None: + super(AzureTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.azure_table_source_query = azure_table_source_query self.azure_table_source_ignore_table_not_found = azure_table_source_ignore_table_not_found self.type = 'AzureTableSource' @@ -6421,7 +6527,7 @@ def __init__(self, *, host, additional_properties=None, connect_via=None, descri self.type = 'Cassandra' -class CassandraSource(CopySource): +class CassandraSource(TabularSource): """A copy activity source for a Cassandra database. All required parameters must be populated in order to send to Azure. @@ -6442,6 +6548,10 @@ class CassandraSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: Database query. Should be a SQL-92 query expression or Cassandra Query Language (CQL) command. Type: string (or Expression with resultType string). 
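# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the patch): every source migrated onto
# TabularSource in this change (AzureTableSource above, CassandraSource below,
# and the rest) now accepts the inherited query_timeout keyword, serialized as
# 'queryTimeout' using the timespan pattern quoted in the docstrings. Values
# here are made up for illustration.
from azure.mgmt.datafactory.models import AzureTableSource

source = AzureTableSource(
    azure_table_source_query="PartitionKey eq 'region1'",  # illustrative query
    query_timeout='00:10:00',                              # inherited from TabularSource
)
body = source.serialize()
# body is roughly:
# {'type': 'AzureTableSource',
#  'azureTableSourceQuery': "PartitionKey eq 'region1'",
#  'queryTimeout': '00:10:00'}
# ---------------------------------------------------------------------------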
@@ -6468,12 +6578,13 @@ class CassandraSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'consistency_level': {'key': 'consistencyLevel', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, consistency_level=None, **kwargs) -> None: - super(CassandraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, consistency_level=None, **kwargs) -> None: + super(CassandraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.consistency_level = consistency_level self.type = 'CassandraSource' @@ -7174,7 +7285,7 @@ def __init__(self, *, linked_service_name, additional_properties=None, descripti self.type = 'ConcurObject' -class ConcurSource(CopySource): +class ConcurSource(TabularSource): """A copy activity Concur Service source. All required parameters must be populated in order to send to Azure. @@ -7195,6 +7306,10 @@ class ConcurSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -7210,11 +7325,12 @@ class ConcurSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(ConcurSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: + super(ConcurSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.type = 'ConcurSource' @@ -7352,13 +7468,18 @@ class CosmosDbLinkedService(LinkedService): :type annotations: list[object] :param type: Required. 
Constant filled by server. :type type: str - :param connection_string: Required. The connection string. Type: string, + :param connection_string: The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object - :param account_key: The Azure key vault secret reference of accountKey in - connection string. - :type account_key: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param account_endpoint: The endpoint of the Azure CosmosDB account. Type: + string (or Expression with resultType string) + :type account_endpoint: object + :param database: The name of the database. Type: string (or Expression + with resultType string) + :type database: object + :param account_key: The account key of the Azure CosmosDB account. Type: + SecureString or AzureKeyVaultSecretReference. + :type account_key: ~azure.mgmt.datafactory.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -7367,7 +7488,6 @@ class CosmosDbLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'connection_string': {'required': True}, } _attribute_map = { @@ -7378,13 +7498,17 @@ class CosmosDbLinkedService(LinkedService): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, + 'account_endpoint': {'key': 'typeProperties.accountEndpoint', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, account_key=None, encrypted_credential=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, account_endpoint=None, database=None, account_key=None, encrypted_credential=None, **kwargs) -> None: super(CosmosDbLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.connection_string = connection_string + self.account_endpoint = account_endpoint + self.database = database self.account_key = account_key self.encrypted_credential = encrypted_credential self.type = 'CosmosDb' @@ -7596,6 +7720,10 @@ class CosmosDbMongoDbApiSource(CopySource): main purpose is to avoid hit the limitation of response size. Type: integer (or Expression with resultType integer). :type batch_size: object + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type query_timeout: object """ _validation = { @@ -7611,76 +7739,136 @@ class CosmosDbMongoDbApiSource(CopySource): 'filter': {'key': 'filter', 'type': 'object'}, 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, filter=None, cursor_methods=None, batch_size=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, filter=None, cursor_methods=None, batch_size=None, query_timeout=None, **kwargs) -> None: super(CosmosDbMongoDbApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.filter = filter self.cursor_methods = cursor_methods self.batch_size = batch_size + self.query_timeout = query_timeout self.type = 'CosmosDbMongoDbApiSource' -class CouchbaseLinkedService(LinkedService): - """Couchbase server linked service. +class CosmosDbSqlApiCollectionDataset(Dataset): + """Microsoft Azure CosmosDB (SQL API) Collection dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param connect_via: The integration runtime reference. - :type connect_via: - ~azure.mgmt.datafactory.models.IntegrationRuntimeReference - :param description: Linked service description. + :param description: Dataset description. :type description: str - :param parameters: Parameters for linked service. + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.mgmt.datafactory.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the - linked service. + Dataset. :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder :param type: Required. Constant filled by server. :type type: str - :param connection_string: An ODBC connection string. Type: string, - SecureString or AzureKeyVaultSecretReference. - :type connection_string: object - :param cred_string: The Azure key vault secret reference of credString in - connection string. - :type cred_string: - ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference - :param encrypted_credential: The encrypted credential used for - authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). - :type encrypted_credential: object + :param collection_name: Required. 
CosmosDB (SQL API) collection name. + Type: string (or Expression with resultType string). + :type collection_name: object """ _validation = { + 'linked_service_name': {'required': True}, 'type': {'required': True}, + 'collection_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'cred_string': {'key': 'typeProperties.credString', 'type': 'AzureKeyVaultSecretReference'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'collection_name': {'key': 'typeProperties.collectionName', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, cred_string=None, encrypted_credential=None, **kwargs) -> None: - super(CouchbaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.cred_string = cred_string - self.encrypted_credential = encrypted_credential - self.type = 'Couchbase' + def __init__(self, *, linked_service_name, collection_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: + super(CosmosDbSqlApiCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.collection_name = collection_name + self.type = 'CosmosDbSqlApiCollection' -class CouchbaseSource(CopySource): - """A copy activity Couchbase server source. +class CosmosDbSqlApiSink(CopySink): + """A copy activity Azure CosmosDB (SQL API) Collection sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param write_behavior: Describes how to write data to Azure Cosmos DB. + Type: string (or Expression with resultType string). Allowed values: + insert and upsert. + :type write_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None: + super(CosmosDbSqlApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.write_behavior = write_behavior + self.type = 'CosmosDbSqlApiSink' + + +class CosmosDbSqlApiSource(CopySource): + """A copy activity Azure CosmosDB (SQL API) Collection source. All required parameters must be populated in order to send to Azure. @@ -7700,9 +7888,15 @@ class CouchbaseSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str - :param query: A query to retrieve data from source. Type: string (or - Expression with resultType string). + :param query: SQL API query. Type: string (or Expression with resultType + string). :type query: object + :param page_size: Page size of the result. Type: integer (or Expression + with resultType integer). + :type page_size: object + :param preferred_regions: Preferred regions. Type: array of strings (or + Expression with resultType array of strings). 
+ :type preferred_regions: object """ _validation = { @@ -7716,25 +7910,137 @@ class CouchbaseSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, + 'page_size': {'key': 'pageSize', 'type': 'object'}, + 'preferred_regions': {'key': 'preferredRegions', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(CouchbaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, page_size=None, preferred_regions=None, **kwargs) -> None: + super(CosmosDbSqlApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query - self.type = 'CouchbaseSource' + self.page_size = page_size + self.preferred_regions = preferred_regions + self.type = 'CosmosDbSqlApiSource' -class CouchbaseTableDataset(Dataset): - """Couchbase server dataset. +class CouchbaseLinkedService(LinkedService): + """Couchbase server linked service. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param description: Dataset description. - :type description: str - :param structure: Columns that define the structure of the dataset. Type: + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: An ODBC connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param cred_string: The Azure key vault secret reference of credString in + connection string. + :type cred_string: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'cred_string': {'key': 'typeProperties.credString', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, cred_string=None, encrypted_credential=None, **kwargs) -> None: + super(CouchbaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.cred_string = cred_string + self.encrypted_credential = encrypted_credential + self.type = 'Couchbase' + + +class CouchbaseSource(TabularSource): + """A copy activity Couchbase server source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param query: A query to retrieve data from source. Type: string (or + Expression with resultType string). 
+ :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: + super(CouchbaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + self.query = query + self.type = 'CouchbaseSource' + + +class CouchbaseTableDataset(Dataset): + """Couchbase server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. :type structure: object :param schema: Columns that define the physical type schema of the @@ -7784,6 +8090,58 @@ def __init__(self, *, linked_service_name, additional_properties=None, descripti self.type = 'CouchbaseTable' +class CreateDataFlowDebugSessionRequest(Model): + """Request body structure for creating data flow debug session. + + :param compute_type: Compute type of the cluster. The value will be + overwritten by the same setting in integration runtime if provided. + :type compute_type: str + :param core_count: Core count of the cluster. The value will be + overwritten by the same setting in integration runtime if provided. + :type core_count: int + :param time_to_live: Time to live setting of the cluster in minutes. + :type time_to_live: int + :param integration_runtime: Set to use integration runtime setting for + data flow debug session. + :type integration_runtime: + ~azure.mgmt.datafactory.models.IntegrationRuntimeResource + """ + + _attribute_map = { + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'core_count': {'key': 'coreCount', 'type': 'int'}, + 'time_to_live': {'key': 'timeToLive', 'type': 'int'}, + 'integration_runtime': {'key': 'integrationRuntime', 'type': 'IntegrationRuntimeResource'}, + } + + def __init__(self, *, compute_type: str=None, core_count: int=None, time_to_live: int=None, integration_runtime=None, **kwargs) -> None: + super(CreateDataFlowDebugSessionRequest, self).__init__(**kwargs) + self.compute_type = compute_type + self.core_count = core_count + self.time_to_live = time_to_live + self.integration_runtime = integration_runtime + + +class CreateDataFlowDebugSessionResponse(Model): + """Response body structure for creating data flow debug session. + + :param status: The state of the debug session. + :type status: str + :param session_id: The ID of data flow debug session. 
+ :type session_id: str + """ + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'session_id': {'key': 'sessionId', 'type': 'str'}, + } + + def __init__(self, *, status: str=None, session_id: str=None, **kwargs) -> None: + super(CreateDataFlowDebugSessionResponse, self).__init__(**kwargs) + self.status = status + self.session_id = session_id + + class CreateLinkedIntegrationRuntimeRequest(Model): """The linked integration runtime information. @@ -8143,93 +8501,623 @@ class DatabricksSparkJarActivity(ExecutionActivity): _validation = { 'name': {'required': True}, - 'type': {'required': True}, - 'main_class_name': {'required': True}, + 'type': {'required': True}, + 'main_class_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'main_class_name': {'key': 'typeProperties.mainClassName', 'type': 'object'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, + 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, + } + + def __init__(self, *, name: str, main_class_name, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, parameters=None, libraries=None, **kwargs) -> None: + super(DatabricksSparkJarActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.main_class_name = main_class_name + self.parameters = parameters + self.libraries = libraries + self.type = 'DatabricksSparkJar' + + +class DatabricksSparkPythonActivity(ExecutionActivity): + """DatabricksSparkPython activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param python_file: Required. The URI of the Python file to be executed. + DBFS paths are supported. Type: string (or Expression with resultType + string). + :type python_file: object + :param parameters: Command line parameters that will be passed to the + Python file. + :type parameters: list[object] + :param libraries: A list of libraries to be installed on the cluster that + will execute the job. 
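CreateDataFlowDebugSessionRequest/Response pair with the DataFlowDebugSessionOperations group this change registers on the client. A sketch of spinning up a session; the resource names are placeholders, and the exact operation signature should be checked against the generated operations module:

from azure.mgmt.datafactory.models import CreateDataFlowDebugSessionRequest

request = CreateDataFlowDebugSessionRequest(
    compute_type='General',  # overridden by the integration runtime setting if one is supplied
    core_count=8,
    time_to_live=60,         # minutes
)
# Assumed call shape; create is a long-running operation in the generated client.
poller = client.data_flow_debug_session.create('my-rg', 'my-factory', request)
response = poller.result()
print(response.status, response.session_id)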
+ :type libraries: list[dict[str, object]] + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'python_file': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'python_file': {'key': 'typeProperties.pythonFile', 'type': 'object'}, + 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, + 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, + } + + def __init__(self, *, name: str, python_file, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, parameters=None, libraries=None, **kwargs) -> None: + super(DatabricksSparkPythonActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.python_file = python_file + self.parameters = parameters + self.libraries = libraries + self.type = 'DatabricksSparkPython' + + +class DataFlow(Model): + """Azure Data Factory nested object which contains a flow with data movements + and transformations. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: MappingDataFlow + + All required parameters must be populated in order to send to Azure. + + :param description: The description of the data flow. + :type description: str + :param annotations: List of tags that can be used for describing the data + flow. + :type annotations: list[object] + :param folder: The folder that this data flow is in. If not specified, + Data flow will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DataFlowFolder + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DataFlowFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'MappingDataFlow': 'MappingDataFlow'} + } + + def __init__(self, *, description: str=None, annotations=None, folder=None, **kwargs) -> None: + super(DataFlow, self).__init__(**kwargs) + self.description = description + self.annotations = annotations + self.folder = folder + self.type = None + + +class DataFlowDebugCommandPayload(Model): + """Structure of command payload. + + All required parameters must be populated in order to send to Azure. + + :param stream_name: Required. The stream name which is used for preview. + :type stream_name: str + :param row_limits: Row limits for preview response. + :type row_limits: int + :param columns: Array of column names. + :type columns: list[str] + :param expression: The expression which is used for preview. 
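DataFlow is the polymorphic base: the _subtype_map means a payload whose type discriminator is 'MappingDataFlow' deserializes into that subclass, and type is reset to None on the base so only subclasses fill it. A sketch, assuming MappingDataFlow (imported in models/__init__.py above) carries the usual sources/sinks/script properties:

from azure.mgmt.datafactory.models import DataFlowFolder, MappingDataFlow

flow = MappingDataFlow(
    description='Reshape trip data',
    folder=DataFlowFolder(name='debug-flows'),          # omitted => flow appears at the root level
    script='source(...) ~> src\nsrc sink(...) ~> snk',  # placeholder mapping-data-flow script
)
assert flow.type == 'MappingDataFlow'  # constant filled by the subclass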
+ :type expression: str + """ + + _validation = { + 'stream_name': {'required': True}, + } + + _attribute_map = { + 'stream_name': {'key': 'streamName', 'type': 'str'}, + 'row_limits': {'key': 'rowLimits', 'type': 'int'}, + 'columns': {'key': 'columns', 'type': '[str]'}, + 'expression': {'key': 'expression', 'type': 'str'}, + } + + def __init__(self, *, stream_name: str, row_limits: int=None, columns=None, expression: str=None, **kwargs) -> None: + super(DataFlowDebugCommandPayload, self).__init__(**kwargs) + self.stream_name = stream_name + self.row_limits = row_limits + self.columns = columns + self.expression = expression + + +class DataFlowDebugCommandRequest(Model): + """Request body structure for data flow debug command. + + :param session_id: The ID of data flow debug session. + :type session_id: str + :param command: The command type. Possible values include: + 'executePreviewQuery', 'executeStatisticsQuery', 'executeExpressionQuery' + :type command: str or + ~azure.mgmt.datafactory.models.DataFlowDebugCommandType + :param command_payload: The command payload object. + :type command_payload: + ~azure.mgmt.datafactory.models.DataFlowDebugCommandPayload + """ + + _attribute_map = { + 'session_id': {'key': 'sessionId', 'type': 'str'}, + 'command': {'key': 'command', 'type': 'str'}, + 'command_payload': {'key': 'commandPayload', 'type': 'DataFlowDebugCommandPayload'}, + } + + def __init__(self, *, session_id: str=None, command=None, command_payload=None, **kwargs) -> None: + super(DataFlowDebugCommandRequest, self).__init__(**kwargs) + self.session_id = session_id + self.command = command + self.command_payload = command_payload + + +class DataFlowDebugCommandResponse(Model): + """Response body structure of data flow result for data preview, statistics or + expression preview. + + :param status: The run status of data preview, statistics or expression + preview. + :type status: str + :param data: The result data of data preview, statistics or expression + preview. + :type data: str + """ + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'data': {'key': 'data', 'type': 'str'}, + } + + def __init__(self, *, status: str=None, data: str=None, **kwargs) -> None: + super(DataFlowDebugCommandResponse, self).__init__(**kwargs) + self.status = status + self.data = data + + +class DataFlowDebugPackage(Model): + """Request body structure for starting data flow debug session. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param session_id: The ID of data flow debug session. + :type session_id: str + :param data_flow: Data flow instance. + :type data_flow: ~azure.mgmt.datafactory.models.DataFlowResource + :param datasets: List of datasets. + :type datasets: list[~azure.mgmt.datafactory.models.DatasetResource] + :param linked_services: List of linked services. + :type linked_services: + list[~azure.mgmt.datafactory.models.LinkedServiceResource] + :param staging: Staging info for debug session. + :type staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo + :param debug_settings: Data flow debug settings. 
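Once a session exists, DataFlowDebugCommandRequest drives previews against it; the payload's stream_name is the only required field. A hedged sketch reusing the session from the create sketch above:

from azure.mgmt.datafactory.models import (
    DataFlowDebugCommandPayload, DataFlowDebugCommandRequest)

command = DataFlowDebugCommandRequest(
    session_id=response.session_id,   # from the create-session sketch above
    command='executePreviewQuery',
    command_payload=DataFlowDebugCommandPayload(
        stream_name='src',            # required: the stream to preview
        row_limits=100,
    ),
)
# Assumed call shape; execute_command is a long-running operation.
result = client.data_flow_debug_session.execute_command('my-rg', 'my-factory', command).result()
print(result.status, result.data)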
+ :type debug_settings: + ~azure.mgmt.datafactory.models.DataFlowDebugPackageDebugSettings + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'session_id': {'key': 'sessionId', 'type': 'str'}, + 'data_flow': {'key': 'dataFlow', 'type': 'DataFlowResource'}, + 'datasets': {'key': 'datasets', 'type': '[DatasetResource]'}, + 'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceResource]'}, + 'staging': {'key': 'staging', 'type': 'DataFlowStagingInfo'}, + 'debug_settings': {'key': 'debugSettings', 'type': 'DataFlowDebugPackageDebugSettings'}, + } + + def __init__(self, *, additional_properties=None, session_id: str=None, data_flow=None, datasets=None, linked_services=None, staging=None, debug_settings=None, **kwargs) -> None: + super(DataFlowDebugPackage, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.session_id = session_id + self.data_flow = data_flow + self.datasets = datasets + self.linked_services = linked_services + self.staging = staging + self.debug_settings = debug_settings + + +class DataFlowDebugPackageDebugSettings(Model): + """Data flow debug settings. + + :param source_settings: Source setting for data flow debug. + :type source_settings: + list[~azure.mgmt.datafactory.models.DataFlowSourceSetting] + :param parameters: Data flow parameters. + :type parameters: dict[str, object] + :param dataset_parameters: Parameters for dataset. + :type dataset_parameters: object + """ + + _attribute_map = { + 'source_settings': {'key': 'sourceSettings', 'type': '[DataFlowSourceSetting]'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + 'dataset_parameters': {'key': 'datasetParameters', 'type': 'object'}, + } + + def __init__(self, *, source_settings=None, parameters=None, dataset_parameters=None, **kwargs) -> None: + super(DataFlowDebugPackageDebugSettings, self).__init__(**kwargs) + self.source_settings = source_settings + self.parameters = parameters + self.dataset_parameters = dataset_parameters + + +class DataFlowDebugSessionInfo(Model): + """Data flow debug session info. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param data_flow_name: The name of the data flow. + :type data_flow_name: str + :param compute_type: Compute type of the cluster. + :type compute_type: str + :param core_count: Core count of the cluster. + :type core_count: int + :param node_count: Node count of the cluster. (deprecated property) + :type node_count: int + :param integration_runtime_name: Attached integration runtime name of data + flow debug session. + :type integration_runtime_name: str + :param session_id: The ID of data flow debug session. + :type session_id: str + :param start_time: Start time of data flow debug session. + :type start_time: str + :param time_to_live_in_minutes: Time to live setting of the cluster in + minutes. + :type time_to_live_in_minutes: int + :param last_activity_time: Last activity time of data flow debug session.
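A debug package stages the flow definition plus its backing datasets and linked services into the running session before previews can execute. A sketch with placeholder content; flow_resource is a DataFlowResource (built in the DataFlowResource sketch further down):

from azure.mgmt.datafactory.models import (
    DataFlowDebugPackage, DataFlowDebugPackageDebugSettings, DataFlowSourceSetting)

package = DataFlowDebugPackage(
    session_id=response.session_id,
    data_flow=flow_resource,   # DataFlowResource wrapping the flow being debugged
    datasets=[],               # DatasetResource list backing the flow's sources/sinks
    linked_services=[],        # LinkedServiceResource list those datasets depend on
    debug_settings=DataFlowDebugPackageDebugSettings(
        source_settings=[DataFlowSourceSetting(source_name='src', row_limit=100)],
    ),
)
# Assumed call shape; the underlying REST operation is addDataFlow.
client.data_flow_debug_session.add_data_flow('my-rg', 'my-factory', package)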
+ :type last_activity_time: str + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'data_flow_name': {'key': 'dataFlowName', 'type': 'str'}, + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'core_count': {'key': 'coreCount', 'type': 'int'}, + 'node_count': {'key': 'nodeCount', 'type': 'int'}, + 'integration_runtime_name': {'key': 'integrationRuntimeName', 'type': 'str'}, + 'session_id': {'key': 'sessionId', 'type': 'str'}, + 'start_time': {'key': 'startTime', 'type': 'str'}, + 'time_to_live_in_minutes': {'key': 'timeToLiveInMinutes', 'type': 'int'}, + 'last_activity_time': {'key': 'lastActivityTime', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, data_flow_name: str=None, compute_type: str=None, core_count: int=None, node_count: int=None, integration_runtime_name: str=None, session_id: str=None, start_time: str=None, time_to_live_in_minutes: int=None, last_activity_time: str=None, **kwargs) -> None: + super(DataFlowDebugSessionInfo, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.data_flow_name = data_flow_name + self.compute_type = compute_type + self.core_count = core_count + self.node_count = node_count + self.integration_runtime_name = integration_runtime_name + self.session_id = session_id + self.start_time = start_time + self.time_to_live_in_minutes = time_to_live_in_minutes + self.last_activity_time = last_activity_time + + +class DataFlowFolder(Model): + """The folder that this data flow is in. If not specified, Data flow will + appear at the root level. + + :param name: The name of the folder that this data flow is in. + :type name: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__(self, *, name: str=None, **kwargs) -> None: + super(DataFlowFolder, self).__init__(**kwargs) + self.name = name + + +class DataFlowReference(Model): + """Data flow reference type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :ivar type: Required. Data flow reference type. Default value: + "DataFlowReference" . + :vartype type: str + :param reference_name: Required. Reference data flow name. + :type reference_name: str + :param dataset_parameters: Reference data flow parameters from dataset. + :type dataset_parameters: object + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'dataset_parameters': {'key': 'datasetParameters', 'type': 'object'}, + } + + type = "DataFlowReference" + + def __init__(self, *, reference_name: str, additional_properties=None, dataset_parameters=None, **kwargs) -> None: + super(DataFlowReference, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.reference_name = reference_name + self.dataset_parameters = dataset_parameters + + +class SubResource(Model): + """Azure Data Factory nested resource, which belongs to a factory. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: The resource identifier. 
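DataFlowReference follows the familiar *Reference pattern: type is a class-level constant and reference_name is required, so msrest validation rejects a reference without a name. Minimal sketch:

from azure.mgmt.datafactory.models import DataFlowReference

ref = DataFlowReference(reference_name='TripTransform')  # name of an existing data flow
assert ref.type == 'DataFlowReference'                   # constant, not settable per instance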
+ :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(SubResource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None + self.etag = None + + +class DataFlowResource(SubResource): + """Data flow resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Data flow properties. + :type properties: ~azure.mgmt.datafactory.models.DataFlow + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'DataFlow'}, + } + + def __init__(self, *, properties, **kwargs) -> None: + super(DataFlowResource, self).__init__(**kwargs) + self.properties = properties + + +class Transformation(Model): + """A data flow transformation. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Transformation name. + :type name: str + :param description: Transformation description. + :type description: str + """ + + _validation = { + 'name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + } + + def __init__(self, *, name: str, additional_properties=None, description: str=None, **kwargs) -> None: + super(Transformation, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.name = name + self.description = description + + +class DataFlowSink(Transformation): + """Transformation for data flow sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Transformation name. + :type name: str + :param description: Transformation description. + :type description: str + :param dataset: Dataset reference. 
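DataFlowResource is the CRUD envelope: id/name/type/etag are readonly and server-populated, while properties carries the DataFlow payload. A hedged sketch against the new DataFlowsOperations group:

from azure.mgmt.datafactory.models import DataFlowResource

flow_resource = DataFlowResource(properties=flow)  # `flow` from the MappingDataFlow sketch above
# Assumed call shape for the new data_flows operations group; check the generated
# operations module for whether it expects the bare DataFlow properties or the
# wrapped resource.
created = client.data_flows.create_or_update('my-rg', 'my-factory', 'TripTransform', flow)
print(created.id, created.etag)  # readonly fields populated by the service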
+ :type dataset: ~azure.mgmt.datafactory.models.DatasetReference + """ + + _validation = { + 'name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'main_class_name': {'key': 'typeProperties.mainClassName', 'type': 'object'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, - 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, + 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, } - def __init__(self, *, name: str, main_class_name, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, parameters=None, libraries=None, **kwargs) -> None: - super(DatabricksSparkJarActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.main_class_name = main_class_name - self.parameters = parameters - self.libraries = libraries - self.type = 'DatabricksSparkJar' + def __init__(self, *, name: str, additional_properties=None, description: str=None, dataset=None, **kwargs) -> None: + super(DataFlowSink, self).__init__(additional_properties=additional_properties, name=name, description=description, **kwargs) + self.additional_properties = additional_properties + self.dataset = dataset -class DatabricksSparkPythonActivity(ExecutionActivity): - """DatabricksSparkPython activity. +class DataFlowSource(Transformation): + """Transformation for data flow source. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized this collection :type additional_properties: dict[str, object] - :param name: Required. Activity name. + :param name: Required. Transformation name. :type name: str - :param description: Activity description. + :param description: Transformation description. :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] - :param type: Required. Constant filled by server. - :type type: str - :param linked_service_name: Linked service reference. - :type linked_service_name: - ~azure.mgmt.datafactory.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy - :param python_file: Required. The URI of the Python file to be executed. - DBFS paths are supported. Type: string (or Expression with resultType - string). - :type python_file: object - :param parameters: Command line parameters that will be passed to the - Python file. - :type parameters: list[object] - :param libraries: A list of libraries to be installed on the cluster that - will execute the job. - :type libraries: list[dict[str, object]] + :param dataset: Dataset reference. 
+ :type dataset: ~azure.mgmt.datafactory.models.DatasetReference """ _validation = { 'name': {'required': True}, - 'type': {'required': True}, - 'python_file': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'python_file': {'key': 'typeProperties.pythonFile', 'type': 'object'}, - 'parameters': {'key': 'typeProperties.parameters', 'type': '[object]'}, - 'libraries': {'key': 'typeProperties.libraries', 'type': '[{object}]'}, + 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, } - def __init__(self, *, name: str, python_file, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, parameters=None, libraries=None, **kwargs) -> None: - super(DatabricksSparkPythonActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.python_file = python_file - self.parameters = parameters - self.libraries = libraries - self.type = 'DatabricksSparkPython' + def __init__(self, *, name: str, additional_properties=None, description: str=None, dataset=None, **kwargs) -> None: + super(DataFlowSource, self).__init__(additional_properties=additional_properties, name=name, description=description, **kwargs) + self.additional_properties = additional_properties + self.dataset = dataset + + +class DataFlowSourceSetting(Model): + """Definition of data flow source setting for debug. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_name: The data flow source name. + :type source_name: str + :param row_limit: Defines the row limit of data flow source in debug. + :type row_limit: int + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_name': {'key': 'sourceName', 'type': 'str'}, + 'row_limit': {'key': 'rowLimit', 'type': 'int'}, + } + + def __init__(self, *, additional_properties=None, source_name: str=None, row_limit: int=None, **kwargs) -> None: + super(DataFlowSourceSetting, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.source_name = source_name + self.row_limit = row_limit + + +class DataFlowStagingInfo(Model): + """Staging info for execute data flow activity. + + :param linked_service: Staging linked service reference. + :type linked_service: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param folder_path: Folder path for staging blob. 
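DataFlowSource and DataFlowSink are thin Transformation subclasses that bind a named stream to a DatasetReference; they are what MappingDataFlow's sources and sinks lists hold. Sketch with placeholder dataset names:

from azure.mgmt.datafactory.models import (
    DataFlowSink, DataFlowSource, DatasetReference)

src = DataFlowSource(name='src', dataset=DatasetReference(reference_name='TripsIn'))
snk = DataFlowSink(name='snk', dataset=DatasetReference(reference_name='TripsOut'))
# These would feed MappingDataFlow(sources=[src], sinks=[snk], ...).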
+ :type folder_path: str + """ + + _attribute_map = { + 'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'}, + 'folder_path': {'key': 'folderPath', 'type': 'str'}, + } + + def __init__(self, *, linked_service=None, folder_path: str=None, **kwargs) -> None: + super(DataFlowStagingInfo, self).__init__(**kwargs) + self.linked_service = linked_service + self.folder_path = folder_path class DataLakeAnalyticsUSQLActivity(ExecutionActivity): @@ -8492,44 +9380,6 @@ def __init__(self, *, reference_name: str, parameters=None, **kwargs) -> None: self.parameters = parameters -class SubResource(Model): - """Azure Data Factory nested resource, which belongs to a factory. - - Variables are only populated by the server, and will be ignored when - sending a request. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - } - - def __init__(self, **kwargs) -> None: - super(SubResource, self).__init__(**kwargs) - self.id = None - self.name = None - self.type = None - self.etag = None - - class DatasetResource(SubResource): """Dataset resource type. @@ -8675,7 +9525,7 @@ def __init__(self, *, server, database, additional_properties=None, connect_via= self.type = 'Db2' -class Db2Source(CopySource): +class Db2Source(TabularSource): """A copy activity source for Db2 databases. All required parameters must be populated in order to send to Azure. @@ -8696,6 +9546,10 @@ class Db2Source(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: Database query. Type: string (or Expression with resultType string). 
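DataFlowStagingInfo is shared by the debug package and, further down, ExecuteDataFlowActivity; it points at a blob linked service plus a folder for intermediate staging data. Sketch with placeholder names:

from azure.mgmt.datafactory.models import DataFlowStagingInfo, LinkedServiceReference

staging = DataFlowStagingInfo(
    linked_service=LinkedServiceReference(reference_name='StagingBlobLS'),
    folder_path='staging/dataflow',
)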
:type query: object @@ -8711,11 +9565,12 @@ class Db2Source(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(Db2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: + super(Db2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.type = 'Db2Source' @@ -8865,6 +9720,22 @@ def __init__(self, *, name: str, dataset, additional_properties=None, descriptio self.type = 'Delete' +class DeleteDataFlowDebugSessionRequest(Model): + """Request body structure for deleting data flow debug session. + + :param session_id: The ID of data flow debug session. + :type session_id: str + """ + + _attribute_map = { + 'session_id': {'key': 'sessionId', 'type': 'str'}, + } + + def __init__(self, *, session_id: str=None, **kwargs) -> None: + super(DeleteDataFlowDebugSessionRequest, self).__init__(**kwargs) + self.session_id = session_id + + class DelimitedTextDataset(Dataset): """Delimited text dataset. @@ -9337,7 +10208,8 @@ class DocumentDbCollectionSink(CopySink): Type: string (or Expression with resultType string). :type nesting_separator: object :param write_behavior: Describes how to write data to Azure Cosmos DB. - Allowed values: insert and upsert. + Type: string (or Expression with resultType string). Allowed values: + insert and upsert. :type write_behavior: object """ @@ -9391,6 +10263,10 @@ class DocumentDbCollectionSource(CopySource): :param nesting_separator: Nested properties separator. Type: string (or Expression with resultType string). :type nesting_separator: object + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
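DeleteDataFlowDebugSessionRequest closes the lifecycle opened by the create request. A hedged sketch of tearing the session down:

from azure.mgmt.datafactory.models import DeleteDataFlowDebugSessionRequest

# Assumed call shape; releases the cluster backing the debug session.
client.data_flow_debug_session.delete(
    'my-rg', 'my-factory',
    DeleteDataFlowDebugSessionRequest(session_id=response.session_id))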
+ :type query_timeout: object """ _validation = { @@ -9405,12 +10281,14 @@ class DocumentDbCollectionSource(CopySource): 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, nesting_separator=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, nesting_separator=None, query_timeout=None, **kwargs) -> None: super(DocumentDbCollectionSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.nesting_separator = nesting_separator + self.query_timeout = query_timeout self.type = 'DocumentDbCollectionSource' @@ -9471,7 +10349,7 @@ def __init__(self, *, additional_properties=None, connect_via=None, description: self.type = 'Drill' -class DrillSource(CopySource): +class DrillSource(TabularSource): """A copy activity Drill server source. All required parameters must be populated in order to send to Azure. @@ -9492,6 +10370,10 @@ class DrillSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -9507,11 +10389,12 @@ class DrillSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(DrillSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: + super(DrillSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.type = 'DrillSource' @@ -9728,7 +10611,7 @@ def __init__(self, *, linked_service_name, path, additional_properties=None, des self.type = 'DynamicsAXResource' -class DynamicsAXSource(CopySource): +class DynamicsAXSource(TabularSource): """A copy activity Dynamics AX source. All required parameters must be populated in order to send to Azure. 
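DrillSource here, like Db2Source just above and the Dynamics AX, Eloqua, Google AdWords/BigQuery, Greenplum, HBase, Hive, Hubspot, Impala, Informix, and Jira sources below, is rebased from CopySource onto the new TabularSource, whose only addition is the queryTimeout pass-through. The change is mechanical everywhere; a sketch of the new knob:

from azure.mgmt.datafactory.models import DrillSource

source = DrillSource(
    query='SELECT * FROM dfs.tmp.trips',
    query_timeout='02:00:00',  # inherited from TabularSource; timespan pattern per the docstring
)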
@@ -9749,6 +10632,10 @@ class DynamicsAXSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -9764,11 +10651,12 @@ class DynamicsAXSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(DynamicsAXSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: + super(DynamicsAXSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.type = 'DynamicsAXSource' @@ -10463,7 +11351,7 @@ def __init__(self, *, linked_service_name, additional_properties=None, descripti self.type = 'EloquaObject' -class EloquaSource(CopySource): +class EloquaSource(TabularSource): """A copy activity Eloqua server source. All required parameters must be populated in order to send to Azure. @@ -10484,6 +11372,10 @@ class EloquaSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -10499,11 +11391,12 @@ class EloquaSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(EloquaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: + super(EloquaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.type = 'EloquaSource' @@ -10562,6 +11455,66 @@ def __init__(self, *, variable_name: str, variable_value: str, **kwargs) -> None self.type = 'EnvironmentVariableSetup' +class ExecuteDataFlowActivity(ExecutionActivity): + """Execute data flow activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.mgmt.datafactory.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.mgmt.datafactory.models.UserProperty] + :param type: Required. Constant filled by server. + :type type: str + :param linked_service_name: Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~azure.mgmt.datafactory.models.ActivityPolicy + :param data_flow: Required. Data flow reference. + :type data_flow: ~azure.mgmt.datafactory.models.DataFlowReference + :param staging: Staging info for execute data flow activity. + :type staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo + :param integration_runtime: The integration runtime reference. 
+ :type integration_runtime: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'data_flow': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'data_flow': {'key': 'typeProperties.dataFlow', 'type': 'DataFlowReference'}, + 'staging': {'key': 'typeProperties.staging', 'type': 'DataFlowStagingInfo'}, + 'integration_runtime': {'key': 'typeProperties.integrationRuntime', 'type': 'IntegrationRuntimeReference'}, + } + + def __init__(self, *, name: str, data_flow, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, staging=None, integration_runtime=None, **kwargs) -> None: + super(ExecuteDataFlowActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.data_flow = data_flow + self.staging = staging + self.integration_runtime = integration_runtime + self.type = 'ExecuteDataFlow' + + class ExecutePipelineActivity(ControlActivity): """Execute pipeline activity. @@ -11806,6 +12759,27 @@ def __init__(self, *, type: str, additional_properties=None, folder_path=None, f super(FtpServerLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) +class GetDataFactoryOperationStatusResponse(Model): + """Response body structure for get data factory operation status. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param status: Status of the operation. + :type status: str + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'status': {'key': 'status', 'type': 'str'}, + } + + def __init__(self, *, additional_properties=None, status: str=None, **kwargs) -> None: + super(GetDataFactoryOperationStatusResponse, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.status = status + + class GetMetadataActivity(ExecutionActivity): """Activity to get metadata of dataset. @@ -12092,7 +13066,7 @@ def __init__(self, *, linked_service_name, additional_properties=None, descripti self.type = 'GoogleAdWordsObject' -class GoogleAdWordsSource(CopySource): +class GoogleAdWordsSource(TabularSource): """A copy activity Google AdWords service source. All required parameters must be populated in order to send to Azure. @@ -12113,6 +13087,10 @@ class GoogleAdWordsSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. 
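ExecuteDataFlowActivity is the pipeline-side entry point for data flows: it requires a DataFlowReference and optionally carries the staging info and an integration runtime override. A hedged end-to-end sketch reusing earlier pieces; the pipeline wiring is illustrative:

from azure.mgmt.datafactory.models import (
    DataFlowReference, ExecuteDataFlowActivity, PipelineResource)

activity = ExecuteDataFlowActivity(
    name='RunTripTransform',
    data_flow=DataFlowReference(reference_name='TripTransform'),
    staging=staging,  # DataFlowStagingInfo from the sketch above
)
# Assumed call shape for the existing pipelines operation group.
client.pipelines.create_or_update(
    'my-rg', 'my-factory', 'TripPipeline', PipelineResource(activities=[activity]))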
Type: string (or Expression with resultType string). :type query: object @@ -12128,11 +13106,12 @@ class GoogleAdWordsSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(GoogleAdWordsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: + super(GoogleAdWordsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.type = 'GoogleAdWordsSource' @@ -12319,7 +13298,7 @@ def __init__(self, *, linked_service_name, additional_properties=None, descripti self.type = 'GoogleBigQueryObject' -class GoogleBigQuerySource(CopySource): +class GoogleBigQuerySource(TabularSource): """A copy activity Google BigQuery service source. All required parameters must be populated in order to send to Azure. @@ -12340,6 +13319,10 @@ class GoogleBigQuerySource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -12355,11 +13338,12 @@ class GoogleBigQuerySource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(GoogleBigQuerySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: + super(GoogleBigQuerySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.type = 'GoogleBigQuerySource' @@ -12421,7 +13405,7 @@ def __init__(self, *, additional_properties=None, connect_via=None, description: self.type = 'Greenplum' -class GreenplumSource(CopySource): +class GreenplumSource(TabularSource): """A copy activity Greenplum Database source. All required parameters must be populated in order to send to Azure. @@ -12442,6 +13426,10 @@ class GreenplumSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -12457,11 +13445,12 @@ class GreenplumSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(GreenplumSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: + super(GreenplumSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.type = 'GreenplumSource' @@ -12698,7 +13687,7 @@ def __init__(self, *, linked_service_name, additional_properties=None, descripti self.type = 'HBaseObject' -class HBaseSource(CopySource): +class HBaseSource(TabularSource): """A copy activity HBase server source. All required parameters must be populated in order to send to Azure. @@ -12719,6 +13708,10 @@ class HBaseSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -12734,11 +13727,12 @@ class HBaseSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(HBaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: + super(HBaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.type = 'HBaseSource' @@ -13918,7 +14912,7 @@ def __init__(self, *, linked_service_name, additional_properties=None, descripti self.type = 'HiveObject' -class HiveSource(CopySource): +class HiveSource(TabularSource): """A copy activity Hive Server source. 
All required parameters must be populated in order to send to Azure. @@ -13939,6 +14933,10 @@ class HiveSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -13954,11 +14952,12 @@ class HiveSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(HiveSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: + super(HiveSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.type = 'HiveSource' @@ -14424,7 +15423,7 @@ def __init__(self, *, linked_service_name, additional_properties=None, descripti self.type = 'HubspotObject' -class HubspotSource(CopySource): +class HubspotSource(TabularSource): """A copy activity Hubspot Service source. All required parameters must be populated in order to send to Azure. @@ -14445,6 +15444,10 @@ class HubspotSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -14460,11 +15463,12 @@ class HubspotSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(HubspotSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: + super(HubspotSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.type = 'HubspotSource' @@ -14704,7 +15708,7 @@ def __init__(self, *, linked_service_name, additional_properties=None, descripti self.type = 'ImpalaObject' -class ImpalaSource(CopySource): +class ImpalaSource(TabularSource): """A copy activity Impala server source. All required parameters must be populated in order to send to Azure. @@ -14725,6 +15729,10 @@ class ImpalaSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -14740,11 +15748,12 @@ class ImpalaSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(ImpalaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: + super(ImpalaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.type = 'ImpalaSource' @@ -14877,7 +15886,7 @@ def __init__(self, *, additional_properties=None, write_batch_size=None, write_b self.type = 'InformixSink' -class InformixSource(CopySource): +class InformixSource(TabularSource): """A copy activity source for Informix. 
All required parameters must be populated in order to send to Azure. @@ -14898,6 +15907,10 @@ class InformixSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object @@ -14913,11 +15926,12 @@ class InformixSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(InformixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: + super(InformixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.type = 'InformixSource' @@ -15777,7 +16791,7 @@ def __init__(self, *, linked_service_name, additional_properties=None, descripti self.type = 'JiraObject' -class JiraSource(CopySource): +class JiraSource(TabularSource): """A copy activity Jira Service source. All required parameters must be populated in order to send to Azure. @@ -15798,6 +16812,10 @@ class JiraSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -15813,11 +16831,12 @@ class JiraSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(JiraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: + super(JiraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.type = 'JiraSource' @@ -16560,7 +17579,7 @@ def __init__(self, *, linked_service_name, additional_properties=None, descripti self.type = 'MagentoObject' -class MagentoSource(CopySource): +class MagentoSource(TabularSource): """A copy activity Magento server source. All required parameters must be populated in order to send to Azure. @@ -16581,6 +17600,10 @@ class MagentoSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -16596,11 +17619,12 @@ class MagentoSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(MagentoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: + super(MagentoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.type = 'MagentoSource' @@ -16860,6 +17884,55 @@ def __init__(self, *, additional_properties=None, **kwargs) -> None: self.type = 'Managed' +class MappingDataFlow(DataFlow): + """Mapping data flow. + + All required parameters must be populated in order to send to Azure. 
+ + :param description: The description of the data flow. + :type description: str + :param annotations: List of tags that can be used for describing the data + flow. + :type annotations: list[object] + :param folder: The folder that this data flow is in. If not specified, + Data flow will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DataFlowFolder + :param type: Required. Constant filled by server. + :type type: str + :param sources: List of sources in data flow. + :type sources: list[~azure.mgmt.datafactory.models.DataFlowSource] + :param sinks: List of sinks in data flow. + :type sinks: list[~azure.mgmt.datafactory.models.DataFlowSink] + :param transformations: List of transformations in data flow. + :type transformations: list[~azure.mgmt.datafactory.models.Transformation] + :param script: DataFlow script. + :type script: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DataFlowFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'sources': {'key': 'typeProperties.sources', 'type': '[DataFlowSource]'}, + 'sinks': {'key': 'typeProperties.sinks', 'type': '[DataFlowSink]'}, + 'transformations': {'key': 'typeProperties.transformations', 'type': '[Transformation]'}, + 'script': {'key': 'typeProperties.script', 'type': 'str'}, + } + + def __init__(self, *, description: str=None, annotations=None, folder=None, sources=None, sinks=None, transformations=None, script: str=None, **kwargs) -> None: + super(MappingDataFlow, self).__init__(description=description, annotations=annotations, folder=folder, **kwargs) + self.sources = sources + self.sinks = sinks + self.transformations = transformations + self.script = script + self.type = 'MappingDataFlow' + + class MariaDBLinkedService(LinkedService): """MariaDB server linked service. @@ -16917,7 +17990,7 @@ def __init__(self, *, additional_properties=None, connect_via=None, description: self.type = 'MariaDB' -class MariaDBSource(CopySource): +class MariaDBSource(TabularSource): """A copy activity MariaDB server source. All required parameters must be populated in order to send to Azure. @@ -16938,6 +18011,10 @@ class MariaDBSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
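Since MappingDataFlow is new in this diff, a hedged construction sketch may help. DataFlowSource and DataFlowSink are the reference types named in the docstring above; the assumption here is that, like other transformations, each takes a required name. The script body is illustrative only.

from azure.mgmt.datafactory.models import (
    DataFlowSink,
    DataFlowSource,
    MappingDataFlow,
)

# Assumption: DataFlowSource/DataFlowSink take the transformation's required
# 'name'; the script text is an illustrative data flow script, not syntax
# guidance.
data_flow = MappingDataFlow(
    description="Moves raw events into a curated table",
    sources=[DataFlowSource(name="rawEvents")],
    sinks=[DataFlowSink(name="curatedEvents")],
    script="rawEvents sink() ~> curatedEvents",
)

Per the _attribute_map above, sources, sinks, transformations, and script all land under typeProperties.* on the wire, while description, annotations, and folder stay at the top level.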
:type query: object @@ -16953,11 +18030,12 @@ class MariaDBSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(MariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: + super(MariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.type = 'MariaDBSource' @@ -17160,7 +18238,7 @@ def __init__(self, *, linked_service_name, additional_properties=None, descripti self.type = 'MarketoObject' -class MarketoSource(CopySource): +class MarketoSource(TabularSource): """A copy activity Marketo server source. All required parameters must be populated in order to send to Azure. @@ -17181,6 +18259,10 @@ class MarketoSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -17196,11 +18278,12 @@ class MarketoSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(MarketoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: + super(MarketoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.type = 'MarketoSource' @@ -17831,6 +18914,10 @@ class MongoDbV2Source(CopySource): main purpose is to avoid hit the limitation of response size. Type: integer (or Expression with resultType integer). :type batch_size: object + :param query_timeout: Query timeout. 
Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object """ _validation = { @@ -17846,13 +18933,15 @@ class MongoDbV2Source(CopySource): 'filter': {'key': 'filter', 'type': 'object'}, 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, filter=None, cursor_methods=None, batch_size=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, filter=None, cursor_methods=None, batch_size=None, query_timeout=None, **kwargs) -> None: super(MongoDbV2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.filter = filter self.cursor_methods = cursor_methods self.batch_size = batch_size + self.query_timeout = query_timeout self.type = 'MongoDbV2Source' @@ -17914,7 +19003,7 @@ def __init__(self, *, connection_string, additional_properties=None, connect_via self.type = 'MySql' -class MySqlSource(CopySource): +class MySqlSource(TabularSource): """A copy activity source for MySQL databases. All required parameters must be populated in order to send to Azure. @@ -17935,6 +19024,10 @@ class MySqlSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object @@ -17950,11 +19043,12 @@ class MySqlSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(MySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: + super(MySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.type = 'MySqlSource' @@ -18106,7 +19200,7 @@ def __init__(self, *, partition_column_name=None, partition_upper_bound=None, pa self.partition_lower_bound = partition_lower_bound -class NetezzaSource(CopySource): +class NetezzaSource(TabularSource): """A copy activity Netezza source. 
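MongoDbV2Source is the one source in this stretch of the diff that keeps CopySource as its base and instead gains query_timeout directly; a small sketch of that shape (values illustrative):

from azure.mgmt.datafactory.models import MongoDbV2Source

# filter, batch_size, and query_timeout are all 'object'-typed, so plain
# JSON-compatible Python values pass through serialization unchanged.
mongo_source = MongoDbV2Source(
    filter='{"status": "active"}',  # MongoDB query filter document
    batch_size=500,                 # keeps each response under the size limit
    query_timeout="00:30:00",
)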
All required parameters must be populated in order to send to Azure. @@ -18127,6 +19221,10 @@ class NetezzaSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -18151,13 +19249,14 @@ class NetezzaSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'NetezzaPartitionSettings'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, partition_option=None, partition_settings=None, **kwargs) -> None: - super(NetezzaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, partition_option=None, partition_settings=None, **kwargs) -> None: + super(NetezzaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.partition_option = partition_option self.partition_settings = partition_settings @@ -18582,7 +19681,7 @@ def __init__(self, *, additional_properties=None, write_batch_size=None, write_b self.type = 'OdbcSink' -class OdbcSource(CopySource): +class OdbcSource(TabularSource): """A copy activity source for ODBC databases. All required parameters must be populated in order to send to Azure. @@ -18603,6 +19702,10 @@ class OdbcSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: Database query. Type: string (or Expression with resultType string). 
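NetezzaSource shows how the new query_timeout coexists with the partition settings already threaded through this class. A sketch follows, with the caveat that the "DataSlice" option value and the exact NetezzaPartitionSettings field names are assumptions only partially visible in this excerpt.

from azure.mgmt.datafactory.models import NetezzaPartitionSettings, NetezzaSource

netezza_source = NetezzaSource(
    query_timeout="01:00:00",
    partition_option="DataSlice",  # assumed option value; not shown in this diff
    partition_settings=NetezzaPartitionSettings(
        partition_column_name="order_id",  # field names assumed from the
        partition_lower_bound="1",         # hunk context above
        partition_upper_bound="1000000",
    ),
)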
:type query: object @@ -18618,11 +19721,12 @@ class OdbcSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(OdbcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: + super(OdbcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.type = 'OdbcSource' @@ -19340,7 +20444,7 @@ def __init__(self, *, linked_service_name, additional_properties=None, descripti self.type = 'OracleServiceCloudObject' -class OracleServiceCloudSource(CopySource): +class OracleServiceCloudSource(TabularSource): """A copy activity Oracle Service Cloud source. All required parameters must be populated in order to send to Azure. @@ -19361,6 +20465,10 @@ class OracleServiceCloudSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -19376,11 +20484,12 @@ class OracleServiceCloudSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(OracleServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: + super(OracleServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.type = 'OracleServiceCloudSource' @@ -20126,7 +21235,7 @@ def __init__(self, *, linked_service_name, additional_properties=None, descripti self.type = 'PaypalObject' -class PaypalSource(CopySource): +class PaypalSource(TabularSource): """A copy activity Paypal Service source. All required parameters must be populated in order to send to Azure. @@ -20147,6 +21256,10 @@ class PaypalSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -20162,11 +21275,12 @@ class PaypalSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(PaypalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: + super(PaypalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.type = 'PaypalSource' @@ -20350,7 +21464,7 @@ def __init__(self, *, linked_service_name, additional_properties=None, descripti self.type = 'PhoenixObject' -class PhoenixSource(CopySource): +class PhoenixSource(TabularSource): """A copy activity Phoenix server source. All required parameters must be populated in order to send to Azure. @@ -20371,6 +21485,10 @@ class PhoenixSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -20386,11 +21504,12 @@ class PhoenixSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(PhoenixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: + super(PhoenixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.type = 'PhoenixSource' @@ -20775,7 +21894,7 @@ def __init__(self, *, connection_string, additional_properties=None, connect_via self.type = 'PostgreSql' -class PostgreSqlSource(CopySource): +class PostgreSqlSource(TabularSource): """A copy activity source for PostgreSQL databases. 
All required parameters must be populated in order to send to Azure. @@ -20796,6 +21915,10 @@ class PostgreSqlSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object @@ -20811,11 +21934,12 @@ class PostgreSqlSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(PostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: + super(PostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.type = 'PostgreSqlSource' @@ -21080,7 +22204,7 @@ def __init__(self, *, linked_service_name, additional_properties=None, descripti self.type = 'PrestoObject' -class PrestoSource(CopySource): +class PrestoSource(TabularSource): """A copy activity Presto server source. All required parameters must be populated in order to send to Azure. @@ -21101,6 +22225,10 @@ class PrestoSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -21116,11 +22244,12 @@ class PrestoSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(PrestoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: + super(PrestoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.type = 'PrestoSource' @@ -21273,7 +22402,7 @@ def __init__(self, *, linked_service_name, additional_properties=None, descripti self.type = 'QuickBooksObject' -class QuickBooksSource(CopySource): +class QuickBooksSource(TabularSource): """A copy activity QuickBooks server source. All required parameters must be populated in order to send to Azure. @@ -21294,6 +22423,10 @@ class QuickBooksSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -21309,11 +22442,12 @@ class QuickBooksSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(QuickBooksSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: + super(QuickBooksSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.type = 'QuickBooksSource' @@ -21843,7 +22977,7 @@ def __init__(self, *, linked_service_name, additional_properties=None, descripti self.type = 'ResponsysObject' -class ResponsysSource(CopySource): +class ResponsysSource(TabularSource): """A copy activity Responsys source. All required parameters must be populated in order to send to Azure. @@ -21864,6 +22998,10 @@ class ResponsysSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -21879,11 +23017,12 @@ class ResponsysSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(ResponsysSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: + super(ResponsysSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.type = 'ResponsysSource' @@ -22490,7 +23629,7 @@ def __init__(self, *, linked_service_name, additional_properties=None, descripti self.type = 'SalesforceMarketingCloudObject' -class SalesforceMarketingCloudSource(CopySource): +class SalesforceMarketingCloudSource(TabularSource): """A copy activity Salesforce Marketing Cloud source. All required parameters must be populated in order to send to Azure. @@ -22511,6 +23650,10 @@ class SalesforceMarketingCloudSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -22526,11 +23669,12 @@ class SalesforceMarketingCloudSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(SalesforceMarketingCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: + super(SalesforceMarketingCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.type = 'SalesforceMarketingCloudSource' @@ -22925,7 +24069,7 @@ def __init__(self, *, additional_properties=None, write_batch_size=None, write_b self.type = 'SalesforceSink' -class SalesforceSource(CopySource): +class SalesforceSource(TabularSource): """A copy activity Salesforce source. All required parameters must be populated in order to send to Azure. @@ -22946,6 +24090,10 @@ class SalesforceSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: Database query. Type: string (or Expression with resultType string). 
:type query: object @@ -22965,12 +24113,13 @@ class SalesforceSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, read_behavior=None, **kwargs) -> None: - super(SalesforceSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, read_behavior=None, **kwargs) -> None: + super(SalesforceSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.read_behavior = read_behavior self.type = 'SalesforceSource' @@ -23107,7 +24256,7 @@ def __init__(self, *, server, system_number, client_id, additional_properties=No self.type = 'SapBW' -class SapBwSource(CopySource): +class SapBwSource(TabularSource): """A copy activity source for SapBW server via MDX. All required parameters must be populated in order to send to Azure. @@ -23128,6 +24277,10 @@ class SapBwSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: MDX query. Type: string (or Expression with resultType string). 
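SalesforceSource threads query_timeout in ahead of its existing read_behavior knob; in the sketch below, the "QueryAll" value is an assumption about the allowed read behaviors, not something this diff states.

from azure.mgmt.datafactory.models import SalesforceSource

sf_source = SalesforceSource(
    query="SELECT Id, Name FROM Account",  # SOQL query (illustrative)
    query_timeout="00:10:00",
    read_behavior="QueryAll",  # assumed value; plain "Query" would skip deleted rows
)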
:type query: object @@ -23143,11 +24296,12 @@ class SapBwSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(SapBwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: + super(SapBwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.type = 'SapBwSource' @@ -23332,7 +24486,7 @@ def __init__(self, *, additional_properties=None, write_batch_size=None, write_b self.type = 'SapCloudForCustomerSink' -class SapCloudForCustomerSource(CopySource): +class SapCloudForCustomerSource(TabularSource): """A copy activity source for SAP Cloud for Customer source. All required parameters must be populated in order to send to Azure. @@ -23353,6 +24507,10 @@ class SapCloudForCustomerSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or Expression with resultType string). 
:type query: object @@ -23368,11 +24526,12 @@ class SapCloudForCustomerSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(SapCloudForCustomerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: + super(SapCloudForCustomerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.type = 'SapCloudForCustomerSource' @@ -23502,7 +24661,7 @@ def __init__(self, *, linked_service_name, path, additional_properties=None, des self.type = 'SapEccResource' -class SapEccSource(CopySource): +class SapEccSource(TabularSource): """A copy activity source for SAP ECC source. All required parameters must be populated in order to send to Azure. @@ -23523,6 +24682,10 @@ class SapEccSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: SAP ECC OData query. For example, "$top=1". Type: string (or Expression with resultType string). 
:type query: object @@ -23538,11 +24701,12 @@ class SapEccSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: + super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.type = 'SapEccSource' @@ -23620,7 +24784,7 @@ def __init__(self, *, server, additional_properties=None, connect_via=None, desc self.type = 'SapHana' -class SapHanaSource(CopySource): +class SapHanaSource(TabularSource): """A copy activity source for SAP HANA source. All required parameters must be populated in order to send to Azure. @@ -23641,6 +24805,10 @@ class SapHanaSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: SAP HANA Sql query. Type: string (or Expression with resultType string). 
:type query: object @@ -23659,12 +24827,13 @@ class SapHanaSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'packet_size': {'key': 'packetSize', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, packet_size=None, **kwargs) -> None: - super(SapHanaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, packet_size=None, **kwargs) -> None: + super(SapHanaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.packet_size = packet_size self.type = 'SapHanaSource' @@ -23822,7 +24991,7 @@ def __init__(self, *, server, system_number, client_id, additional_properties=No self.type = 'SapOpenHub' -class SapOpenHubSource(CopySource): +class SapOpenHubSource(TabularSource): """A copy activity source for SAP Business Warehouse Open Hub Destination source. @@ -23844,6 +25013,10 @@ class SapOpenHubSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param exclude_last_request: Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with resultType boolean). 
@@ -23865,12 +25038,13 @@ class SapOpenHubSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'exclude_last_request': {'key': 'excludeLastRequest', 'type': 'object'}, 'base_request_id': {'key': 'baseRequestId', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, exclude_last_request=None, base_request_id=None, **kwargs) -> None: - super(SapOpenHubSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, exclude_last_request=None, base_request_id=None, **kwargs) -> None: + super(SapOpenHubSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.exclude_last_request = exclude_last_request self.base_request_id = base_request_id self.type = 'SapOpenHubSource' @@ -24175,7 +25349,7 @@ def __init__(self, *, linked_service_name, table_name, additional_properties=Non self.type = 'SapTableResource' -class SapTableSource(CopySource): +class SapTableSource(TabularSource): """A copy activity source for SAP Table source. All required parameters must be populated in order to send to Azure. @@ -24196,6 +25370,10 @@ class SapTableSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param row_count: The number of rows to be retrieved. Type: integer(or Expression with resultType integer). 
:type row_count: object @@ -24240,6 +25418,7 @@ class SapTableSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'row_count': {'key': 'rowCount', 'type': 'object'}, 'row_skips': {'key': 'rowSkips', 'type': 'object'}, 'rfc_table_fields': {'key': 'rfcTableFields', 'type': 'object'}, @@ -24250,8 +25429,8 @@ class SapTableSource(CopySource): 'partition_settings': {'key': 'partitionSettings', 'type': 'SapTablePartitionSettings'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, row_count=None, row_skips=None, rfc_table_fields=None, rfc_table_options=None, batch_size=None, custom_rfc_read_table_function_module=None, partition_option=None, partition_settings=None, **kwargs) -> None: - super(SapTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, row_count=None, row_skips=None, rfc_table_fields=None, rfc_table_options=None, batch_size=None, custom_rfc_read_table_function_module=None, partition_option=None, partition_settings=None, **kwargs) -> None: + super(SapTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.row_count = row_count self.row_skips = row_skips self.rfc_table_fields = rfc_table_fields @@ -24905,7 +26084,7 @@ def __init__(self, *, linked_service_name, additional_properties=None, descripti self.type = 'ServiceNowObject' -class ServiceNowSource(CopySource): +class ServiceNowSource(TabularSource): """A copy activity ServiceNow server source. All required parameters must be populated in order to send to Azure. @@ -24926,6 +26105,10 @@ class ServiceNowSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
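SapTableSource carries the widest signature touched here, so a sketch showing query_timeout alongside a few of the pre-existing row/RFC options may help; all values are illustrative.

from azure.mgmt.datafactory.models import SapTableSource

sap_source = SapTableSource(
    query_timeout="02:00:00",
    row_count=10000,                     # rows to retrieve
    row_skips=0,                         # rows to skip before reading
    rfc_table_options="MANDT EQ '100'",  # illustrative RFC filter expression
    batch_size=1000,
)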
:type query: object @@ -24941,11 +26124,12 @@ class ServiceNowSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(ServiceNowSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: + super(ServiceNowSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.type = 'ServiceNowSource' @@ -25327,7 +26511,7 @@ def __init__(self, *, linked_service_name, additional_properties=None, descripti self.type = 'ShopifyObject' -class ShopifySource(CopySource): +class ShopifySource(TabularSource): """A copy activity Shopify Service source. All required parameters must be populated in order to send to Azure. @@ -25348,6 +26532,10 @@ class ShopifySource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -25363,11 +26551,12 @@ class ShopifySource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(ShopifySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: + super(ShopifySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.type = 'ShopifySource' @@ -25561,7 +26750,7 @@ def __init__(self, *, linked_service_name, additional_properties=None, descripti self.type = 'SparkObject' -class SparkSource(CopySource): +class SparkSource(TabularSource): """A copy activity Spark Server source. 
All required parameters must be populated in order to send to Azure. @@ -25582,6 +26771,10 @@ class SparkSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -25597,11 +26790,12 @@ class SparkSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(SparkSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: + super(SparkSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.type = 'SparkSource' @@ -25677,7 +26871,7 @@ def __init__(self, *, additional_properties=None, write_batch_size=None, write_b self.type = 'SqlDWSink' -class SqlDWSource(CopySource): +class SqlDWSource(TabularSource): """A copy activity SQL Data Warehouse source. All required parameters must be populated in order to send to Azure. @@ -25698,6 +26892,10 @@ class SqlDWSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param sql_reader_query: SQL Data Warehouse reader query. Type: string (or Expression with resultType string). 
:type sql_reader_query: object @@ -25722,13 +26920,14 @@ class SqlDWSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, **kwargs) -> None: - super(SqlDWSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, **kwargs) -> None: + super(SqlDWSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters @@ -25816,7 +27015,7 @@ def __init__(self, *, additional_properties=None, write_batch_size=None, write_b self.type = 'SqlMISink' -class SqlMISource(CopySource): +class SqlMISource(TabularSource): """A copy activity Azure SQL Managed Instance source. All required parameters must be populated in order to send to Azure. @@ -25837,6 +27036,10 @@ class SqlMISource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). 
:type sql_reader_query: object @@ -25862,14 +27065,15 @@ class SqlMISource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: - super(SqlMISource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: + super(SqlMISource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters @@ -26020,7 +27224,7 @@ def __init__(self, *, additional_properties=None, write_batch_size=None, write_b self.type = 'SqlServerSink' -class SqlServerSource(CopySource): +class SqlServerSource(TabularSource): """A copy activity SQL server source. All required parameters must be populated in order to send to Azure. @@ -26041,6 +27245,10 @@ class SqlServerSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). 
:type sql_reader_query: object @@ -26066,14 +27274,15 @@ class SqlServerSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: - super(SqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: + super(SqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters @@ -26290,7 +27499,7 @@ def __init__(self, *, additional_properties=None, write_batch_size=None, write_b self.type = 'SqlSink' -class SqlSource(CopySource): +class SqlSource(TabularSource): """A copy activity SQL source. All required parameters must be populated in order to send to Azure. @@ -26311,6 +27520,10 @@ class SqlSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). 
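The SQL-family sources keep their reader-query and stored-procedure parameters while gaining the same inherited query_timeout. A hedged sketch against SqlServerSource; the procedure name and parameter are hypothetical, and StoredProcedureParameter is assumed to accept value and type as elsewhere in this models package:

from azure.mgmt.datafactory.models import SqlServerSource, StoredProcedureParameter

source = SqlServerSource(
    sql_reader_stored_procedure_name='usp_GetChanges',  # hypothetical procedure
    stored_procedure_parameters={
        'since': StoredProcedureParameter(value='2019-01-01', type='DateTime'),
    },
    query_timeout='02:00:00',
)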
:type sql_reader_query: object @@ -26334,13 +27547,14 @@ class SqlSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, **kwargs) -> None: - super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, **kwargs) -> None: + super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters @@ -26493,7 +27707,7 @@ def __init__(self, *, linked_service_name, additional_properties=None, descripti self.type = 'SquareObject' -class SquareSource(CopySource): +class SquareSource(TabularSource): """A copy activity Square Service source. All required parameters must be populated in order to send to Azure. @@ -26514,6 +27728,10 @@ class SquareSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -26529,11 +27747,12 @@ class SquareSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(SquareSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: + super(SquareSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.type = 'SquareSource' @@ -27264,7 +28483,7 @@ def __init__(self, *, server, database, additional_properties=None, connect_via= self.type = 'Sybase' -class SybaseSource(CopySource): +class SybaseSource(TabularSource): """A copy activity source for Sybase databases. All required parameters must be populated in order to send to Azure. @@ -27285,6 +28504,10 @@ class SybaseSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object @@ -27300,11 +28523,12 @@ class SybaseSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(SybaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: + super(SybaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.type = 'SybaseSource' @@ -27471,7 +28695,7 @@ def __init__(self, *, partition_column_name=None, partition_upper_bound=None, pa self.partition_lower_bound = partition_lower_bound -class TeradataSource(CopySource): +class TeradataSource(TabularSource): """A copy activity Teradata source. 
All required parameters must be populated in order to send to Azure. @@ -27492,6 +28716,10 @@ class TeradataSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: Teradata query. Type: string (or Expression with resultType string). :type query: object @@ -27516,13 +28744,14 @@ class TeradataSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, partition_option=None, partition_settings=None, **kwargs) -> None: - super(TeradataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, partition_option=None, partition_settings=None, **kwargs) -> None: + super(TeradataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.partition_option = partition_option self.partition_settings = partition_settings @@ -28407,7 +29636,7 @@ def __init__(self, *, additional_properties=None, connect_via=None, description: self.type = 'Vertica' -class VerticaSource(CopySource): +class VerticaSource(TabularSource): """A copy activity Vertica source. All required parameters must be populated in order to send to Azure. @@ -28428,6 +29657,10 @@ class VerticaSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
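TeradataSource combines the new query_timeout with its existing partition settings, whose keyword arguments appear unchanged earlier in this diff. A sketch; 'DynamicRange' is assumed to be an accepted partition option, and the column and bounds are hypothetical:

from azure.mgmt.datafactory.models import TeradataSource, TeradataPartitionSettings

source = TeradataSource(
    query_timeout='01:00:00',
    partition_option='DynamicRange',  # assumed enum value
    partition_settings=TeradataPartitionSettings(
        partition_column_name='order_id',
        partition_lower_bound='1',
        partition_upper_bound='1000000',
    ),
)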
:type query: object @@ -28443,11 +29676,12 @@ class VerticaSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(VerticaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: + super(VerticaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.type = 'VerticaSource' @@ -29215,7 +30449,7 @@ def __init__(self, *, linked_service_name, additional_properties=None, descripti self.type = 'XeroObject' -class XeroSource(CopySource): +class XeroSource(TabularSource): """A copy activity Xero Service source. All required parameters must be populated in order to send to Azure. @@ -29236,6 +30470,10 @@ class XeroSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -29251,11 +30489,12 @@ class XeroSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(XeroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: + super(XeroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.type = 'XeroSource' @@ -29393,7 +30632,7 @@ def __init__(self, *, linked_service_name, additional_properties=None, descripti self.type = 'ZohoObject' -class ZohoSource(CopySource): +class ZohoSource(TabularSource): """A copy activity Zoho server source. 
All required parameters must be populated in order to send to Azure. @@ -29414,6 +30653,10 @@ :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param query_timeout: Query timeout. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -29429,10 +30672,11 @@ 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: - super(ZohoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: + super(ZohoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) self.query = query self.type = 'ZohoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_paged_models.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_paged_models.py index 4092d2143a7c..f78455cfdb9a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_paged_models.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_paged_models.py @@ -116,3 +116,29 @@ class RerunTriggerResourcePaged(Paged): def __init__(self, *args, **kwargs): super(RerunTriggerResourcePaged, self).__init__(*args, **kwargs) +class DataFlowResourcePaged(Paged): + """ + A paging container for iterating over a list of :class:`DataFlowResource` objects + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[DataFlowResource]'} + } + + def __init__(self, *args, **kwargs): + + super(DataFlowResourcePaged, self).__init__(*args, **kwargs) +class DataFlowDebugSessionInfoPaged(Paged): + """ + A paging container for iterating over a list of :class:`DataFlowDebugSessionInfo` objects + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[DataFlowDebugSessionInfo]'} + } + + def __init__(self, *args, **kwargs): + + super(DataFlowDebugSessionInfoPaged, self).__init__(*args, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py index 013675bd0200..619150f2d6a8 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py @@
-23,6 +23,8 @@ from ._triggers_operations import TriggersOperations from ._trigger_runs_operations import TriggerRunsOperations from ._rerun_triggers_operations import RerunTriggersOperations +from ._data_flows_operations import DataFlowsOperations +from ._data_flow_debug_session_operations import DataFlowDebugSessionOperations __all__ = [ 'Operations', @@ -39,4 +41,6 @@ 'TriggersOperations', 'TriggerRunsOperations', 'RerunTriggersOperations', + 'DataFlowsOperations', + 'DataFlowDebugSessionOperations', ] diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flow_debug_session_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flow_debug_session_operations.py new file mode 100644 index 000000000000..8281c8a22a5d --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flow_debug_session_operations.py @@ -0,0 +1,390 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrestazure.azure_exceptions import CloudError + +from .. import models + + +class DataFlowDebugSessionOperations(object): + """DataFlowDebugSessionOperations operations. + + You should not instantiate this class directly, but create a Client instance that will create it for you and attach it as an attribute. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version. Constant value: "2018-06-01". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2018-06-01" + + self.config = config + + def create( + self, resource_group_name, factory_name, request, custom_headers=None, raw=False, **operation_config): + """Creates a data flow debug session. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param request: Data flow debug session definition. + :type request: + ~azure.mgmt.datafactory.models.CreateDataFlowDebugSessionRequest + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`.
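In practice the operation is reached through the client's new data_flow_debug_session attribute. A sketch of starting a session; the CreateDataFlowDebugSessionRequest fields used here (compute_type, core_count, time_to_live) and the session_id attribute on the response are assumptions about model shapes this diff does not define:

from azure.mgmt.datafactory import DataFactoryManagementClient
from azure.mgmt.datafactory.models import CreateDataFlowDebugSessionRequest

# `credentials` and `subscription_id` are assumed to be defined elsewhere.
client = DataFactoryManagementClient(credentials, subscription_id)

create_response = client.data_flow_debug_session.create(
    'my-rg', 'my-factory',
    CreateDataFlowDebugSessionRequest(
        compute_type='General',  # assumed field
        core_count=8,            # assumed field
        time_to_live=60,         # assumed field (minutes)
    ),
)
session_id = create_response.session_id  # assumed response attribute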
+ :return: CreateDataFlowDebugSessionResponse or ClientRawResponse if + raw=true + :rtype: + ~azure.mgmt.datafactory.models.CreateDataFlowDebugSessionResponse or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.create.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(request, 'CreateDataFlowDebugSessionRequest') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 202]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + header_dict = {} + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('CreateDataFlowDebugSessionResponse', response) + header_dict = { + 'location': 'str', + } + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + client_raw_response.add_headers(header_dict) + return client_raw_response + + return deserialized + create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/createDataFlowDebugSession'} + + def query_by_factory( + self, resource_group_name, factory_name, custom_headers=None, raw=False, **operation_config): + """Query all active data flow debug sessions. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
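Because query_by_factory returns a DataFlowDebugSessionInfoPaged, active sessions are enumerated with an ordinary for-loop and the pager follows nextLink transparently; session_id is an assumed attribute of DataFlowDebugSessionInfo:

# Continuing the sketch above.
for session in client.data_flow_debug_session.query_by_factory('my-rg', 'my-factory'):
    print(session.session_id)  # assumed attribute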
+ :return: An iterator like instance of DataFlowDebugSessionInfo + :rtype: + ~azure.mgmt.datafactory.models.DataFlowDebugSessionInfoPaged[~azure.mgmt.datafactory.models.DataFlowDebugSessionInfo] + :raises: :class:`CloudError` + """ + def prepare_request(next_link=None): + if not next_link: + # Construct URL + url = self.query_by_factory.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + return request + + def internal_paging(next_link=None): + request = prepare_request(next_link) + + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + header_dict = None + if raw: + header_dict = {} + deserialized = models.DataFlowDebugSessionInfoPaged(internal_paging, self._deserialize.dependencies, header_dict) + + return deserialized + query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryDataFlowDebugSessions'} + + def add_data_flow( + self, resource_group_name, factory_name, request, custom_headers=None, raw=False, **operation_config): + """Add a data flow into debug session. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param request: Data flow debug session definition with debug content. + :type request: ~azure.mgmt.datafactory.models.DataFlowDebugPackage + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
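Attaching a flow to a running session means wrapping the debug content in a DataFlowDebugPackage. A sketch under stated assumptions: DataFlowDebugResource is assumed to be the wrapper pairing a name with a DataFlow payload, and the MappingDataFlow content is left empty for brevity:

from azure.mgmt.datafactory.models import (
    DataFlowDebugPackage,
    DataFlowDebugResource,  # assumed wrapper model
    MappingDataFlow,
)

package = DataFlowDebugPackage(
    session_id=session_id,
    data_flow=DataFlowDebugResource(
        name='debug-flow',
        properties=MappingDataFlow(),  # sources/sinks/script omitted for brevity
    ),
)
add_response = client.data_flow_debug_session.add_data_flow('my-rg', 'my-factory', package)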
+ :return: AddDataFlowToDebugSessionResponse or ClientRawResponse if + raw=true + :rtype: + ~azure.mgmt.datafactory.models.AddDataFlowToDebugSessionResponse or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.add_data_flow.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(request, 'DataFlowDebugPackage') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('AddDataFlowToDebugSessionResponse', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + add_data_flow.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/addDataFlowToDebugSession'} + + def delete( + self, resource_group_name, factory_name, session_id=None, custom_headers=None, raw=False, **operation_config): + """Deletes a data flow debug session. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param session_id: The ID of data flow debug session. + :type session_id: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
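Tear-down is symmetric: as the method body below shows, delete wraps session_id into a DeleteDataFlowDebugSessionRequest internally, so callers pass the ID directly:

client.data_flow_debug_session.delete('my-rg', 'my-factory', session_id=session_id)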
+ :return: None or ClientRawResponse if raw=true + :rtype: None or ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + request = models.DeleteDataFlowDebugSessionRequest(session_id=session_id) + + # Construct URL + url = self.delete.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(request, 'DeleteDataFlowDebugSessionRequest') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/deleteDataFlowDebugSession'} + + def execute_command( + self, resource_group_name, factory_name, request, custom_headers=None, raw=False, **operation_config): + """Execute a data flow debug command. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param request: Data flow debug command definition. + :type request: + ~azure.mgmt.datafactory.models.DataFlowDebugCommandRequest + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
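Commands against the session go through execute_command. The request and payload field names below (command, command_payload, stream_name, row_limits) and the status and data attributes on the response are assumptions about the DataFlowDebugCommand* models, which this change imports but does not define:

from azure.mgmt.datafactory.models import (
    DataFlowDebugCommandRequest,
    DataFlowDebugCommandPayload,
)

result = client.data_flow_debug_session.execute_command(
    'my-rg', 'my-factory',
    DataFlowDebugCommandRequest(
        session_id=session_id,
        command='executePreviewQuery',  # assumed command name
        command_payload=DataFlowDebugCommandPayload(
            stream_name='sink1',  # assumed: stream to preview
            row_limits=100,
        ),
    ),
)
print(result.status, result.data)  # assumed response attributes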
+ :return: DataFlowDebugCommandResponse or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.datafactory.models.DataFlowDebugCommandResponse or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.execute_command.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(request, 'DataFlowDebugCommandRequest') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 202]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + header_dict = {} + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('DataFlowDebugCommandResponse', response) + header_dict = { + 'location': 'str', + } + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + client_raw_response.add_headers(header_dict) + return client_raw_response + + return deserialized + execute_command.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/executeDataFlowDebugCommand'} diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flows_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flows_operations.py new file mode 100644 index 000000000000..435fce439be5 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flows_operations.py @@ -0,0 +1,316 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrestazure.azure_exceptions import CloudError + +from .. 
import models + + class DataFlowsOperations(object): + """DataFlowsOperations operations. + + You should not instantiate this class directly, but create a Client instance that will create it for you and attach it as an attribute. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version. Constant value: "2018-06-01". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2018-06-01" + + self.config = config + + def create_or_update( + self, resource_group_name, factory_name, data_flow_name, properties, if_match=None, custom_headers=None, raw=False, **operation_config): + """Creates or updates a data flow. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param data_flow_name: The data flow name. + :type data_flow_name: str + :param properties: Data flow properties. + :type properties: ~azure.mgmt.datafactory.models.DataFlow + :param if_match: ETag of the data flow entity. Should only be + specified for update, for which it should match existing entity or can + be * for unconditional update. + :type if_match: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: DataFlowResource or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.datafactory.models.DataFlowResource or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + data_flow = models.DataFlowResource(properties=properties) + + # Construct URL + url = self.create_or_update.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(data_flow, 'DataFlowResource')
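Note that create_or_update wraps the caller's properties in a DataFlowResource before serializing, so the public surface takes the DataFlow payload directly. A usage sketch with an illustrative flow name and an empty script:

from azure.mgmt.datafactory.models import MappingDataFlow

flow = MappingDataFlow(
    description='hypothetical example flow',
    script='',  # the data flow script is normally produced by the authoring UI
)
created = client.data_flows.create_or_update('my-rg', 'my-factory', 'exampleFlow', flow)
fetched = client.data_flows.get('my-rg', 'my-factory', 'exampleFlow')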
+ + # Construct and send request + request = self._client.put(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('DataFlowResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'} + + def get( + self, resource_group_name, factory_name, data_flow_name, if_none_match=None, custom_headers=None, raw=False, **operation_config): + """Gets a data flow. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param data_flow_name: The data flow name. + :type data_flow_name: str + :param if_none_match: ETag of the data flow entity. Should only be + specified for get. If the ETag matches the existing entity tag, or if + * was provided, then no content will be returned. + :type if_none_match: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: DataFlowResource or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.datafactory.models.DataFlowResource or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = 
response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('DataFlowResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'} + + def delete( + self, resource_group_name, factory_name, data_flow_name, custom_headers=None, raw=False, **operation_config): + """Deletes a data flow. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param data_flow_name: The data flow name. + :type data_flow_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: None or ClientRawResponse if raw=true + :rtype: None or ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.delete.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'dataFlowName': self._serialize.url("data_flow_name", data_flow_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.delete(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 204]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows/{dataFlowName}'} + + def list_by_factory( + self, resource_group_name, factory_name, custom_headers=None, raw=False, **operation_config): + """Lists data flows. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. 
+ :type factory_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: An iterator like instance of DataFlowResource + :rtype: + ~azure.mgmt.datafactory.models.DataFlowResourcePaged[~azure.mgmt.datafactory.models.DataFlowResource] + :raises: :class:`CloudError` + """ + def prepare_request(next_link=None): + if not next_link: + # Construct URL + url = self.list_by_factory.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + return request + + def internal_paging(next_link=None): + request = prepare_request(next_link) + + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + header_dict = None + if raw: + header_dict = {} + deserialized = models.DataFlowResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) + + return deserialized + list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/dataflows'}
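Finally, list_by_factory pages through DataFlowResourcePaged just like the debug-session listing, and delete completes the CRUD surface; the name attribute is assumed to come from the resource envelope:

# Continuing the sketch above.
for resource in client.data_flows.list_by_factory('my-rg', 'my-factory'):
    print(resource.name, type(resource.properties).__name__)

client.data_flows.delete('my-rg', 'my-factory', 'exampleFlow')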