From d1f90db48b76e5ab425862af532bdd999b6eb40e Mon Sep 17 00:00:00 2001 From: Xiang Yan Date: Mon, 29 Mar 2021 09:34:43 -0700 Subject: [PATCH 1/3] add adf support --- .../azure-synapse-artifacts/CHANGELOG.md | 16 +- .../azure/synapse/artifacts/_version.py | 2 +- .../synapse/artifacts/models/__init__.py | 168 +- .../models/_artifacts_client_enums.py | 52 +- .../azure/synapse/artifacts/models/_models.py | 3901 ++++++++++- .../synapse/artifacts/models/_models_py3.py | 5713 ++++++++++++++--- 6 files changed, 8926 insertions(+), 926 deletions(-) diff --git a/sdk/synapse/azure-synapse-artifacts/CHANGELOG.md b/sdk/synapse/azure-synapse-artifacts/CHANGELOG.md index b4cddc36faa6..e2354a9de8c6 100644 --- a/sdk/synapse/azure-synapse-artifacts/CHANGELOG.md +++ b/sdk/synapse/azure-synapse-artifacts/CHANGELOG.md @@ -1,33 +1,39 @@ # Release History +## 0.6.0 (2021-04-06) + +### New Features + +- Add Azure Data Factory (ADF) support + ## 0.5.0 (2021-03-09) -** Features ** +### New Features - Add library operations - Change create_or_update_sql_script, delete_sql_script, rename_sql_script to long running operations -** Breaking changes ** +### Breaking Changes - Stop Python 3.5 support ## 0.4.0 (2020-12-08) -** Features ** +### New Features - Add Workspace git repo management operations - Add rename method for data flow, dataset, linked service, notebook, pipeline, spark job definition, sql script operations ## 0.3.0 (2020-09-15) -** Features ** +### New Features - Add Workspace operations - Add SqlPools operations - Add BigDataPools operations - Add IntegrationRuntimes operations -** Breaking changes ** +### Breaking Changes - Migrated most long running operation to polling mechanism (operation now starts with `begin`) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_version.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_version.py index c4551baee432..9d17420e1c80 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_version.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated.
# -------------------------------------------------------------------------- -VERSION = "0.5.0" +VERSION = "0.6.0" diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/__init__.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/__init__.py index 08f76e9320ef..5f143cc9aa5f 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/__init__.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/__init__.py @@ -13,12 +13,14 @@ from ._models_py3 import ActivityRun from ._models_py3 import ActivityRunsQueryResponse from ._models_py3 import AddDataFlowToDebugSessionResponse + from ._models_py3 import AdditionalColumns from ._models_py3 import AmazonMWSLinkedService from ._models_py3 import AmazonMWSObjectDataset from ._models_py3 import AmazonMWSSource from ._models_py3 import AmazonRedshiftLinkedService from ._models_py3 import AmazonRedshiftSource from ._models_py3 import AmazonRedshiftTableDataset + from ._models_py3 import AmazonS3Dataset from ._models_py3 import AmazonS3LinkedService from ._models_py3 import AmazonS3Location from ._models_py3 import AmazonS3ReadSettings @@ -32,6 +34,8 @@ from ._models_py3 import AvroSource from ._models_py3 import AvroWriteSettings from ._models_py3 import AzureBatchLinkedService + from ._models_py3 import AzureBlobDataset + from ._models_py3 import AzureBlobFSDataset from ._models_py3 import AzureBlobFSLinkedService from ._models_py3 import AzureBlobFSLocation from ._models_py3 import AzureBlobFSReadSettings @@ -48,17 +52,25 @@ from ._models_py3 import AzureDataExplorerSource from ._models_py3 import AzureDataExplorerTableDataset from ._models_py3 import AzureDataLakeAnalyticsLinkedService + from ._models_py3 import AzureDataLakeStoreDataset from ._models_py3 import AzureDataLakeStoreLinkedService from ._models_py3 import AzureDataLakeStoreLocation from ._models_py3 import AzureDataLakeStoreReadSettings from ._models_py3 import AzureDataLakeStoreSink from ._models_py3 import AzureDataLakeStoreSource from ._models_py3 import AzureDataLakeStoreWriteSettings + from ._models_py3 import AzureDatabricksDeltaLakeDataset + from ._models_py3 import AzureDatabricksDeltaLakeExportCommand + from ._models_py3 import AzureDatabricksDeltaLakeImportCommand + from ._models_py3 import AzureDatabricksDeltaLakeLinkedService + from ._models_py3 import AzureDatabricksDeltaLakeSink + from ._models_py3 import AzureDatabricksDeltaLakeSource from ._models_py3 import AzureDatabricksLinkedService from ._models_py3 import AzureEntityResource from ._models_py3 import AzureFileStorageLinkedService from ._models_py3 import AzureFileStorageLocation from ._models_py3 import AzureFileStorageReadSettings + from ._models_py3 import AzureFileStorageWriteSettings from ._models_py3 import AzureFunctionActivity from ._models_py3 import AzureFunctionLinkedService from ._models_py3 import AzureKeyVaultLinkedService @@ -101,6 +113,7 @@ from ._models_py3 import BigDataPoolResourceInfo from ._models_py3 import BigDataPoolResourceInfoListResult from ._models_py3 import BinaryDataset + from ._models_py3 import BinaryReadSettings from ._models_py3 import BinarySink from ._models_py3 import BinarySource from ._models_py3 import BlobEventsTrigger @@ -116,11 +129,13 @@ from ._models_py3 import CommonDataServiceForAppsLinkedService from ._models_py3 import CommonDataServiceForAppsSink from ._models_py3 import CommonDataServiceForAppsSource + from ._models_py3 import CompressionReadSettings from ._models_py3 import 
ConcurLinkedService from ._models_py3 import ConcurObjectDataset from ._models_py3 import ConcurSource from ._models_py3 import ControlActivity from ._models_py3 import CopyActivity + from ._models_py3 import CopyActivityLogSettings from ._models_py3 import CopySink from ._models_py3 import CopySource from ._models_py3 import CopyTranslator @@ -142,6 +157,7 @@ from ._models_py3 import CustomActivityReferenceObject from ._models_py3 import CustomDataSourceLinkedService from ._models_py3 import CustomDataset + from ._models_py3 import CustomEventsTrigger from ._models_py3 import CustomSetupBase from ._models_py3 import CustomerManagedKeyDetails from ._models_py3 import DWCopyCommandDefaultValue @@ -184,6 +200,8 @@ from ._models_py3 import DatasetResource from ._models_py3 import DatasetSchemaDataElement from ._models_py3 import DatasetStorageFormat + from ._models_py3 import DatasetTarCompression + from ._models_py3 import DatasetTarGZipCompression from ._models_py3 import DatasetZipDeflateCompression from ._models_py3 import Db2LinkedService from ._models_py3 import Db2Source @@ -224,11 +242,14 @@ from ._models_py3 import ErrorContract from ._models_py3 import ErrorResponse from ._models_py3 import EvaluateDataFlowExpressionRequest + from ._models_py3 import ExcelDataset + from ._models_py3 import ExcelSource from ._models_py3 import ExecuteDataFlowActivity from ._models_py3 import ExecuteDataFlowActivityTypePropertiesCompute from ._models_py3 import ExecutePipelineActivity from ._models_py3 import ExecuteSSISPackageActivity from ._models_py3 import ExecutionActivity + from ._models_py3 import ExportSettings from ._models_py3 import ExposureControlRequest from ._models_py3 import ExposureControlResponse from ._models_py3 import Expression @@ -236,6 +257,7 @@ from ._models_py3 import FileServerLocation from ._models_py3 import FileServerReadSettings from ._models_py3 import FileServerWriteSettings + from ._models_py3 import FileShareDataset from ._models_py3 import FileSystemSink from ._models_py3 import FileSystemSource from ._models_py3 import FilterActivity @@ -278,6 +300,7 @@ from ._models_py3 import HiveLinkedService from ._models_py3 import HiveObjectDataset from ._models_py3 import HiveSource + from ._models_py3 import HttpDataset from ._models_py3 import HttpLinkedService from ._models_py3 import HttpReadSettings from ._models_py3 import HttpServerLocation @@ -289,6 +312,7 @@ from ._models_py3 import ImpalaLinkedService from ._models_py3 import ImpalaObjectDataset from ._models_py3 import ImpalaSource + from ._models_py3 import ImportSettings from ._models_py3 import InformixLinkedService from ._models_py3 import InformixSink from ._models_py3 import InformixSource @@ -309,6 +333,7 @@ from ._models_py3 import JiraSource from ._models_py3 import JsonDataset from ._models_py3 import JsonFormat + from ._models_py3 import JsonReadSettings from ._models_py3 import JsonSink from ._models_py3 import JsonSource from ._models_py3 import JsonWriteSettings @@ -326,6 +351,8 @@ from ._models_py3 import LinkedServiceListResponse from ._models_py3 import LinkedServiceReference from ._models_py3 import LinkedServiceResource + from ._models_py3 import LogLocationSettings + from ._models_py3 import LogSettings from ._models_py3 import LogStorageSettings from ._models_py3 import LookupActivity from ._models_py3 import MagentoLinkedService @@ -333,6 +360,7 @@ from ._models_py3 import MagentoSource from ._models_py3 import ManagedIdentity from ._models_py3 import ManagedIntegrationRuntime + from 
._models_py3 import ManagedVirtualNetworkReference from ._models_py3 import ManagedVirtualNetworkSettings from ._models_py3 import MappingDataFlow from ._models_py3 import MariaDBLinkedService @@ -345,6 +373,9 @@ from ._models_py3 import MicrosoftAccessSink from ._models_py3 import MicrosoftAccessSource from ._models_py3 import MicrosoftAccessTableDataset + from ._models_py3 import MongoDbAtlasCollectionDataset + from ._models_py3 import MongoDbAtlasLinkedService + from ._models_py3 import MongoDbAtlasSource from ._models_py3 import MongoDbCollectionDataset from ._models_py3 import MongoDbCursorMethodsProperties from ._models_py3 import MongoDbLinkedService @@ -392,11 +423,13 @@ from ._models_py3 import OrcFormat from ._models_py3 import OrcSink from ._models_py3 import OrcSource + from ._models_py3 import OrcWriteSettings from ._models_py3 import ParameterSpecification from ._models_py3 import ParquetDataset from ._models_py3 import ParquetFormat from ._models_py3 import ParquetSink from ._models_py3 import ParquetSource + from ._models_py3 import ParquetWriteSettings from ._models_py3 import PaypalLinkedService from ._models_py3 import PaypalObjectDataset from ._models_py3 import PaypalSource @@ -442,6 +475,7 @@ from ._models_py3 import ResponsysSource from ._models_py3 import RestResourceDataset from ._models_py3 import RestServiceLinkedService + from ._models_py3 import RestSink from ._models_py3 import RestSource from ._models_py3 import RetryPolicy from ._models_py3 import RunFilterParameters @@ -501,10 +535,20 @@ from ._models_py3 import SftpReadSettings from ._models_py3 import SftpServerLinkedService from ._models_py3 import SftpWriteSettings + from ._models_py3 import SharePointOnlineListLinkedService + from ._models_py3 import SharePointOnlineListResourceDataset + from ._models_py3 import SharePointOnlineListSource from ._models_py3 import ShopifyLinkedService from ._models_py3 import ShopifyObjectDataset from ._models_py3 import ShopifySource + from ._models_py3 import SkipErrorFile from ._models_py3 import Sku + from ._models_py3 import SnowflakeDataset + from ._models_py3 import SnowflakeExportCopyCommand + from ._models_py3 import SnowflakeImportCopyCommand + from ._models_py3 import SnowflakeLinkedService + from ._models_py3 import SnowflakeSink + from ._models_py3 import SnowflakeSource from ._models_py3 import SparkBatchJob from ._models_py3 import SparkBatchJobState from ._models_py3 import SparkJobDefinition @@ -523,6 +567,7 @@ from ._models_py3 import SqlDWSource from ._models_py3 import SqlMISink from ._models_py3 import SqlMISource + from ._models_py3 import SqlPartitionSettings from ._models_py3 import SqlPool from ._models_py3 import SqlPoolInfoListResult from ._models_py3 import SqlPoolReference @@ -562,6 +607,8 @@ from ._models_py3 import SynapseSparkJobReference from ._models_py3 import TabularSource from ._models_py3 import TabularTranslator + from ._models_py3 import TarGZipReadSettings + from ._models_py3 import TarReadSettings from ._models_py3 import TeradataLinkedService from ._models_py3 import TeradataPartitionSettings from ._models_py3 import TeradataSource @@ -581,6 +628,7 @@ from ._models_py3 import TriggerSubscriptionOperationStatus from ._models_py3 import TumblingWindowTrigger from ._models_py3 import TumblingWindowTriggerDependencyReference + from ._models_py3 import TypeConversionSettings from ._models_py3 import UntilActivity from ._models_py3 import UserProperty from ._models_py3 import ValidationActivity @@ -608,6 +656,10 @@ from ._models_py3 
import XeroLinkedService from ._models_py3 import XeroObjectDataset from ._models_py3 import XeroSource + from ._models_py3 import XmlDataset + from ._models_py3 import XmlReadSettings + from ._models_py3 import XmlSource + from ._models_py3 import ZipDeflateReadSettings from ._models_py3 import ZohoLinkedService from ._models_py3 import ZohoObjectDataset from ._models_py3 import ZohoSource @@ -618,12 +670,14 @@ from ._models import ActivityRun # type: ignore from ._models import ActivityRunsQueryResponse # type: ignore from ._models import AddDataFlowToDebugSessionResponse # type: ignore + from ._models import AdditionalColumns # type: ignore from ._models import AmazonMWSLinkedService # type: ignore from ._models import AmazonMWSObjectDataset # type: ignore from ._models import AmazonMWSSource # type: ignore from ._models import AmazonRedshiftLinkedService # type: ignore from ._models import AmazonRedshiftSource # type: ignore from ._models import AmazonRedshiftTableDataset # type: ignore + from ._models import AmazonS3Dataset # type: ignore from ._models import AmazonS3LinkedService # type: ignore from ._models import AmazonS3Location # type: ignore from ._models import AmazonS3ReadSettings # type: ignore @@ -637,6 +691,8 @@ from ._models import AvroSource # type: ignore from ._models import AvroWriteSettings # type: ignore from ._models import AzureBatchLinkedService # type: ignore + from ._models import AzureBlobDataset # type: ignore + from ._models import AzureBlobFSDataset # type: ignore from ._models import AzureBlobFSLinkedService # type: ignore from ._models import AzureBlobFSLocation # type: ignore from ._models import AzureBlobFSReadSettings # type: ignore @@ -653,17 +709,25 @@ from ._models import AzureDataExplorerSource # type: ignore from ._models import AzureDataExplorerTableDataset # type: ignore from ._models import AzureDataLakeAnalyticsLinkedService # type: ignore + from ._models import AzureDataLakeStoreDataset # type: ignore from ._models import AzureDataLakeStoreLinkedService # type: ignore from ._models import AzureDataLakeStoreLocation # type: ignore from ._models import AzureDataLakeStoreReadSettings # type: ignore from ._models import AzureDataLakeStoreSink # type: ignore from ._models import AzureDataLakeStoreSource # type: ignore from ._models import AzureDataLakeStoreWriteSettings # type: ignore + from ._models import AzureDatabricksDeltaLakeDataset # type: ignore + from ._models import AzureDatabricksDeltaLakeExportCommand # type: ignore + from ._models import AzureDatabricksDeltaLakeImportCommand # type: ignore + from ._models import AzureDatabricksDeltaLakeLinkedService # type: ignore + from ._models import AzureDatabricksDeltaLakeSink # type: ignore + from ._models import AzureDatabricksDeltaLakeSource # type: ignore from ._models import AzureDatabricksLinkedService # type: ignore from ._models import AzureEntityResource # type: ignore from ._models import AzureFileStorageLinkedService # type: ignore from ._models import AzureFileStorageLocation # type: ignore from ._models import AzureFileStorageReadSettings # type: ignore + from ._models import AzureFileStorageWriteSettings # type: ignore from ._models import AzureFunctionActivity # type: ignore from ._models import AzureFunctionLinkedService # type: ignore from ._models import AzureKeyVaultLinkedService # type: ignore @@ -706,6 +770,7 @@ from ._models import BigDataPoolResourceInfo # type: ignore from ._models import BigDataPoolResourceInfoListResult # type: ignore from ._models import BinaryDataset # 
type: ignore + from ._models import BinaryReadSettings # type: ignore from ._models import BinarySink # type: ignore from ._models import BinarySource # type: ignore from ._models import BlobEventsTrigger # type: ignore @@ -721,11 +786,13 @@ from ._models import CommonDataServiceForAppsLinkedService # type: ignore from ._models import CommonDataServiceForAppsSink # type: ignore from ._models import CommonDataServiceForAppsSource # type: ignore + from ._models import CompressionReadSettings # type: ignore from ._models import ConcurLinkedService # type: ignore from ._models import ConcurObjectDataset # type: ignore from ._models import ConcurSource # type: ignore from ._models import ControlActivity # type: ignore from ._models import CopyActivity # type: ignore + from ._models import CopyActivityLogSettings # type: ignore from ._models import CopySink # type: ignore from ._models import CopySource # type: ignore from ._models import CopyTranslator # type: ignore @@ -747,6 +814,7 @@ from ._models import CustomActivityReferenceObject # type: ignore from ._models import CustomDataSourceLinkedService # type: ignore from ._models import CustomDataset # type: ignore + from ._models import CustomEventsTrigger # type: ignore from ._models import CustomSetupBase # type: ignore from ._models import CustomerManagedKeyDetails # type: ignore from ._models import DWCopyCommandDefaultValue # type: ignore @@ -789,6 +857,8 @@ from ._models import DatasetResource # type: ignore from ._models import DatasetSchemaDataElement # type: ignore from ._models import DatasetStorageFormat # type: ignore + from ._models import DatasetTarCompression # type: ignore + from ._models import DatasetTarGZipCompression # type: ignore from ._models import DatasetZipDeflateCompression # type: ignore from ._models import Db2LinkedService # type: ignore from ._models import Db2Source # type: ignore @@ -829,11 +899,14 @@ from ._models import ErrorContract # type: ignore from ._models import ErrorResponse # type: ignore from ._models import EvaluateDataFlowExpressionRequest # type: ignore + from ._models import ExcelDataset # type: ignore + from ._models import ExcelSource # type: ignore from ._models import ExecuteDataFlowActivity # type: ignore from ._models import ExecuteDataFlowActivityTypePropertiesCompute # type: ignore from ._models import ExecutePipelineActivity # type: ignore from ._models import ExecuteSSISPackageActivity # type: ignore from ._models import ExecutionActivity # type: ignore + from ._models import ExportSettings # type: ignore from ._models import ExposureControlRequest # type: ignore from ._models import ExposureControlResponse # type: ignore from ._models import Expression # type: ignore @@ -841,6 +914,7 @@ from ._models import FileServerLocation # type: ignore from ._models import FileServerReadSettings # type: ignore from ._models import FileServerWriteSettings # type: ignore + from ._models import FileShareDataset # type: ignore from ._models import FileSystemSink # type: ignore from ._models import FileSystemSource # type: ignore from ._models import FilterActivity # type: ignore @@ -883,6 +957,7 @@ from ._models import HiveLinkedService # type: ignore from ._models import HiveObjectDataset # type: ignore from ._models import HiveSource # type: ignore + from ._models import HttpDataset # type: ignore from ._models import HttpLinkedService # type: ignore from ._models import HttpReadSettings # type: ignore from ._models import HttpServerLocation # type: ignore @@ -894,6 +969,7 @@ from ._models import 
ImpalaLinkedService # type: ignore from ._models import ImpalaObjectDataset # type: ignore from ._models import ImpalaSource # type: ignore + from ._models import ImportSettings # type: ignore from ._models import InformixLinkedService # type: ignore from ._models import InformixSink # type: ignore from ._models import InformixSource # type: ignore @@ -914,6 +990,7 @@ from ._models import JiraSource # type: ignore from ._models import JsonDataset # type: ignore from ._models import JsonFormat # type: ignore + from ._models import JsonReadSettings # type: ignore from ._models import JsonSink # type: ignore from ._models import JsonSource # type: ignore from ._models import JsonWriteSettings # type: ignore @@ -931,6 +1008,8 @@ from ._models import LinkedServiceListResponse # type: ignore from ._models import LinkedServiceReference # type: ignore from ._models import LinkedServiceResource # type: ignore + from ._models import LogLocationSettings # type: ignore + from ._models import LogSettings # type: ignore from ._models import LogStorageSettings # type: ignore from ._models import LookupActivity # type: ignore from ._models import MagentoLinkedService # type: ignore @@ -938,6 +1017,7 @@ from ._models import MagentoSource # type: ignore from ._models import ManagedIdentity # type: ignore from ._models import ManagedIntegrationRuntime # type: ignore + from ._models import ManagedVirtualNetworkReference # type: ignore from ._models import ManagedVirtualNetworkSettings # type: ignore from ._models import MappingDataFlow # type: ignore from ._models import MariaDBLinkedService # type: ignore @@ -950,6 +1030,9 @@ from ._models import MicrosoftAccessSink # type: ignore from ._models import MicrosoftAccessSource # type: ignore from ._models import MicrosoftAccessTableDataset # type: ignore + from ._models import MongoDbAtlasCollectionDataset # type: ignore + from ._models import MongoDbAtlasLinkedService # type: ignore + from ._models import MongoDbAtlasSource # type: ignore from ._models import MongoDbCollectionDataset # type: ignore from ._models import MongoDbCursorMethodsProperties # type: ignore from ._models import MongoDbLinkedService # type: ignore @@ -997,11 +1080,13 @@ from ._models import OrcFormat # type: ignore from ._models import OrcSink # type: ignore from ._models import OrcSource # type: ignore + from ._models import OrcWriteSettings # type: ignore from ._models import ParameterSpecification # type: ignore from ._models import ParquetDataset # type: ignore from ._models import ParquetFormat # type: ignore from ._models import ParquetSink # type: ignore from ._models import ParquetSource # type: ignore + from ._models import ParquetWriteSettings # type: ignore from ._models import PaypalLinkedService # type: ignore from ._models import PaypalObjectDataset # type: ignore from ._models import PaypalSource # type: ignore @@ -1047,6 +1132,7 @@ from ._models import ResponsysSource # type: ignore from ._models import RestResourceDataset # type: ignore from ._models import RestServiceLinkedService # type: ignore + from ._models import RestSink # type: ignore from ._models import RestSource # type: ignore from ._models import RetryPolicy # type: ignore from ._models import RunFilterParameters # type: ignore @@ -1106,10 +1192,20 @@ from ._models import SftpReadSettings # type: ignore from ._models import SftpServerLinkedService # type: ignore from ._models import SftpWriteSettings # type: ignore + from ._models import SharePointOnlineListLinkedService # type: ignore + from ._models import 
SharePointOnlineListResourceDataset # type: ignore + from ._models import SharePointOnlineListSource # type: ignore from ._models import ShopifyLinkedService # type: ignore from ._models import ShopifyObjectDataset # type: ignore from ._models import ShopifySource # type: ignore + from ._models import SkipErrorFile # type: ignore from ._models import Sku # type: ignore + from ._models import SnowflakeDataset # type: ignore + from ._models import SnowflakeExportCopyCommand # type: ignore + from ._models import SnowflakeImportCopyCommand # type: ignore + from ._models import SnowflakeLinkedService # type: ignore + from ._models import SnowflakeSink # type: ignore + from ._models import SnowflakeSource # type: ignore from ._models import SparkBatchJob # type: ignore from ._models import SparkBatchJobState # type: ignore from ._models import SparkJobDefinition # type: ignore @@ -1128,6 +1224,7 @@ from ._models import SqlDWSource # type: ignore from ._models import SqlMISink # type: ignore from ._models import SqlMISource # type: ignore + from ._models import SqlPartitionSettings # type: ignore from ._models import SqlPool # type: ignore from ._models import SqlPoolInfoListResult # type: ignore from ._models import SqlPoolReference # type: ignore @@ -1167,6 +1264,8 @@ from ._models import SynapseSparkJobReference # type: ignore from ._models import TabularSource # type: ignore from ._models import TabularTranslator # type: ignore + from ._models import TarGZipReadSettings # type: ignore + from ._models import TarReadSettings # type: ignore from ._models import TeradataLinkedService # type: ignore from ._models import TeradataPartitionSettings # type: ignore from ._models import TeradataSource # type: ignore @@ -1186,6 +1285,7 @@ from ._models import TriggerSubscriptionOperationStatus # type: ignore from ._models import TumblingWindowTrigger # type: ignore from ._models import TumblingWindowTriggerDependencyReference # type: ignore + from ._models import TypeConversionSettings # type: ignore from ._models import UntilActivity # type: ignore from ._models import UserProperty # type: ignore from ._models import ValidationActivity # type: ignore @@ -1213,6 +1313,10 @@ from ._models import XeroLinkedService # type: ignore from ._models import XeroObjectDataset # type: ignore from ._models import XeroSource # type: ignore + from ._models import XmlDataset # type: ignore + from ._models import XmlReadSettings # type: ignore + from ._models import XmlSource # type: ignore + from ._models import ZipDeflateReadSettings # type: ignore from ._models import ZohoLinkedService # type: ignore from ._models import ZohoObjectDataset # type: ignore from ._models import ZohoSource # type: ignore @@ -1225,14 +1329,15 @@ BlobEventType, CassandraSourceReadConsistencyLevels, CellOutputType, + CompressionCodec, CopyBehaviorType, DataFlowComputeType, DataFlowReferenceType, DatasetCompressionLevel, DatasetReferenceType, DayOfWeek, + DaysOfWeek, Db2AuthenticationType, - DelimitedTextCompressionCodec, DependencyCondition, DynamicsAuthenticationType, DynamicsDeploymentType, @@ -1270,7 +1375,7 @@ OraclePartitionOption, OrcCompressionCodec, ParameterType, - ParquetCompressionCodec, + ParquetCompressionCodecEnum, PhoenixAuthenticationType, PipelineReferenceType, PluginCurrentState, @@ -1300,6 +1405,7 @@ SparkServerType, SparkThriftTransportProtocol, SqlConnectionType, + SqlPartitionOption, SqlPoolReferenceType, SqlScriptType, SsisLogLocationType, @@ -1326,12 +1432,14 @@ 'ActivityRun', 'ActivityRunsQueryResponse', 
'AddDataFlowToDebugSessionResponse', + 'AdditionalColumns', 'AmazonMWSLinkedService', 'AmazonMWSObjectDataset', 'AmazonMWSSource', 'AmazonRedshiftLinkedService', 'AmazonRedshiftSource', 'AmazonRedshiftTableDataset', + 'AmazonS3Dataset', 'AmazonS3LinkedService', 'AmazonS3Location', 'AmazonS3ReadSettings', @@ -1345,6 +1453,8 @@ 'AvroSource', 'AvroWriteSettings', 'AzureBatchLinkedService', + 'AzureBlobDataset', + 'AzureBlobFSDataset', 'AzureBlobFSLinkedService', 'AzureBlobFSLocation', 'AzureBlobFSReadSettings', @@ -1361,17 +1471,25 @@ 'AzureDataExplorerSource', 'AzureDataExplorerTableDataset', 'AzureDataLakeAnalyticsLinkedService', + 'AzureDataLakeStoreDataset', 'AzureDataLakeStoreLinkedService', 'AzureDataLakeStoreLocation', 'AzureDataLakeStoreReadSettings', 'AzureDataLakeStoreSink', 'AzureDataLakeStoreSource', 'AzureDataLakeStoreWriteSettings', + 'AzureDatabricksDeltaLakeDataset', + 'AzureDatabricksDeltaLakeExportCommand', + 'AzureDatabricksDeltaLakeImportCommand', + 'AzureDatabricksDeltaLakeLinkedService', + 'AzureDatabricksDeltaLakeSink', + 'AzureDatabricksDeltaLakeSource', 'AzureDatabricksLinkedService', 'AzureEntityResource', 'AzureFileStorageLinkedService', 'AzureFileStorageLocation', 'AzureFileStorageReadSettings', + 'AzureFileStorageWriteSettings', 'AzureFunctionActivity', 'AzureFunctionLinkedService', 'AzureKeyVaultLinkedService', @@ -1414,6 +1532,7 @@ 'BigDataPoolResourceInfo', 'BigDataPoolResourceInfoListResult', 'BinaryDataset', + 'BinaryReadSettings', 'BinarySink', 'BinarySource', 'BlobEventsTrigger', @@ -1429,11 +1548,13 @@ 'CommonDataServiceForAppsLinkedService', 'CommonDataServiceForAppsSink', 'CommonDataServiceForAppsSource', + 'CompressionReadSettings', 'ConcurLinkedService', 'ConcurObjectDataset', 'ConcurSource', 'ControlActivity', 'CopyActivity', + 'CopyActivityLogSettings', 'CopySink', 'CopySource', 'CopyTranslator', @@ -1455,6 +1576,7 @@ 'CustomActivityReferenceObject', 'CustomDataSourceLinkedService', 'CustomDataset', + 'CustomEventsTrigger', 'CustomSetupBase', 'CustomerManagedKeyDetails', 'DWCopyCommandDefaultValue', @@ -1497,6 +1619,8 @@ 'DatasetResource', 'DatasetSchemaDataElement', 'DatasetStorageFormat', + 'DatasetTarCompression', + 'DatasetTarGZipCompression', 'DatasetZipDeflateCompression', 'Db2LinkedService', 'Db2Source', @@ -1537,11 +1661,14 @@ 'ErrorContract', 'ErrorResponse', 'EvaluateDataFlowExpressionRequest', + 'ExcelDataset', + 'ExcelSource', 'ExecuteDataFlowActivity', 'ExecuteDataFlowActivityTypePropertiesCompute', 'ExecutePipelineActivity', 'ExecuteSSISPackageActivity', 'ExecutionActivity', + 'ExportSettings', 'ExposureControlRequest', 'ExposureControlResponse', 'Expression', @@ -1549,6 +1676,7 @@ 'FileServerLocation', 'FileServerReadSettings', 'FileServerWriteSettings', + 'FileShareDataset', 'FileSystemSink', 'FileSystemSource', 'FilterActivity', @@ -1591,6 +1719,7 @@ 'HiveLinkedService', 'HiveObjectDataset', 'HiveSource', + 'HttpDataset', 'HttpLinkedService', 'HttpReadSettings', 'HttpServerLocation', @@ -1602,6 +1731,7 @@ 'ImpalaLinkedService', 'ImpalaObjectDataset', 'ImpalaSource', + 'ImportSettings', 'InformixLinkedService', 'InformixSink', 'InformixSource', @@ -1622,6 +1752,7 @@ 'JiraSource', 'JsonDataset', 'JsonFormat', + 'JsonReadSettings', 'JsonSink', 'JsonSource', 'JsonWriteSettings', @@ -1639,6 +1770,8 @@ 'LinkedServiceListResponse', 'LinkedServiceReference', 'LinkedServiceResource', + 'LogLocationSettings', + 'LogSettings', 'LogStorageSettings', 'LookupActivity', 'MagentoLinkedService', @@ -1646,6 +1779,7 @@ 'MagentoSource', 
'ManagedIdentity', 'ManagedIntegrationRuntime', + 'ManagedVirtualNetworkReference', 'ManagedVirtualNetworkSettings', 'MappingDataFlow', 'MariaDBLinkedService', @@ -1658,6 +1792,9 @@ 'MicrosoftAccessSink', 'MicrosoftAccessSource', 'MicrosoftAccessTableDataset', + 'MongoDbAtlasCollectionDataset', + 'MongoDbAtlasLinkedService', + 'MongoDbAtlasSource', 'MongoDbCollectionDataset', 'MongoDbCursorMethodsProperties', 'MongoDbLinkedService', @@ -1705,11 +1842,13 @@ 'OrcFormat', 'OrcSink', 'OrcSource', + 'OrcWriteSettings', 'ParameterSpecification', 'ParquetDataset', 'ParquetFormat', 'ParquetSink', 'ParquetSource', + 'ParquetWriteSettings', 'PaypalLinkedService', 'PaypalObjectDataset', 'PaypalSource', @@ -1755,6 +1894,7 @@ 'ResponsysSource', 'RestResourceDataset', 'RestServiceLinkedService', + 'RestSink', 'RestSource', 'RetryPolicy', 'RunFilterParameters', @@ -1814,10 +1954,20 @@ 'SftpReadSettings', 'SftpServerLinkedService', 'SftpWriteSettings', + 'SharePointOnlineListLinkedService', + 'SharePointOnlineListResourceDataset', + 'SharePointOnlineListSource', 'ShopifyLinkedService', 'ShopifyObjectDataset', 'ShopifySource', + 'SkipErrorFile', 'Sku', + 'SnowflakeDataset', + 'SnowflakeExportCopyCommand', + 'SnowflakeImportCopyCommand', + 'SnowflakeLinkedService', + 'SnowflakeSink', + 'SnowflakeSource', 'SparkBatchJob', 'SparkBatchJobState', 'SparkJobDefinition', @@ -1836,6 +1986,7 @@ 'SqlDWSource', 'SqlMISink', 'SqlMISource', + 'SqlPartitionSettings', 'SqlPool', 'SqlPoolInfoListResult', 'SqlPoolReference', @@ -1875,6 +2026,8 @@ 'SynapseSparkJobReference', 'TabularSource', 'TabularTranslator', + 'TarGZipReadSettings', + 'TarReadSettings', 'TeradataLinkedService', 'TeradataPartitionSettings', 'TeradataSource', @@ -1894,6 +2047,7 @@ 'TriggerSubscriptionOperationStatus', 'TumblingWindowTrigger', 'TumblingWindowTriggerDependencyReference', + 'TypeConversionSettings', 'UntilActivity', 'UserProperty', 'ValidationActivity', @@ -1921,6 +2075,10 @@ 'XeroLinkedService', 'XeroObjectDataset', 'XeroSource', + 'XmlDataset', + 'XmlReadSettings', + 'XmlSource', + 'ZipDeflateReadSettings', 'ZohoLinkedService', 'ZohoObjectDataset', 'ZohoSource', @@ -1931,14 +2089,15 @@ 'BlobEventType', 'CassandraSourceReadConsistencyLevels', 'CellOutputType', + 'CompressionCodec', 'CopyBehaviorType', 'DataFlowComputeType', 'DataFlowReferenceType', 'DatasetCompressionLevel', 'DatasetReferenceType', 'DayOfWeek', + 'DaysOfWeek', 'Db2AuthenticationType', - 'DelimitedTextCompressionCodec', 'DependencyCondition', 'DynamicsAuthenticationType', 'DynamicsDeploymentType', @@ -1976,7 +2135,7 @@ 'OraclePartitionOption', 'OrcCompressionCodec', 'ParameterType', - 'ParquetCompressionCodec', + 'ParquetCompressionCodecEnum', 'PhoenixAuthenticationType', 'PipelineReferenceType', 'PluginCurrentState', @@ -2006,6 +2165,7 @@ 'SparkServerType', 'SparkThriftTransportProtocol', 'SqlConnectionType', + 'SqlPartitionOption', 'SqlPoolReferenceType', 'SqlScriptType', 'SsisLogLocationType', diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_artifacts_client_enums.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_artifacts_client_enums.py index 2ce1b708e7c1..8316033e8e61 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_artifacts_client_enums.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_artifacts_client_enums.py @@ -91,6 +91,17 @@ class CellOutputType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): STREAM = "stream" ERROR = "error" +class 
CompressionCodec(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + BZIP2 = "bzip2" + GZIP = "gzip" + DEFLATE = "deflate" + ZIP_DEFLATE = "zipDeflate" + SNAPPY = "snappy" + LZ4 = "lz4" + TAR = "tar" + TAR_G_ZIP = "tarGZip" + class CopyBehaviorType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """All available types of copy behavior. """ @@ -114,8 +125,6 @@ class DataFlowReferenceType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)) DATA_FLOW_REFERENCE = "DataFlowReference" class DatasetCompressionLevel(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """All available compression levels. - """ OPTIMAL = "Optimal" FASTEST = "Fastest" @@ -127,6 +136,18 @@ class DatasetReferenceType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): DATASET_REFERENCE = "DatasetReference" class DayOfWeek(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The days of the week. + """ + + SUNDAY = "Sunday" + MONDAY = "Monday" + TUESDAY = "Tuesday" + WEDNESDAY = "Wednesday" + THURSDAY = "Thursday" + FRIDAY = "Friday" + SATURDAY = "Saturday" + +class DaysOfWeek(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): SUNDAY = "Sunday" MONDAY = "Monday" @@ -137,20 +158,12 @@ class DayOfWeek(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): SATURDAY = "Saturday" class Db2AuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """AuthenticationType to be used for connection. + """AuthenticationType to be used for connection. It is mutually exclusive with connectionString + property. """ BASIC = "Basic" -class DelimitedTextCompressionCodec(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - - BZIP2 = "bzip2" - GZIP = "gzip" - DEFLATE = "deflate" - ZIP_DEFLATE = "zipDeflate" - SNAPPY = "snappy" - LZ4 = "lz4" - class DependencyCondition(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): SUCCEEDED = "Succeeded" @@ -187,7 +200,7 @@ class DynamicsServicePrincipalCredentialType(with_metaclass(_CaseInsensitiveEnum SERVICE_PRINCIPAL_CERT = "ServicePrincipalCert" class DynamicsSinkWriteBehavior(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """The write behavior for the operation. + """Defines values for DynamicsSinkWriteBehavior. """ UPSERT = "Upsert" @@ -441,6 +454,7 @@ class OrcCompressionCodec(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): NONE = "none" ZLIB = "zlib" SNAPPY = "snappy" + LZO = "lzo" class ParameterType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Parameter type. @@ -454,7 +468,7 @@ class ParameterType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): ARRAY = "Array" SECURE_STRING = "SecureString" -class ParquetCompressionCodec(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class ParquetCompressionCodecEnum(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): NONE = "none" GZIP = "gzip" @@ -708,6 +722,14 @@ class SqlConnectionType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): SQL_ON_DEMAND = "SqlOnDemand" SQL_POOL = "SqlPool" +class SqlPartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The partition mechanism that will be used for Sql read in parallel. + """ + + NONE = "None" + PHYSICAL_PARTITIONS_OF_TABLE = "PhysicalPartitionsOfTable" + DYNAMIC_RANGE = "DynamicRange" + class SqlPoolReferenceType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """SQL pool reference type. 
""" @@ -733,6 +755,7 @@ class SsisPackageLocationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum SSISDB = "SSISDB" FILE = "File" INLINE_PACKAGE = "InlinePackage" + PACKAGE_STORE = "PackageStore" class StoredProcedureParameterType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Stored procedure parameter type. @@ -796,6 +819,7 @@ class TumblingWindowFrequency(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum MINUTE = "Minute" HOUR = "Hour" + MONTH = "Month" class Type(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Linked service reference type. diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models.py index 15aa2697503b..c35b797b9a06 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models.py @@ -288,11 +288,34 @@ def __init__( self.job_version = kwargs.get('job_version', None) +class AdditionalColumns(msrest.serialization.Model): + """Specify the column name and value of additional columns. + + :param name: Additional column name. Type: string (or Expression with resultType string). + :type name: object + :param value: Additional column value. Type: string (or Expression with resultType string). + :type value: object + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'object'}, + 'value': {'key': 'value', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(AdditionalColumns, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.value = kwargs.get('value', None) + + class LinkedService(msrest.serialization.Model): """The Azure Synapse nested object which contains the information and credential which can be used to connect with related store or compute resource. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: AmazonMWSLinkedService, AmazonRedshiftLinkedService, AmazonS3LinkedService, AzureBatchLinkedService, AzureBlobFSLinkedService, AzureBlobStorageLinkedService, AzureDataExplorerLinkedService, AzureDataLakeAnalyticsLinkedService, AzureDataLakeStoreLinkedService, AzureDatabricksLinkedService, AzureFileStorageLinkedService, AzureFunctionLinkedService, AzureKeyVaultLinkedService, AzureMLLinkedService, AzureMLServiceLinkedService, AzureMariaDBLinkedService, AzureMySqlLinkedService, AzurePostgreSqlLinkedService, AzureSearchLinkedService, AzureSqlDWLinkedService, AzureSqlDatabaseLinkedService, AzureSqlMILinkedService, AzureStorageLinkedService, AzureTableStorageLinkedService, CassandraLinkedService, CommonDataServiceForAppsLinkedService, ConcurLinkedService, CosmosDbLinkedService, CosmosDbMongoDbApiLinkedService, CouchbaseLinkedService, CustomDataSourceLinkedService, Db2LinkedService, DrillLinkedService, DynamicsLinkedService, DynamicsAXLinkedService, DynamicsCrmLinkedService, EloquaLinkedService, FileServerLinkedService, FtpServerLinkedService, GoogleAdWordsLinkedService, GoogleBigQueryLinkedService, GoogleCloudStorageLinkedService, GreenplumLinkedService, HBaseLinkedService, HDInsightLinkedService, HDInsightOnDemandLinkedService, HdfsLinkedService, HiveLinkedService, HttpLinkedService, HubspotLinkedService, ImpalaLinkedService, InformixLinkedService, JiraLinkedService, MagentoLinkedService, MariaDBLinkedService, MarketoLinkedService, MicrosoftAccessLinkedService, MongoDbLinkedService, MongoDbV2LinkedService, MySqlLinkedService, NetezzaLinkedService, ODataLinkedService, OdbcLinkedService, Office365LinkedService, OracleLinkedService, OracleServiceCloudLinkedService, PaypalLinkedService, PhoenixLinkedService, PostgreSqlLinkedService, PrestoLinkedService, QuickBooksLinkedService, ResponsysLinkedService, RestServiceLinkedService, SalesforceLinkedService, SalesforceMarketingCloudLinkedService, SalesforceServiceCloudLinkedService, SapBWLinkedService, SapCloudForCustomerLinkedService, SapEccLinkedService, SapHanaLinkedService, SapOpenHubLinkedService, SapTableLinkedService, ServiceNowLinkedService, SftpServerLinkedService, ShopifyLinkedService, SparkLinkedService, SqlServerLinkedService, SquareLinkedService, SybaseLinkedService, TeradataLinkedService, VerticaLinkedService, WebLinkedService, XeroLinkedService, ZohoLinkedService. 
+ sub-classes are: AmazonMWSLinkedService, AmazonRedshiftLinkedService, AmazonS3LinkedService, AzureBatchLinkedService, AzureBlobFSLinkedService, AzureBlobStorageLinkedService, AzureDataExplorerLinkedService, AzureDataLakeAnalyticsLinkedService, AzureDataLakeStoreLinkedService, AzureDatabricksLinkedService, AzureDatabricksDeltaLakeLinkedService, AzureFileStorageLinkedService, AzureFunctionLinkedService, AzureKeyVaultLinkedService, AzureMLLinkedService, AzureMLServiceLinkedService, AzureMariaDBLinkedService, AzureMySqlLinkedService, AzurePostgreSqlLinkedService, AzureSearchLinkedService, AzureSqlDWLinkedService, AzureSqlDatabaseLinkedService, AzureSqlMILinkedService, AzureStorageLinkedService, AzureTableStorageLinkedService, CassandraLinkedService, CommonDataServiceForAppsLinkedService, ConcurLinkedService, CosmosDbLinkedService, CosmosDbMongoDbApiLinkedService, CouchbaseLinkedService, CustomDataSourceLinkedService, Db2LinkedService, DrillLinkedService, DynamicsLinkedService, DynamicsAXLinkedService, DynamicsCrmLinkedService, EloquaLinkedService, FileServerLinkedService, FtpServerLinkedService, GoogleAdWordsLinkedService, GoogleBigQueryLinkedService, GoogleCloudStorageLinkedService, GreenplumLinkedService, HBaseLinkedService, HDInsightLinkedService, HDInsightOnDemandLinkedService, HdfsLinkedService, HiveLinkedService, HttpLinkedService, HubspotLinkedService, ImpalaLinkedService, InformixLinkedService, JiraLinkedService, MagentoLinkedService, MariaDBLinkedService, MarketoLinkedService, MicrosoftAccessLinkedService, MongoDbLinkedService, MongoDbAtlasLinkedService, MongoDbV2LinkedService, MySqlLinkedService, NetezzaLinkedService, ODataLinkedService, OdbcLinkedService, Office365LinkedService, OracleLinkedService, OracleServiceCloudLinkedService, PaypalLinkedService, PhoenixLinkedService, PostgreSqlLinkedService, PrestoLinkedService, QuickBooksLinkedService, ResponsysLinkedService, RestServiceLinkedService, SalesforceLinkedService, SalesforceMarketingCloudLinkedService, SalesforceServiceCloudLinkedService, SapBWLinkedService, SapCloudForCustomerLinkedService, SapEccLinkedService, SapHanaLinkedService, SapOpenHubLinkedService, SapTableLinkedService, ServiceNowLinkedService, SftpServerLinkedService, SharePointOnlineListLinkedService, ShopifyLinkedService, SnowflakeLinkedService, SparkLinkedService, SqlServerLinkedService, SquareLinkedService, SybaseLinkedService, TeradataLinkedService, VerticaLinkedService, WebLinkedService, XeroLinkedService, ZohoLinkedService. All required parameters must be populated in order to send to Azure. 
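A minimal usage sketch of one of the linked service types listed above (illustrative only; the connection string is a placeholder, and the connection_string keyword follows the usual generated py3 model signature rather than anything shown in this hunk):

from azure.synapse.artifacts.models import SnowflakeLinkedService

# Construct one of the newly added linked service models; the generated
# __init__ sets the 'Snowflake' discriminator that _subtype_map (updated
# below) routes on during polymorphic (de)serialization.
snowflake_ls = SnowflakeLinkedService(
    connection_string="<snowflake-connection-string>"  # placeholder, assumed required
)
print(snowflake_ls.type)  # "Snowflake"
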
@@ -325,7 +348,7 @@ class LinkedService(msrest.serialization.Model): } _subtype_map = { - 'type': {'AmazonMWS': 'AmazonMWSLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'AzureFunction': 'AzureFunctionLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureML': 'AzureMLLinkedService', 'AzureMLService': 'AzureMLServiceLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureStorage': 'AzureStorageLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'Cassandra': 'CassandraLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'Concur': 'ConcurLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'Db2': 'Db2LinkedService', 'Drill': 'DrillLinkedService', 'Dynamics': 'DynamicsLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Eloqua': 'EloquaLinkedService', 'FileServer': 'FileServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'Greenplum': 'GreenplumLinkedService', 'HBase': 'HBaseLinkedService', 'HDInsight': 'HDInsightLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'Hdfs': 'HdfsLinkedService', 'Hive': 'HiveLinkedService', 'HttpServer': 'HttpLinkedService', 'Hubspot': 'HubspotLinkedService', 'Impala': 'ImpalaLinkedService', 'Informix': 'InformixLinkedService', 'Jira': 'JiraLinkedService', 'Magento': 'MagentoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Marketo': 'MarketoLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'MongoDb': 'MongoDbLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MySql': 'MySqlLinkedService', 'Netezza': 'NetezzaLinkedService', 'OData': 'ODataLinkedService', 'Odbc': 'OdbcLinkedService', 'Office365': 'Office365LinkedService', 'Oracle': 'OracleLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'Paypal': 'PaypalLinkedService', 'Phoenix': 'PhoenixLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'Presto': 'PrestoLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Responsys': 'ResponsysLinkedService', 'RestService': 'RestServiceLinkedService', 'Salesforce': 'SalesforceLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'SapBW': 'SapBWLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapTable': 
'SapTableLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'Sftp': 'SftpServerLinkedService', 'Shopify': 'ShopifyLinkedService', 'Spark': 'SparkLinkedService', 'SqlServer': 'SqlServerLinkedService', 'Square': 'SquareLinkedService', 'Sybase': 'SybaseLinkedService', 'Teradata': 'TeradataLinkedService', 'Vertica': 'VerticaLinkedService', 'Web': 'WebLinkedService', 'Xero': 'XeroLinkedService', 'Zoho': 'ZohoLinkedService'} + 'type': {'AmazonMWS': 'AmazonMWSLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDatabricksDeltaLake': 'AzureDatabricksDeltaLakeLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'AzureFunction': 'AzureFunctionLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureML': 'AzureMLLinkedService', 'AzureMLService': 'AzureMLServiceLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureStorage': 'AzureStorageLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'Cassandra': 'CassandraLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'Concur': 'ConcurLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'Db2': 'Db2LinkedService', 'Drill': 'DrillLinkedService', 'Dynamics': 'DynamicsLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Eloqua': 'EloquaLinkedService', 'FileServer': 'FileServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'Greenplum': 'GreenplumLinkedService', 'HBase': 'HBaseLinkedService', 'HDInsight': 'HDInsightLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'Hdfs': 'HdfsLinkedService', 'Hive': 'HiveLinkedService', 'HttpServer': 'HttpLinkedService', 'Hubspot': 'HubspotLinkedService', 'Impala': 'ImpalaLinkedService', 'Informix': 'InformixLinkedService', 'Jira': 'JiraLinkedService', 'Magento': 'MagentoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Marketo': 'MarketoLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'MongoDb': 'MongoDbLinkedService', 'MongoDbAtlas': 'MongoDbAtlasLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MySql': 'MySqlLinkedService', 'Netezza': 'NetezzaLinkedService', 'OData': 'ODataLinkedService', 'Odbc': 'OdbcLinkedService', 'Office365': 'Office365LinkedService', 'Oracle': 'OracleLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'Paypal': 'PaypalLinkedService', 'Phoenix': 'PhoenixLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'Presto': 'PrestoLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Responsys': 
'ResponsysLinkedService', 'RestService': 'RestServiceLinkedService', 'Salesforce': 'SalesforceLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'SapBW': 'SapBWLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapTable': 'SapTableLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'Sftp': 'SftpServerLinkedService', 'SharePointOnlineList': 'SharePointOnlineListLinkedService', 'Shopify': 'ShopifyLinkedService', 'Snowflake': 'SnowflakeLinkedService', 'Spark': 'SparkLinkedService', 'SqlServer': 'SqlServerLinkedService', 'Square': 'SquareLinkedService', 'Sybase': 'SybaseLinkedService', 'Teradata': 'TeradataLinkedService', 'Vertica': 'VerticaLinkedService', 'Web': 'WebLinkedService', 'Xero': 'XeroLinkedService', 'Zoho': 'ZohoLinkedService'} } def __init__( @@ -439,7 +462,7 @@ class Dataset(msrest.serialization.Model): """The Azure Data Factory nested object which identifies data within different data stores, such as tables, files, folders, and documents. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmazonMWSObjectDataset, AmazonRedshiftTableDataset, AvroDataset, AzureDataExplorerTableDataset, AzureMariaDBTableDataset, AzureMySqlTableDataset, AzurePostgreSqlTableDataset, AzureSearchIndexDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, AzureSqlTableDataset, AzureTableDataset, BinaryDataset, CassandraTableDataset, CommonDataServiceForAppsEntityDataset, ConcurObjectDataset, CosmosDbMongoDbApiCollectionDataset, CosmosDbSqlApiCollectionDataset, CouchbaseTableDataset, CustomDataset, Db2TableDataset, DelimitedTextDataset, DocumentDbCollectionDataset, DrillTableDataset, DynamicsAXResourceDataset, DynamicsCrmEntityDataset, DynamicsEntityDataset, EloquaObjectDataset, GoogleAdWordsObjectDataset, GoogleBigQueryObjectDataset, GreenplumTableDataset, HBaseObjectDataset, HiveObjectDataset, HubspotObjectDataset, ImpalaObjectDataset, InformixTableDataset, JiraObjectDataset, JsonDataset, MagentoObjectDataset, MariaDBTableDataset, MarketoObjectDataset, MicrosoftAccessTableDataset, MongoDbCollectionDataset, MongoDbV2CollectionDataset, MySqlTableDataset, NetezzaTableDataset, ODataResourceDataset, OdbcTableDataset, Office365Dataset, OracleServiceCloudObjectDataset, OracleTableDataset, OrcDataset, ParquetDataset, PaypalObjectDataset, PhoenixObjectDataset, PostgreSqlTableDataset, PrestoObjectDataset, QuickBooksObjectDataset, RelationalTableDataset, ResponsysObjectDataset, RestResourceDataset, SalesforceMarketingCloudObjectDataset, SalesforceObjectDataset, SalesforceServiceCloudObjectDataset, SapBwCubeDataset, SapCloudForCustomerResourceDataset, SapEccResourceDataset, SapHanaTableDataset, SapOpenHubTableDataset, SapTableResourceDataset, ServiceNowObjectDataset, ShopifyObjectDataset, SparkObjectDataset, SqlServerTableDataset, SquareObjectDataset, SybaseTableDataset, TeradataTableDataset, VerticaTableDataset, WebTableDataset, XeroObjectDataset, ZohoObjectDataset. 
+ sub-classes are: AmazonMWSObjectDataset, AmazonRedshiftTableDataset, AmazonS3Dataset, AvroDataset, AzureBlobDataset, AzureBlobFSDataset, AzureDataExplorerTableDataset, AzureDataLakeStoreDataset, AzureDatabricksDeltaLakeDataset, AzureMariaDBTableDataset, AzureMySqlTableDataset, AzurePostgreSqlTableDataset, AzureSearchIndexDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, AzureSqlTableDataset, AzureTableDataset, BinaryDataset, CassandraTableDataset, CommonDataServiceForAppsEntityDataset, ConcurObjectDataset, CosmosDbMongoDbApiCollectionDataset, CosmosDbSqlApiCollectionDataset, CouchbaseTableDataset, CustomDataset, Db2TableDataset, DelimitedTextDataset, DocumentDbCollectionDataset, DrillTableDataset, DynamicsAXResourceDataset, DynamicsCrmEntityDataset, DynamicsEntityDataset, EloquaObjectDataset, ExcelDataset, FileShareDataset, GoogleAdWordsObjectDataset, GoogleBigQueryObjectDataset, GreenplumTableDataset, HBaseObjectDataset, HiveObjectDataset, HttpDataset, HubspotObjectDataset, ImpalaObjectDataset, InformixTableDataset, JiraObjectDataset, JsonDataset, MagentoObjectDataset, MariaDBTableDataset, MarketoObjectDataset, MicrosoftAccessTableDataset, MongoDbAtlasCollectionDataset, MongoDbCollectionDataset, MongoDbV2CollectionDataset, MySqlTableDataset, NetezzaTableDataset, ODataResourceDataset, OdbcTableDataset, Office365Dataset, OracleServiceCloudObjectDataset, OracleTableDataset, OrcDataset, ParquetDataset, PaypalObjectDataset, PhoenixObjectDataset, PostgreSqlTableDataset, PrestoObjectDataset, QuickBooksObjectDataset, RelationalTableDataset, ResponsysObjectDataset, RestResourceDataset, SalesforceMarketingCloudObjectDataset, SalesforceObjectDataset, SalesforceServiceCloudObjectDataset, SapBwCubeDataset, SapCloudForCustomerResourceDataset, SapEccResourceDataset, SapHanaTableDataset, SapOpenHubTableDataset, SapTableResourceDataset, ServiceNowObjectDataset, SharePointOnlineListResourceDataset, ShopifyObjectDataset, SnowflakeDataset, SparkObjectDataset, SqlServerTableDataset, SquareObjectDataset, SybaseTableDataset, TeradataTableDataset, VerticaTableDataset, WebTableDataset, XeroObjectDataset, XmlDataset, ZohoObjectDataset. All required parameters must be populated in order to send to Azure. 
@@ -485,7 +508,7 @@ class Dataset(msrest.serialization.Model): } _subtype_map = { - 'type': {'AmazonMWSObject': 'AmazonMWSObjectDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'Avro': 'AvroDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'Binary': 'BinaryDataset', 'CassandraTable': 'CassandraTableDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'ConcurObject': 'ConcurObjectDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDbSqlApiCollectionDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'CustomDataset': 'CustomDataset', 'Db2Table': 'Db2TableDataset', 'DelimitedText': 'DelimitedTextDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'DrillTable': 'DrillTableDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'EloquaObject': 'EloquaObjectDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'HBaseObject': 'HBaseObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'InformixTable': 'InformixTableDataset', 'JiraObject': 'JiraObjectDataset', 'Json': 'JsonDataset', 'MagentoObject': 'MagentoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MarketoObject': 'MarketoObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MySqlTable': 'MySqlTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ODataResource': 'ODataResourceDataset', 'OdbcTable': 'OdbcTableDataset', 'Office365Table': 'Office365Dataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'OracleTable': 'OracleTableDataset', 'Orc': 'OrcDataset', 'Parquet': 'ParquetDataset', 'PaypalObject': 'PaypalObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'PrestoObject': 'PrestoObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'RestResource': 'RestResourceDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SapBwCube': 'SapBwCubeDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'SparkObject': 'SparkObjectDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SquareObject': 'SquareObjectDataset', 'SybaseTable': 'SybaseTableDataset', 'TeradataTable': 'TeradataTableDataset', 'VerticaTable': 'VerticaTableDataset', 'WebTable': 'WebTableDataset', 
'XeroObject': 'XeroObjectDataset', 'ZohoObject': 'ZohoObjectDataset'} + 'type': {'AmazonMWSObject': 'AmazonMWSObjectDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AmazonS3Object': 'AmazonS3Dataset', 'Avro': 'AvroDataset', 'AzureBlob': 'AzureBlobDataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'AzureDatabricksDeltaLakeDataset': 'AzureDatabricksDeltaLakeDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'Binary': 'BinaryDataset', 'CassandraTable': 'CassandraTableDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'ConcurObject': 'ConcurObjectDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDbSqlApiCollectionDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'CustomDataset': 'CustomDataset', 'Db2Table': 'Db2TableDataset', 'DelimitedText': 'DelimitedTextDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'DrillTable': 'DrillTableDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'EloquaObject': 'EloquaObjectDataset', 'Excel': 'ExcelDataset', 'FileShare': 'FileShareDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'HBaseObject': 'HBaseObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HttpFile': 'HttpDataset', 'HubspotObject': 'HubspotObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'InformixTable': 'InformixTableDataset', 'JiraObject': 'JiraObjectDataset', 'Json': 'JsonDataset', 'MagentoObject': 'MagentoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MarketoObject': 'MarketoObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'MongoDbAtlasCollection': 'MongoDbAtlasCollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MySqlTable': 'MySqlTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ODataResource': 'ODataResourceDataset', 'OdbcTable': 'OdbcTableDataset', 'Office365Table': 'Office365Dataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'OracleTable': 'OracleTableDataset', 'Orc': 'OrcDataset', 'Parquet': 'ParquetDataset', 'PaypalObject': 'PaypalObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'PrestoObject': 'PrestoObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'RestResource': 'RestResourceDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SapBwCube': 'SapBwCubeDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapTableResource': 
'SapTableResourceDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'SharePointOnlineListResource': 'SharePointOnlineListResourceDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'SnowflakeTable': 'SnowflakeDataset', 'SparkObject': 'SparkObjectDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SquareObject': 'SquareObjectDataset', 'SybaseTable': 'SybaseTableDataset', 'TeradataTable': 'TeradataTableDataset', 'VerticaTable': 'VerticaTableDataset', 'WebTable': 'WebTableDataset', 'XeroObject': 'XeroObjectDataset', 'Xml': 'XmlDataset', 'ZohoObject': 'ZohoObjectDataset'} } def __init__( @@ -566,7 +589,7 @@ class CopySource(msrest.serialization.Model): """A copy activity source. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroSource, AzureBlobFSSource, AzureDataExplorerSource, AzureDataLakeStoreSource, BinarySource, BlobSource, CommonDataServiceForAppsSource, CosmosDbMongoDbApiSource, CosmosDbSqlApiSource, DelimitedTextSource, DocumentDbCollectionSource, DynamicsCrmSource, DynamicsSource, FileSystemSource, HdfsSource, HttpSource, JsonSource, MicrosoftAccessSource, MongoDbSource, MongoDbV2Source, ODataSource, Office365Source, OracleSource, OrcSource, ParquetSource, RelationalSource, RestSource, SalesforceServiceCloudSource, TabularSource, WebSource. + sub-classes are: AvroSource, AzureBlobFSSource, AzureDataExplorerSource, AzureDataLakeStoreSource, AzureDatabricksDeltaLakeSource, BinarySource, BlobSource, CommonDataServiceForAppsSource, CosmosDbMongoDbApiSource, CosmosDbSqlApiSource, DelimitedTextSource, DocumentDbCollectionSource, DynamicsCrmSource, DynamicsSource, ExcelSource, FileSystemSource, HdfsSource, HttpSource, JsonSource, MicrosoftAccessSource, MongoDbAtlasSource, MongoDbSource, MongoDbV2Source, ODataSource, Office365Source, OracleSource, OrcSource, ParquetSource, RelationalSource, RestSource, SalesforceServiceCloudSource, SharePointOnlineListSource, SnowflakeSource, TabularSource, WebSource, XmlSource. All required parameters must be populated in order to send to Azure. 
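For context, the expanded `_subtype_map` above is what lets the generated client round-trip the new ADF dataset types: msrest dispatches on the `type` discriminator when deserializing a generic `Dataset` payload. A minimal sketch of that behaviour, assuming an illustrative wire payload (the linked-service name, paths and file names are placeholders, not values from this patch):

```python
# Sketch only: the expanded _subtype_map drives msrest's polymorphic
# deserialization, so a Dataset payload whose "type" discriminator is one of
# the newly added values should come back as the concrete subclass.
from azure.synapse.artifacts.models import Dataset, AzureBlobDataset

raw = {
    "type": "AzureBlob",                              # discriminator added in this patch
    "linkedServiceName": {
        "referenceName": "MyBlobLinkedService",       # placeholder reference name
        "type": "LinkedServiceReference",
    },
    "typeProperties": {"folderPath": "container/input", "fileName": "data.csv"},
}

ds = Dataset.deserialize(raw)                         # dispatches on "type"
assert isinstance(ds, AzureBlobDataset)
print(ds.folder_path, ds.file_name)
```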
@@ -599,7 +622,7 @@ class CopySource(msrest.serialization.Model): } _subtype_map = { - 'type': {'AvroSource': 'AvroSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'BinarySource': 'BinarySource', 'BlobSource': 'BlobSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'CosmosDbSqlApiSource': 'CosmosDbSqlApiSource', 'DelimitedTextSource': 'DelimitedTextSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'FileSystemSource': 'FileSystemSource', 'HdfsSource': 'HdfsSource', 'HttpSource': 'HttpSource', 'JsonSource': 'JsonSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'MongoDbSource': 'MongoDbSource', 'MongoDbV2Source': 'MongoDbV2Source', 'ODataSource': 'ODataSource', 'Office365Source': 'Office365Source', 'OracleSource': 'OracleSource', 'OrcSource': 'OrcSource', 'ParquetSource': 'ParquetSource', 'RelationalSource': 'RelationalSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'TabularSource': 'TabularSource', 'WebSource': 'WebSource'} + 'type': {'AvroSource': 'AvroSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'AzureDatabricksDeltaLakeSource': 'AzureDatabricksDeltaLakeSource', 'BinarySource': 'BinarySource', 'BlobSource': 'BlobSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'CosmosDbSqlApiSource': 'CosmosDbSqlApiSource', 'DelimitedTextSource': 'DelimitedTextSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'ExcelSource': 'ExcelSource', 'FileSystemSource': 'FileSystemSource', 'HdfsSource': 'HdfsSource', 'HttpSource': 'HttpSource', 'JsonSource': 'JsonSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'MongoDbAtlasSource': 'MongoDbAtlasSource', 'MongoDbSource': 'MongoDbSource', 'MongoDbV2Source': 'MongoDbV2Source', 'ODataSource': 'ODataSource', 'Office365Source': 'Office365Source', 'OracleSource': 'OracleSource', 'OrcSource': 'OrcSource', 'ParquetSource': 'ParquetSource', 'RelationalSource': 'RelationalSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SharePointOnlineListSource': 'SharePointOnlineListSource', 'SnowflakeSource': 'SnowflakeSource', 'TabularSource': 'TabularSource', 'WebSource': 'WebSource', 'XmlSource': 'XmlSource'} } def __init__( @@ -639,6 +662,9 @@ class TabularSource(CopySource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). 
+ :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -652,6 +678,7 @@ class TabularSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } _subtype_map = { @@ -665,6 +692,7 @@ def __init__( super(TabularSource, self).__init__(**kwargs) self.type = 'TabularSource' # type: str self.query_timeout = kwargs.get('query_timeout', None) + self.additional_columns = kwargs.get('additional_columns', None) class AmazonMWSSource(TabularSource): @@ -689,6 +717,9 @@ class AmazonMWSSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -705,6 +736,7 @@ class AmazonMWSSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -812,6 +844,9 @@ class AmazonRedshiftSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object :param redshift_unload_settings: The Amazon S3 settings needed for the interim Amazon S3 when @@ -831,6 +866,7 @@ class AmazonRedshiftSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'redshift_unload_settings': {'key': 'redshiftUnloadSettings', 'type': 'RedshiftUnloadSettings'}, } @@ -914,6 +950,99 @@ def __init__( self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) +class AmazonS3Dataset(Dataset): + """A single Amazon Simple Storage Service (S3) object or a set of S3 objects. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. 
+ :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param bucket_name: Required. The name of the Amazon S3 bucket. Type: string (or Expression + with resultType string). + :type bucket_name: object + :param key: The key of the Amazon S3 object. Type: string (or Expression with resultType + string). + :type key: object + :param prefix: The prefix filter for the S3 object name. Type: string (or Expression with + resultType string). + :type prefix: object + :param version: The version for the S3 object. Type: string (or Expression with resultType + string). + :type version: object + :param modified_datetime_start: The start of S3 object's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of S3 object's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_end: object + :param format: The format of files. + :type format: ~azure.synapse.artifacts.models.DatasetStorageFormat + :param compression: The data compression method used for the Amazon S3 object. 
+ :type compression: ~azure.synapse.artifacts.models.DatasetCompression + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + 'bucket_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'bucket_name': {'key': 'typeProperties.bucketName', 'type': 'object'}, + 'key': {'key': 'typeProperties.key', 'type': 'object'}, + 'prefix': {'key': 'typeProperties.prefix', 'type': 'object'}, + 'version': {'key': 'typeProperties.version', 'type': 'object'}, + 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__( + self, + **kwargs + ): + super(AmazonS3Dataset, self).__init__(**kwargs) + self.type = 'AmazonS3Object' # type: str + self.bucket_name = kwargs['bucket_name'] + self.key = kwargs.get('key', None) + self.prefix = kwargs.get('prefix', None) + self.version = kwargs.get('version', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + self.format = kwargs.get('format', None) + self.compression = kwargs.get('compression', None) + + class AmazonS3LinkedService(LinkedService): """Linked service for Amazon S3. @@ -932,6 +1061,9 @@ class AmazonS3LinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] + :param authentication_type: The authentication type of S3. Allowed value: AccessKey (default) + or TemporarySecurityCredentials. Type: string (or Expression with resultType string). + :type authentication_type: object :param access_key_id: The access key identifier of the Amazon S3 Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). :type access_key_id: object @@ -942,6 +1074,8 @@ class AmazonS3LinkedService(LinkedService): an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). :type service_url: object + :param session_token: The session token for the S3 temporary security credential. + :type session_token: ~azure.synapse.artifacts.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
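The new `AmazonS3Dataset` together with the `authentication_type` and `session_token` additions to `AmazonS3LinkedService` cover temporary-security-credential access to S3. A hedged construction sketch; the credentials, reference name, bucket and key below are placeholders rather than values taken from this patch:

```python
# Illustrative sketch only: constructing the S3 models touched by this patch.
from azure.synapse.artifacts.models import (
    AmazonS3LinkedService,
    AmazonS3Dataset,
    LinkedServiceReference,
    SecureString,
)

s3_linked_service = AmazonS3LinkedService(
    authentication_type="TemporarySecurityCredentials",   # new; default is AccessKey
    access_key_id="AKIA...",                               # placeholder
    secret_access_key=SecureString(value="<secret>"),
    session_token=SecureString(value="<session-token>"),  # new session_token property
)

s3_dataset = AmazonS3Dataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference",
        reference_name="MyS3LinkedService",                # placeholder
    ),
    bucket_name="my-bucket",                               # required by the new model
    key="raw/2021/03/",                                    # optional object key
    prefix="raw/2021/",                                    # optional name prefix filter
)
```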
@@ -959,9 +1093,11 @@ class AmazonS3LinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, + 'session_token': {'key': 'typeProperties.sessionToken', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -971,9 +1107,11 @@ def __init__( ): super(AmazonS3LinkedService, self).__init__(**kwargs) self.type = 'AmazonS3' # type: str + self.authentication_type = kwargs.get('authentication_type', None) self.access_key_id = kwargs.get('access_key_id', None) self.secret_access_key = kwargs.get('secret_access_key', None) self.service_url = kwargs.get('service_url', None) + self.session_token = kwargs.get('session_token', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -1138,8 +1276,18 @@ class AmazonS3ReadSettings(StoreReadSettings): :param prefix: The prefix filter for the S3 object name. Type: string (or Expression with resultType string). :type prefix: object + :param file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition discovery starts from. Type: + string (or Expression with resultType string). + :type partition_root_path: object + :param delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :type delete_files_after_completion: object :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). 
:type modified_datetime_start: object @@ -1160,7 +1308,10 @@ class AmazonS3ReadSettings(StoreReadSettings): 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, 'prefix': {'key': 'prefix', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, + 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } @@ -1175,7 +1326,10 @@ def __init__( self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) self.prefix = kwargs.get('prefix', None) + self.file_list_path = kwargs.get('file_list_path', None) self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.partition_root_path = kwargs.get('partition_root_path', None) + self.delete_files_after_completion = kwargs.get('delete_files_after_completion', None) self.modified_datetime_start = kwargs.get('modified_datetime_start', None) self.modified_datetime_end = kwargs.get('modified_datetime_end', None) @@ -1382,9 +1536,8 @@ class AvroDataset(Dataset): :type folder: ~azure.synapse.artifacts.models.DatasetFolder :param location: The location of the avro storage. :type location: ~azure.synapse.artifacts.models.DatasetLocation - :param avro_compression_codec: Possible values include: "none", "deflate", "snappy", "xz", - "bzip2". - :type avro_compression_codec: str or ~azure.synapse.artifacts.models.AvroCompressionCodec + :param avro_compression_codec: A string from AvroCompressionCodecEnum or an expression. + :type avro_compression_codec: object :param avro_compression_level: :type avro_compression_level: int """ @@ -1406,7 +1559,7 @@ class AvroDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'str'}, + 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'object'}, 'avro_compression_level': {'key': 'typeProperties.avroCompressionLevel', 'type': 'int'}, } @@ -1505,7 +1658,7 @@ class CopySink(msrest.serialization.Model): """A copy activity sink. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroSink, AzureBlobFSSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureMySqlSink, AzurePostgreSqlSink, AzureQueueSink, AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDbMongoDbApiSink, CosmosDbSqlApiSink, DelimitedTextSink, DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, OdbcSink, OracleSink, OrcSink, ParquetSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SqlDWSink, SqlMISink, SqlServerSink, SqlSink. 
+ sub-classes are: AvroSink, AzureBlobFSSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureDatabricksDeltaLakeSink, AzureMySqlSink, AzurePostgreSqlSink, AzureQueueSink, AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDbMongoDbApiSink, CosmosDbSqlApiSink, DelimitedTextSink, DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SqlDWSink, SqlMISink, SqlServerSink, SqlSink. All required parameters must be populated in order to send to Azure. @@ -1546,7 +1699,7 @@ class CopySink(msrest.serialization.Model): } _subtype_map = { - 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSqlSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink'} + 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureDatabricksDeltaLakeSink': 'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSqlSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'RestSink': 'RestSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink'} } def __init__( @@ -1641,6 +1794,9 @@ class 
AvroSource(CopySource): :type max_concurrent_connections: object :param store_settings: Avro store settings. :type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -1654,6 +1810,7 @@ class AvroSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( @@ -1663,13 +1820,14 @@ def __init__( super(AvroSource, self).__init__(**kwargs) self.type = 'AvroSource' # type: str self.store_settings = kwargs.get('store_settings', None) + self.additional_columns = kwargs.get('additional_columns', None) class FormatWriteSettings(msrest.serialization.Model): """Format write settings. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroWriteSettings, DelimitedTextWriteSettings, JsonWriteSettings. + sub-classes are: AvroWriteSettings, DelimitedTextWriteSettings, JsonWriteSettings, OrcWriteSettings, ParquetWriteSettings. All required parameters must be populated in order to send to Azure. @@ -1690,7 +1848,7 @@ class FormatWriteSettings(msrest.serialization.Model): } _subtype_map = { - 'type': {'AvroWriteSettings': 'AvroWriteSettings', 'DelimitedTextWriteSettings': 'DelimitedTextWriteSettings', 'JsonWriteSettings': 'JsonWriteSettings'} + 'type': {'AvroWriteSettings': 'AvroWriteSettings', 'DelimitedTextWriteSettings': 'DelimitedTextWriteSettings', 'JsonWriteSettings': 'JsonWriteSettings', 'OrcWriteSettings': 'OrcWriteSettings', 'ParquetWriteSettings': 'ParquetWriteSettings'} } def __init__( @@ -1716,6 +1874,13 @@ class AvroWriteSettings(FormatWriteSettings): :type record_name: str :param record_namespace: Record namespace in the write result. :type record_namespace: str + :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + specified count. Type: integer (or Expression with resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). 
+ :type file_name_prefix: object """ _validation = { @@ -1727,6 +1892,8 @@ class AvroWriteSettings(FormatWriteSettings): 'type': {'key': 'type', 'type': 'str'}, 'record_name': {'key': 'recordName', 'type': 'str'}, 'record_namespace': {'key': 'recordNamespace', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, } def __init__( @@ -1737,6 +1904,8 @@ def __init__( self.type = 'AvroWriteSettings' # type: str self.record_name = kwargs.get('record_name', None) self.record_namespace = kwargs.get('record_namespace', None) + self.max_rows_per_file = kwargs.get('max_rows_per_file', None) + self.file_name_prefix = kwargs.get('file_name_prefix', None) class AzureBatchLinkedService(LinkedService): @@ -1813,6 +1982,165 @@ def __init__( self.encrypted_credential = kwargs.get('encrypted_credential', None) +class AzureBlobDataset(Dataset): + """The Azure Blob storage. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param folder_path: The path of the Azure Blob storage. Type: string (or Expression with + resultType string). + :type folder_path: object + :param table_root_location: The root of blob path. Type: string (or Expression with resultType + string). + :type table_root_location: object + :param file_name: The name of the Azure Blob. Type: string (or Expression with resultType + string). + :type file_name: object + :param modified_datetime_start: The start of Azure Blob's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of Azure Blob's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_end: object + :param format: The format of the Azure Blob storage. + :type format: ~azure.synapse.artifacts.models.DatasetStorageFormat + :param compression: The data compression method used for the blob storage. 
+ :type compression: ~azure.synapse.artifacts.models.DatasetCompression + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'table_root_location': {'key': 'typeProperties.tableRootLocation', 'type': 'object'}, + 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureBlobDataset, self).__init__(**kwargs) + self.type = 'AzureBlob' # type: str + self.folder_path = kwargs.get('folder_path', None) + self.table_root_location = kwargs.get('table_root_location', None) + self.file_name = kwargs.get('file_name', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + self.format = kwargs.get('format', None) + self.compression = kwargs.get('compression', None) + + +class AzureBlobFSDataset(Dataset): + """The Azure Data Lake Storage Gen2 storage. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param folder_path: The path of the Azure Data Lake Storage Gen2 storage. Type: string (or + Expression with resultType string). + :type folder_path: object + :param file_name: The name of the Azure Data Lake Storage Gen2. 
Type: string (or Expression + with resultType string). + :type file_name: object + :param format: The format of the Azure Data Lake Storage Gen2 storage. + :type format: ~azure.synapse.artifacts.models.DatasetStorageFormat + :param compression: The data compression method used for the blob storage. + :type compression: ~azure.synapse.artifacts.models.DatasetCompression + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureBlobFSDataset, self).__init__(**kwargs) + self.type = 'AzureBlobFSFile' # type: str + self.folder_path = kwargs.get('folder_path', None) + self.file_name = kwargs.get('file_name', None) + self.format = kwargs.get('format', None) + self.compression = kwargs.get('compression', None) + + class AzureBlobFSLinkedService(LinkedService): """Azure Data Lake Storage Gen2 linked service. @@ -1846,6 +2174,10 @@ class AzureBlobFSLinkedService(LinkedService): :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object + :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data + factory regions’ cloud type. Type: string (or Expression with resultType string). + :type azure_cloud_type: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -1869,6 +2201,7 @@ class AzureBlobFSLinkedService(LinkedService): 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -1883,6 +2216,7 @@ def __init__( self.service_principal_id = kwargs.get('service_principal_id', None) self.service_principal_key = kwargs.get('service_principal_key', None) self.tenant = kwargs.get('tenant', None) + self.azure_cloud_type = kwargs.get('azure_cloud_type', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -1950,8 +2284,18 @@ class AzureBlobFSReadSettings(StoreReadSettings): :param wildcard_file_name: Azure blobFS wildcardFileName. Type: string (or Expression with resultType string). 
:type wildcard_file_name: object + :param file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition discovery starts from. Type: + string (or Expression with resultType string). + :type partition_root_path: object + :param delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :type delete_files_after_completion: object :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). :type modified_datetime_start: object @@ -1971,7 +2315,10 @@ class AzureBlobFSReadSettings(StoreReadSettings): 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, + 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } @@ -1985,7 +2332,10 @@ def __init__( self.recursive = kwargs.get('recursive', None) self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.file_list_path = kwargs.get('file_list_path', None) self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.partition_root_path = kwargs.get('partition_root_path', None) + self.delete_files_after_completion = kwargs.get('delete_files_after_completion', None) self.modified_datetime_start = kwargs.get('modified_datetime_start', None) self.modified_datetime_end = kwargs.get('modified_datetime_end', None) @@ -2103,7 +2453,7 @@ class StoreWriteSettings(msrest.serialization.Model): """Connector write settings. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureBlobFSWriteSettings, AzureBlobStorageWriteSettings, AzureDataLakeStoreWriteSettings, FileServerWriteSettings, SftpWriteSettings. + sub-classes are: AzureBlobFSWriteSettings, AzureBlobStorageWriteSettings, AzureDataLakeStoreWriteSettings, AzureFileStorageWriteSettings, FileServerWriteSettings, SftpWriteSettings. All required parameters must be populated in order to send to Azure. 
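The read-settings hunks above all gain the same three optional properties (`fileListPath`, `partitionRootPath`, `deleteFilesAfterCompletion`), which surface as plain keyword arguments on the generated models. A minimal sketch using `AzureBlobFSReadSettings`; the paths and flag values are illustrative only:

```python
# Illustrative sketch: the new StoreReadSettings properties are ordinary
# keyword arguments, here attached to a copy-activity source.
from azure.synapse.artifacts.models import AzureBlobFSReadSettings, BinarySource

read_settings = AzureBlobFSReadSettings(
    recursive=True,
    wildcard_file_name="*.parquet",
    file_list_path="control/files-to-copy.txt",   # new: explicit file list
    enable_partition_discovery=True,
    partition_root_path="raw/sales",              # new: root for partition discovery
    delete_files_after_completion=False,          # new: clean up source after copy
)

source = BinarySource(store_settings=read_settings)
```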
@@ -2131,7 +2481,7 @@ class StoreWriteSettings(msrest.serialization.Model): } _subtype_map = { - 'type': {'AzureBlobFSWriteSettings': 'AzureBlobFSWriteSettings', 'AzureBlobStorageWriteSettings': 'AzureBlobStorageWriteSettings', 'AzureDataLakeStoreWriteSettings': 'AzureDataLakeStoreWriteSettings', 'FileServerWriteSettings': 'FileServerWriteSettings', 'SftpWriteSettings': 'SftpWriteSettings'} + 'type': {'AzureBlobFSWriteSettings': 'AzureBlobFSWriteSettings', 'AzureBlobStorageWriteSettings': 'AzureBlobStorageWriteSettings', 'AzureDataLakeStoreWriteSettings': 'AzureDataLakeStoreWriteSettings', 'AzureFileStorageWriteSettings': 'AzureFileStorageWriteSettings', 'FileServerWriteSettings': 'FileServerWriteSettings', 'SftpWriteSettings': 'SftpWriteSettings'} } def __init__( @@ -2227,6 +2577,10 @@ class AzureBlobStorageLinkedService(LinkedService): :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object + :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data + factory regions’ cloud type. Type: string (or Expression with resultType string). + :type azure_cloud_type: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -2252,6 +2606,7 @@ class AzureBlobStorageLinkedService(LinkedService): 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, } @@ -2269,6 +2624,7 @@ def __init__( self.service_principal_id = kwargs.get('service_principal_id', None) self.service_principal_key = kwargs.get('service_principal_key', None) self.tenant = kwargs.get('tenant', None) + self.azure_cloud_type = kwargs.get('azure_cloud_type', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -2339,8 +2695,18 @@ class AzureBlobStorageReadSettings(StoreReadSettings): :param prefix: The prefix filter for the Azure Blob name. Type: string (or Expression with resultType string). :type prefix: object + :param file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition discovery starts from. Type: + string (or Expression with resultType string). + :type partition_root_path: object + :param delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :type delete_files_after_completion: object :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). 
:type modified_datetime_start: object @@ -2361,7 +2727,10 @@ class AzureBlobStorageReadSettings(StoreReadSettings): 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, 'prefix': {'key': 'prefix', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, + 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } @@ -2376,7 +2745,10 @@ def __init__( self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) self.prefix = kwargs.get('prefix', None) + self.file_list_path = kwargs.get('file_list_path', None) self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.partition_root_path = kwargs.get('partition_root_path', None) + self.delete_files_after_completion = kwargs.get('delete_files_after_completion', None) self.modified_datetime_start = kwargs.get('modified_datetime_start', None) self.modified_datetime_end = kwargs.get('modified_datetime_end', None) @@ -2422,6 +2794,394 @@ def __init__( self.block_size_in_mb = kwargs.get('block_size_in_mb', None) +class AzureDatabricksDeltaLakeDataset(Dataset): + """Azure Databricks Delta Lake dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table: The name of delta table. Type: string (or Expression with resultType string). + :type table: object + :param database: The database name of delta table. Type: string (or Expression with resultType + string). 
+ :type database: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureDatabricksDeltaLakeDataset, self).__init__(**kwargs) + self.type = 'AzureDatabricksDeltaLakeDataset' # type: str + self.table = kwargs.get('table', None) + self.database = kwargs.get('database', None) + + +class ExportSettings(msrest.serialization.Model): + """Export command settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureDatabricksDeltaLakeExportCommand, SnowflakeExportCopyCommand. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The export setting type.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'AzureDatabricksDeltaLakeExportCommand': 'AzureDatabricksDeltaLakeExportCommand', 'SnowflakeExportCopyCommand': 'SnowflakeExportCopyCommand'} + } + + def __init__( + self, + **kwargs + ): + super(ExportSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = 'ExportSettings' # type: str + + +class AzureDatabricksDeltaLakeExportCommand(ExportSettings): + """Azure Databricks Delta Lake export command settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The export setting type.Constant filled by server. + :type type: str + :param date_format: Specify the date format for the csv in Azure Databricks Delta Lake Copy. + Type: string (or Expression with resultType string). + :type date_format: object + :param timestamp_format: Specify the timestamp format for the csv in Azure Databricks Delta + Lake Copy. Type: string (or Expression with resultType string). 
+ :type timestamp_format: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'date_format': {'key': 'dateFormat', 'type': 'object'}, + 'timestamp_format': {'key': 'timestampFormat', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureDatabricksDeltaLakeExportCommand, self).__init__(**kwargs) + self.type = 'AzureDatabricksDeltaLakeExportCommand' # type: str + self.date_format = kwargs.get('date_format', None) + self.timestamp_format = kwargs.get('timestamp_format', None) + + +class ImportSettings(msrest.serialization.Model): + """Import command settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureDatabricksDeltaLakeImportCommand, SnowflakeImportCopyCommand. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The import setting type.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'AzureDatabricksDeltaLakeImportCommand': 'AzureDatabricksDeltaLakeImportCommand', 'SnowflakeImportCopyCommand': 'SnowflakeImportCopyCommand'} + } + + def __init__( + self, + **kwargs + ): + super(ImportSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = 'ImportSettings' # type: str + + +class AzureDatabricksDeltaLakeImportCommand(ImportSettings): + """Azure Databricks Delta Lake import command settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The import setting type.Constant filled by server. + :type type: str + :param date_format: Specify the date format for csv in Azure Databricks Delta Lake Copy. Type: + string (or Expression with resultType string). + :type date_format: object + :param timestamp_format: Specify the timestamp format for csv in Azure Databricks Delta Lake + Copy. Type: string (or Expression with resultType string). + :type timestamp_format: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'date_format': {'key': 'dateFormat', 'type': 'object'}, + 'timestamp_format': {'key': 'timestampFormat', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureDatabricksDeltaLakeImportCommand, self).__init__(**kwargs) + self.type = 'AzureDatabricksDeltaLakeImportCommand' # type: str + self.date_format = kwargs.get('date_format', None) + self.timestamp_format = kwargs.get('timestamp_format', None) + + +class AzureDatabricksDeltaLakeLinkedService(LinkedService): + """Azure Databricks Delta Lake linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks + deployment. Type: string (or Expression with resultType string). + :type domain: object + :param access_token: Required. Access token for databricks REST API. Refer to + https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type access_token: ~azure.synapse.artifacts.models.SecretBase + :param cluster_id: The id of an existing interactive cluster that will be used for all runs of + this job. Type: string (or Expression with resultType string). + :type cluster_id: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'domain': {'required': True}, + 'access_token': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'cluster_id': {'key': 'typeProperties.clusterId', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureDatabricksDeltaLakeLinkedService, self).__init__(**kwargs) + self.type = 'AzureDatabricksDeltaLake' # type: str + self.domain = kwargs['domain'] + self.access_token = kwargs['access_token'] + self.cluster_id = kwargs.get('cluster_id', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class AzureDatabricksDeltaLakeSink(CopySink): + """A copy activity Azure Databricks Delta Lake sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. 
Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :type pre_copy_script: object + :param import_settings: Azure Databricks Delta Lake import settings. + :type import_settings: ~azure.synapse.artifacts.models.AzureDatabricksDeltaLakeImportCommand + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'import_settings': {'key': 'importSettings', 'type': 'AzureDatabricksDeltaLakeImportCommand'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureDatabricksDeltaLakeSink, self).__init__(**kwargs) + self.type = 'AzureDatabricksDeltaLakeSink' # type: str + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.import_settings = kwargs.get('import_settings', None) + + +class AzureDatabricksDeltaLakeSource(CopySource): + """A copy activity Azure Databricks Delta Lake source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: Azure Databricks Delta Lake Sql query. Type: string (or Expression with + resultType string). + :type query: object + :param export_settings: Azure Databricks Delta Lake export settings. 
+ :type export_settings: ~azure.synapse.artifacts.models.AzureDatabricksDeltaLakeExportCommand + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'export_settings': {'key': 'exportSettings', 'type': 'AzureDatabricksDeltaLakeExportCommand'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureDatabricksDeltaLakeSource, self).__init__(**kwargs) + self.type = 'AzureDatabricksDeltaLakeSource' # type: str + self.query = kwargs.get('query', None) + self.export_settings = kwargs.get('export_settings', None) + + class AzureDatabricksLinkedService(LinkedService): """Azure Databricks linked service. @@ -2443,10 +3203,16 @@ class AzureDatabricksLinkedService(LinkedService): :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks deployment. Type: string (or Expression with resultType string). :type domain: object - :param access_token: Required. Access token for databricks REST API. Refer to + :param access_token: Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression with resultType string). :type access_token: ~azure.synapse.artifacts.models.SecretBase + :param authentication: Required to specify MSI, if using Workspace resource id for databricks + REST API. Type: string (or Expression with resultType string). + :type authentication: object + :param workspace_resource_id: Workspace resource id for databricks REST API. Type: string (or + Expression with resultType string). + :type workspace_resource_id: object :param existing_cluster_id: The id of an existing interactive cluster that will be used for all runs of this activity. Type: string (or Expression with resultType string). :type existing_cluster_id: object @@ -2478,6 +3244,9 @@ class AzureDatabricksLinkedService(LinkedService): :param new_cluster_custom_tags: Additional tags for cluster resources. This property is ignored in instance pool configurations. :type new_cluster_custom_tags: dict[str, object] + :param new_cluster_log_destination: Specify a location to deliver Spark driver, worker, and + event logs. Type: string (or Expression with resultType string). + :type new_cluster_log_destination: object :param new_cluster_driver_node_type: The driver node type for the new job cluster. This property is ignored in instance pool configurations. Type: string (or Expression with resultType string). @@ -2493,12 +3262,14 @@ class AzureDatabricksLinkedService(LinkedService): encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param policy_id: The policy id for limiting the ability to configure clusters based on a user + defined set of rules. Type: string (or Expression with resultType string). 
+ :type policy_id: object """ _validation = { 'type': {'required': True}, 'domain': {'required': True}, - 'access_token': {'required': True}, } _attribute_map = { @@ -2510,6 +3281,8 @@ class AzureDatabricksLinkedService(LinkedService): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'authentication': {'key': 'typeProperties.authentication', 'type': 'object'}, + 'workspace_resource_id': {'key': 'typeProperties.workspaceResourceId', 'type': 'object'}, 'existing_cluster_id': {'key': 'typeProperties.existingClusterId', 'type': 'object'}, 'instance_pool_id': {'key': 'typeProperties.instancePoolId', 'type': 'object'}, 'new_cluster_version': {'key': 'typeProperties.newClusterVersion', 'type': 'object'}, @@ -2518,10 +3291,12 @@ class AzureDatabricksLinkedService(LinkedService): 'new_cluster_spark_conf': {'key': 'typeProperties.newClusterSparkConf', 'type': '{object}'}, 'new_cluster_spark_env_vars': {'key': 'typeProperties.newClusterSparkEnvVars', 'type': '{object}'}, 'new_cluster_custom_tags': {'key': 'typeProperties.newClusterCustomTags', 'type': '{object}'}, + 'new_cluster_log_destination': {'key': 'typeProperties.newClusterLogDestination', 'type': 'object'}, 'new_cluster_driver_node_type': {'key': 'typeProperties.newClusterDriverNodeType', 'type': 'object'}, 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'policy_id': {'key': 'typeProperties.policyId', 'type': 'object'}, } def __init__( @@ -2531,7 +3306,9 @@ def __init__( super(AzureDatabricksLinkedService, self).__init__(**kwargs) self.type = 'AzureDatabricks' # type: str self.domain = kwargs['domain'] - self.access_token = kwargs['access_token'] + self.access_token = kwargs.get('access_token', None) + self.authentication = kwargs.get('authentication', None) + self.workspace_resource_id = kwargs.get('workspace_resource_id', None) self.existing_cluster_id = kwargs.get('existing_cluster_id', None) self.instance_pool_id = kwargs.get('instance_pool_id', None) self.new_cluster_version = kwargs.get('new_cluster_version', None) @@ -2540,10 +3317,12 @@ def __init__( self.new_cluster_spark_conf = kwargs.get('new_cluster_spark_conf', None) self.new_cluster_spark_env_vars = kwargs.get('new_cluster_spark_env_vars', None) self.new_cluster_custom_tags = kwargs.get('new_cluster_custom_tags', None) + self.new_cluster_log_destination = kwargs.get('new_cluster_log_destination', None) self.new_cluster_driver_node_type = kwargs.get('new_cluster_driver_node_type', None) self.new_cluster_init_scripts = kwargs.get('new_cluster_init_scripts', None) self.new_cluster_enable_elastic_disk = kwargs.get('new_cluster_enable_elastic_disk', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.policy_id = kwargs.get('policy_id', None) class ExecutionActivity(Activity): @@ -2826,6 +3605,9 @@ class AzureDataExplorerSource(CopySource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).. :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. 
Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -2842,6 +3624,7 @@ class AzureDataExplorerSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, 'no_truncation': {'key': 'noTruncation', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( @@ -2853,6 +3636,7 @@ def __init__( self.query = kwargs['query'] self.no_truncation = kwargs.get('no_truncation', None) self.query_timeout = kwargs.get('query_timeout', None) + self.additional_columns = kwargs.get('additional_columns', None) class AzureDataExplorerTableDataset(Dataset): @@ -2998,6 +3782,79 @@ def __init__( self.encrypted_credential = kwargs.get('encrypted_credential', None) +class AzureDataLakeStoreDataset(Dataset): + """Azure Data Lake Store dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param folder_path: Path to the folder in the Azure Data Lake Store. Type: string (or + Expression with resultType string). + :type folder_path: object + :param file_name: The name of the file in the Azure Data Lake Store. Type: string (or + Expression with resultType string). + :type file_name: object + :param format: The format of the Data Lake Store. + :type format: ~azure.synapse.artifacts.models.DatasetStorageFormat + :param compression: The data compression method used for the item(s) in the Azure Data Lake + Store. 
+ :type compression: ~azure.synapse.artifacts.models.DatasetCompression + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureDataLakeStoreDataset, self).__init__(**kwargs) + self.type = 'AzureDataLakeStoreFile' # type: str + self.folder_path = kwargs.get('folder_path', None) + self.file_name = kwargs.get('file_name', None) + self.format = kwargs.get('format', None) + self.compression = kwargs.get('compression', None) + + class AzureDataLakeStoreLinkedService(LinkedService): """Azure Data Lake Store linked service. @@ -3028,6 +3885,10 @@ class AzureDataLakeStoreLinkedService(LinkedService): :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object + :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data + factory regions’ cloud type. Type: string (or Expression with resultType string). + :type azure_cloud_type: object :param account_name: Data Lake Store account name. Type: string (or Expression with resultType string). :type account_name: object @@ -3059,6 +3920,7 @@ class AzureDataLakeStoreLinkedService(LinkedService): 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, @@ -3075,6 +3937,7 @@ def __init__( self.service_principal_id = kwargs.get('service_principal_id', None) self.service_principal_key = kwargs.get('service_principal_key', None) self.tenant = kwargs.get('tenant', None) + self.azure_cloud_type = kwargs.get('azure_cloud_type', None) self.account_name = kwargs.get('account_name', None) self.subscription_id = kwargs.get('subscription_id', None) self.resource_group_name = kwargs.get('resource_group_name', None) @@ -3140,8 +4003,26 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings): :param wildcard_file_name: ADLS wildcardFileName. Type: string (or Expression with resultType string). 
:type wildcard_file_name: object + :param file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :type file_list_path: object + :param list_after: Lists files after the value (exclusive) based on file/folder names’ + lexicographical order. Applies under the folderPath in data set, and filter files/sub-folders + under the folderPath. Type: string (or Expression with resultType string). + :type list_after: object + :param list_before: Lists files before the value (inclusive) based on file/folder names’ + lexicographical order. Applies under the folderPath in data set, and filter files/sub-folders + under the folderPath. Type: string (or Expression with resultType string). + :type list_before: object :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition discovery starts from. Type: + string (or Expression with resultType string). + :type partition_root_path: object + :param delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :type delete_files_after_completion: object :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). :type modified_datetime_start: object @@ -3161,7 +4042,12 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings): 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, + 'list_after': {'key': 'listAfter', 'type': 'object'}, + 'list_before': {'key': 'listBefore', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, + 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } @@ -3175,7 +4061,12 @@ def __init__( self.recursive = kwargs.get('recursive', None) self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.file_list_path = kwargs.get('file_list_path', None) + self.list_after = kwargs.get('list_after', None) + self.list_before = kwargs.get('list_before', None) self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.partition_root_path = kwargs.get('partition_root_path', None) + self.delete_files_after_completion = kwargs.get('delete_files_after_completion', None) self.modified_datetime_start = kwargs.get('modified_datetime_start', None) self.modified_datetime_end = kwargs.get('modified_datetime_end', None) @@ -3298,6 +4189,10 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings): :type max_concurrent_connections: object :param copy_behavior: The type of copy behavior for copy sink. :type copy_behavior: object + :param expiry_date_time: Specifies the expiry time of the written files. The time is applied to + the UTC time zone in the format of "2018-12-01T05:00:00Z". 
Default value is NULL. Type: integer + (or Expression with resultType integer). + :type expiry_date_time: object """ _validation = { @@ -3309,6 +4204,7 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings): 'type': {'key': 'type', 'type': 'str'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'expiry_date_time': {'key': 'expiryDateTime', 'type': 'object'}, } def __init__( @@ -3317,6 +4213,7 @@ def __init__( ): super(AzureDataLakeStoreWriteSettings, self).__init__(**kwargs) self.type = 'AzureDataLakeStoreWriteSettings' # type: str + self.expiry_date_time = kwargs.get('expiry_date_time', None) class Resource(msrest.serialization.Model): @@ -3421,6 +4318,22 @@ class AzureFileStorageLinkedService(LinkedService): :type user_id: object :param password: Password to logon the server. :type password: ~azure.synapse.artifacts.models.SecretBase + :param connection_string: The connection string. It is mutually exclusive with sasUri property. + Type: string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param account_key: The Azure key vault secret reference of accountKey in connection string. + :type account_key: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param sas_uri: SAS URI of the Azure File resource. It is mutually exclusive with + connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. + :type sas_uri: object + :param sas_token: The Azure key vault secret reference of sasToken in sas uri. + :type sas_token: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param file_share: The azure file share name. It is required when auth with + accountKey/sasToken. Type: string (or Expression with resultType string). + :type file_share: object + :param snapshot: The azure file share snapshot version. Type: string (or Expression with + resultType string). + :type snapshot: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
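As a minimal sketch (not part of the generated diff) of how the Azure Databricks Delta Lake classes added above might be wired together: the kwargs mirror the typeProperties documented in the hunks, every literal value is a placeholder, and SecureString is assumed to be the usual SecretBase implementation exported by azure.synapse.artifacts.models.

from azure.synapse.artifacts.models import (
    AzureDatabricksDeltaLakeImportCommand,
    AzureDatabricksDeltaLakeLinkedService,
    AzureDatabricksDeltaLakeSink,
    SecureString,
)

# Linked service: domain and access_token are the only required typeProperties.
delta_lake_ls = AzureDatabricksDeltaLakeLinkedService(
    domain="https://adb-1111111111111111.1.azuredatabricks.net",
    access_token=SecureString(value="<databricks-pat>"),
    cluster_id="0329-012345-abcde678",
)

# Copy-activity sink that truncates the target table and parses csv dates on import.
delta_lake_sink = AzureDatabricksDeltaLakeSink(
    pre_copy_script="TRUNCATE TABLE staging.events",
    import_settings=AzureDatabricksDeltaLakeImportCommand(date_format="yyyy-MM-dd"),
)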
@@ -3442,6 +4355,12 @@ class AzureFileStorageLinkedService(LinkedService): 'host': {'key': 'typeProperties.host', 'type': 'object'}, 'user_id': {'key': 'typeProperties.userId', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, + 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, + 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, + 'file_share': {'key': 'typeProperties.fileShare', 'type': 'object'}, + 'snapshot': {'key': 'typeProperties.snapshot', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -3454,6 +4373,12 @@ def __init__( self.host = kwargs['host'] self.user_id = kwargs.get('user_id', None) self.password = kwargs.get('password', None) + self.connection_string = kwargs.get('connection_string', None) + self.account_key = kwargs.get('account_key', None) + self.sas_uri = kwargs.get('sas_uri', None) + self.sas_token = kwargs.get('sas_token', None) + self.file_share = kwargs.get('file_share', None) + self.snapshot = kwargs.get('snapshot', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -3516,8 +4441,21 @@ class AzureFileStorageReadSettings(StoreReadSettings): :param wildcard_file_name: Azure File Storage wildcardFileName. Type: string (or Expression with resultType string). :type wildcard_file_name: object + :param prefix: The prefix filter for the Azure File name starting from root path. Type: string + (or Expression with resultType string). + :type prefix: object + :param file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition discovery starts from. Type: + string (or Expression with resultType string). + :type partition_root_path: object + :param delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :type delete_files_after_completion: object :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). 
:type modified_datetime_start: object @@ -3537,7 +4475,11 @@ class AzureFileStorageReadSettings(StoreReadSettings): 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'prefix': {'key': 'prefix', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, + 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } @@ -3551,11 +4493,51 @@ def __init__( self.recursive = kwargs.get('recursive', None) self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.prefix = kwargs.get('prefix', None) + self.file_list_path = kwargs.get('file_list_path', None) self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.partition_root_path = kwargs.get('partition_root_path', None) + self.delete_files_after_completion = kwargs.get('delete_files_after_completion', None) self.modified_datetime_start = kwargs.get('modified_datetime_start', None) self.modified_datetime_end = kwargs.get('modified_datetime_end', None) +class AzureFileStorageWriteSettings(StoreWriteSettings): + """Azure File Storage write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureFileStorageWriteSettings, self).__init__(**kwargs) + self.type = 'AzureFileStorageWriteSettings' # type: str + + class AzureFunctionActivity(ExecutionActivity): """Azure Function activity. @@ -3882,6 +4864,9 @@ class AzureMariaDBSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -3898,6 +4883,7 @@ class AzureMariaDBSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -4509,6 +5495,9 @@ class AzureMySqlSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object """ @@ -4524,6 +5513,7 @@ class AzureMySqlSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -4732,6 +5722,9 @@ class AzurePostgreSqlSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -4748,6 +5741,7 @@ class AzurePostgreSqlSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -5081,6 +6075,10 @@ class AzureSqlDatabaseLinkedService(LinkedService): :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object + :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data + factory regions’ cloud type. Type: string (or Expression with resultType string). + :type azure_cloud_type: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
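In the same spirit, a hedged sketch of the additionalColumns property that the hunks above add to the tabular sources. AdditionalColumns itself is not shown in this part of the patch; it is assumed to expose simple name/value members as in the underlying swagger, and the query and column values are placeholders.

from azure.synapse.artifacts.models import AdditionalColumns, AzurePostgreSqlSource

source = AzurePostgreSqlSource(
    query="SELECT id, payload FROM events",
    additional_columns=[
        # A static value column...
        AdditionalColumns(name="ingest_region", value="westus2"),
        # ...and one driven by a pipeline expression; the 'object'-typed value
        # passes the expression dictionary through unchanged.
        AdditionalColumns(name="run_id", value={"value": "@pipeline().RunId", "type": "Expression"}),
    ],
)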
@@ -5104,6 +6102,7 @@ class AzureSqlDatabaseLinkedService(LinkedService): 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -5118,6 +6117,7 @@ def __init__( self.service_principal_id = kwargs.get('service_principal_id', None) self.service_principal_key = kwargs.get('service_principal_key', None) self.tenant = kwargs.get('tenant', None) + self.azure_cloud_type = kwargs.get('azure_cloud_type', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -5153,6 +6153,10 @@ class AzureSqlDWLinkedService(LinkedService): :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object + :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data + factory regions’ cloud type. Type: string (or Expression with resultType string). + :type azure_cloud_type: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -5176,6 +6180,7 @@ class AzureSqlDWLinkedService(LinkedService): 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -5190,6 +6195,7 @@ def __init__( self.service_principal_id = kwargs.get('service_principal_id', None) self.service_principal_key = kwargs.get('service_principal_key', None) self.tenant = kwargs.get('tenant', None) + self.azure_cloud_type = kwargs.get('azure_cloud_type', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -5294,6 +6300,10 @@ class AzureSqlMILinkedService(LinkedService): :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object + :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data + factory regions’ cloud type. Type: string (or Expression with resultType string). + :type azure_cloud_type: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
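Likewise, a minimal sketch of the new azureCloudType knob on the SQL-family linked services above. The connection string and tenant are placeholders; per the docstrings, omitting azure_cloud_type falls back to the cloud type of the hosting region.

from azure.synapse.artifacts.models import AzureSqlDatabaseLinkedService

sql_ls = AzureSqlDatabaseLinkedService(
    connection_string="Server=tcp:<server>.database.windows.net;Database=<db>;",
    tenant="<tenant-id>",
    # One of AzurePublic, AzureChina, AzureUsGovernment, AzureGermany.
    azure_cloud_type="AzurePublic",
)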
@@ -5317,6 +6327,7 @@ class AzureSqlMILinkedService(LinkedService): 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -5331,6 +6342,7 @@ def __init__( self.service_principal_id = kwargs.get('service_principal_id', None) self.service_principal_key = kwargs.get('service_principal_key', None) self.tenant = kwargs.get('tenant', None) + self.azure_cloud_type = kwargs.get('azure_cloud_type', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -5504,6 +6516,9 @@ class AzureSqlSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). :type sql_reader_query: object :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database @@ -5516,6 +6531,11 @@ class AzureSqlSource(TabularSource): ~azure.synapse.artifacts.models.StoredProcedureParameter] :param produce_additional_types: Which additional types to produce. :type produce_additional_types: object + :param partition_option: The partition mechanism that will be used for Sql read in parallel. + Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". + :type partition_option: object + :param partition_settings: The settings that will be leveraged for Sql source partitioning. 
+ :type partition_settings: ~azure.synapse.artifacts.models.SqlPartitionSettings """ _validation = { @@ -5529,10 +6549,13 @@ class AzureSqlSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, } def __init__( @@ -5545,6 +6568,8 @@ def __init__( self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) self.produce_additional_types = kwargs.get('produce_additional_types', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) class AzureSqlTableDataset(Dataset): @@ -5832,6 +6857,9 @@ class AzureTableSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param azure_table_source_query: Azure Table source query. Type: string (or Expression with resultType string). :type azure_table_source_query: object @@ -5851,6 +6879,7 @@ class AzureTableSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'}, 'azure_table_source_ignore_table_not_found': {'key': 'azureTableSourceIgnoreTableNotFound', 'type': 'object'}, } @@ -6209,6 +7238,76 @@ def __init__( self.compression = kwargs.get('compression', None) +class FormatReadSettings(msrest.serialization.Model): + """Format read settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: BinaryReadSettings, DelimitedTextReadSettings, JsonReadSettings, XmlReadSettings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The read setting type.Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'BinaryReadSettings': 'BinaryReadSettings', 'DelimitedTextReadSettings': 'DelimitedTextReadSettings', 'JsonReadSettings': 'JsonReadSettings', 'XmlReadSettings': 'XmlReadSettings'} + } + + def __init__( + self, + **kwargs + ): + super(FormatReadSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = 'FormatReadSettings' # type: str + + +class BinaryReadSettings(FormatReadSettings): + """Binary read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The read setting type.Constant filled by server. + :type type: str + :param compression_properties: Compression settings. + :type compression_properties: ~azure.synapse.artifacts.models.CompressionReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'compression_properties': {'key': 'compressionProperties', 'type': 'CompressionReadSettings'}, + } + + def __init__( + self, + **kwargs + ): + super(BinaryReadSettings, self).__init__(**kwargs) + self.type = 'BinaryReadSettings' # type: str + self.compression_properties = kwargs.get('compression_properties', None) + + class BinarySink(CopySink): """A copy activity Binary sink. @@ -6283,6 +7382,8 @@ class BinarySource(CopySource): :type max_concurrent_connections: object :param store_settings: Binary store settings. :type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings + :param format_settings: Binary format settings. + :type format_settings: ~azure.synapse.artifacts.models.BinaryReadSettings """ _validation = { @@ -6296,6 +7397,7 @@ class BinarySource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'BinaryReadSettings'}, } def __init__( @@ -6305,6 +7407,7 @@ def __init__( super(BinarySource, self).__init__(**kwargs) self.type = 'BinarySource' # type: str self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) class Trigger(msrest.serialization.Model): @@ -6364,7 +7467,7 @@ class MultiplePipelineTrigger(Trigger): """Base class for all triggers that support one to many model for trigger to pipeline. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: BlobEventsTrigger, BlobTrigger, ScheduleTrigger. + sub-classes are: BlobEventsTrigger, BlobTrigger, CustomEventsTrigger, ScheduleTrigger. Variables are only populated by the server, and will be ignored when sending a request. 
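Finally, a short sketch of how the new read-settings hierarchy composes: a BinarySource pairs store settings with the BinaryReadSettings defined above, and ZipDeflateReadSettings (one of the CompressionReadSettings subtypes registered later in this patch) unzips the input while copying. The store-settings values are placeholders.

from azure.synapse.artifacts.models import (
    AzureDataLakeStoreReadSettings,
    BinaryReadSettings,
    BinarySource,
    ZipDeflateReadSettings,
)

binary_source = BinarySource(
    store_settings=AzureDataLakeStoreReadSettings(recursive=True, wildcard_file_name="*.zip"),
    format_settings=BinaryReadSettings(
        # Decompress zip archives as part of the copy.
        compression_properties=ZipDeflateReadSettings(),
    ),
)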
@@ -6401,7 +7504,7 @@ class MultiplePipelineTrigger(Trigger): } _subtype_map = { - 'type': {'BlobEventsTrigger': 'BlobEventsTrigger', 'BlobTrigger': 'BlobTrigger', 'ScheduleTrigger': 'ScheduleTrigger'} + 'type': {'BlobEventsTrigger': 'BlobEventsTrigger', 'BlobTrigger': 'BlobTrigger', 'CustomEventsTrigger': 'CustomEventsTrigger', 'ScheduleTrigger': 'ScheduleTrigger'} } def __init__( @@ -6764,6 +7867,9 @@ class CassandraSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: Database query. Should be a SQL-92 query expression or Cassandra Query Language (CQL) command. Type: string (or Expression with resultType string). :type query: object @@ -6788,6 +7894,7 @@ class CassandraSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'consistency_level': {'key': 'consistencyLevel', 'type': 'str'}, } @@ -7076,12 +8183,9 @@ class CommonDataServiceForAppsLinkedService(LinkedService): :param service_principal_id: The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). :type service_principal_id: object - :param service_principal_credential_type: The service principal credential type to use in - Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). Possible values include: - "ServicePrincipalKey", "ServicePrincipalCert". - :type service_principal_credential_type: str or - ~azure.synapse.artifacts.models.DynamicsServicePrincipalCredentialType + :param service_principal_credential_type: A string from ServicePrincipalCredentialEnum or an + expression. + :type service_principal_credential_type: object :param service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. 
If @@ -7116,7 +8220,7 @@ class CommonDataServiceForAppsLinkedService(LinkedService): 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'object'}, 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -7229,6 +8333,9 @@ class CommonDataServiceForAppsSource(CopySource): :param query: FetchXML is a proprietary query language that is used in Microsoft Common Data Service for Apps (online & on-premises). Type: string (or Expression with resultType string). :type query: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -7242,6 +8349,7 @@ class CommonDataServiceForAppsSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( @@ -7251,6 +8359,44 @@ def __init__( super(CommonDataServiceForAppsSource, self).__init__(**kwargs) self.type = 'CommonDataServiceForAppsSource' # type: str self.query = kwargs.get('query', None) + self.additional_columns = kwargs.get('additional_columns', None) + + +class CompressionReadSettings(msrest.serialization.Model): + """Compression read settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: TarGZipReadSettings, TarReadSettings, ZipDeflateReadSettings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The Compression setting type.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'TarGZipReadSettings': 'TarGZipReadSettings', 'TarReadSettings': 'TarReadSettings', 'ZipDeflateReadSettings': 'ZipDeflateReadSettings'} + } + + def __init__( + self, + **kwargs + ): + super(CompressionReadSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = 'CompressionReadSettings' # type: str class ConcurLinkedService(LinkedService): @@ -7271,6 +8417,9 @@ class ConcurLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] + :param connection_properties: Properties used to connect to Concur. It is mutually exclusive + with any other properties in the linked service. 
Type: object. + :type connection_properties: object :param client_id: Required. Application client_id supplied by Concur App Management. :type client_id: object :param username: Required. The user name that you use to access Concur Service. @@ -7307,6 +8456,7 @@ class ConcurLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, @@ -7322,6 +8472,7 @@ def __init__( ): super(ConcurLinkedService, self).__init__(**kwargs) self.type = 'Concur' # type: str + self.connection_properties = kwargs.get('connection_properties', None) self.client_id = kwargs['client_id'] self.username = kwargs['username'] self.password = kwargs.get('password', None) @@ -7411,6 +8562,9 @@ class ConcurSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -7427,6 +8581,7 @@ class ConcurSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -7490,10 +8645,20 @@ class CopyActivity(ExecutionActivity): EnableSkipIncompatibleRow is true. :type redirect_incompatible_row_settings: ~azure.synapse.artifacts.models.RedirectIncompatibleRowSettings + :param log_storage_settings: (Deprecated. Please use LogSettings) Log storage settings customer + need to provide when enabling session log. + :type log_storage_settings: ~azure.synapse.artifacts.models.LogStorageSettings + :param log_settings: Log settings customer needs provide when enabling log. + :type log_settings: ~azure.synapse.artifacts.models.LogSettings :param preserve_rules: Preserve Rules. :type preserve_rules: list[object] :param preserve: Preserve rules. :type preserve: list[object] + :param validate_data_consistency: Whether to enable Data Consistency validation. Type: boolean + (or Expression with resultType boolean). + :type validate_data_consistency: object + :param skip_error_file: Specify the fault tolerance for data consistency. 
+ :type skip_error_file: ~azure.synapse.artifacts.models.SkipErrorFile """ _validation = { @@ -7523,8 +8688,12 @@ class CopyActivity(ExecutionActivity): 'data_integration_units': {'key': 'typeProperties.dataIntegrationUnits', 'type': 'object'}, 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, + 'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'}, + 'log_settings': {'key': 'typeProperties.logSettings', 'type': 'LogSettings'}, 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, + 'validate_data_consistency': {'key': 'typeProperties.validateDataConsistency', 'type': 'object'}, + 'skip_error_file': {'key': 'typeProperties.skipErrorFile', 'type': 'SkipErrorFile'}, } def __init__( @@ -7544,8 +8713,37 @@ def __init__( self.data_integration_units = kwargs.get('data_integration_units', None) self.enable_skip_incompatible_row = kwargs.get('enable_skip_incompatible_row', None) self.redirect_incompatible_row_settings = kwargs.get('redirect_incompatible_row_settings', None) + self.log_storage_settings = kwargs.get('log_storage_settings', None) + self.log_settings = kwargs.get('log_settings', None) self.preserve_rules = kwargs.get('preserve_rules', None) self.preserve = kwargs.get('preserve', None) + self.validate_data_consistency = kwargs.get('validate_data_consistency', None) + self.skip_error_file = kwargs.get('skip_error_file', None) + + +class CopyActivityLogSettings(msrest.serialization.Model): + """Settings for copy activity log. + + :param log_level: Gets or sets the log level, support: Info, Warning. Type: string (or + Expression with resultType string). + :type log_level: object + :param enable_reliable_logging: Specifies whether to enable reliable logging. Type: boolean (or + Expression with resultType boolean). + :type enable_reliable_logging: object + """ + + _attribute_map = { + 'log_level': {'key': 'logLevel', 'type': 'object'}, + 'enable_reliable_logging': {'key': 'enableReliableLogging', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(CopyActivityLogSettings, self).__init__(**kwargs) + self.log_level = kwargs.get('log_level', None) + self.enable_reliable_logging = kwargs.get('enable_reliable_logging', None) class CopyTranslator(msrest.serialization.Model): @@ -7853,6 +9051,9 @@ class CosmosDbMongoDbApiSource(CopySource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). 
+ :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -7869,6 +9070,7 @@ class CosmosDbMongoDbApiSource(CopySource): 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( @@ -7881,6 +9083,7 @@ def __init__( self.cursor_methods = kwargs.get('cursor_methods', None) self.batch_size = kwargs.get('batch_size', None) self.query_timeout = kwargs.get('query_timeout', None) + self.additional_columns = kwargs.get('additional_columns', None) class CosmosDbSqlApiCollectionDataset(Dataset): @@ -8024,6 +9227,12 @@ class CosmosDbSqlApiSource(CopySource): :param preferred_regions: Preferred regions. Type: array of strings (or Expression with resultType array of strings). :type preferred_regions: object + :param detect_datetime: Whether detect primitive values as datetime values. Type: boolean (or + Expression with resultType boolean). + :type detect_datetime: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -8039,6 +9248,8 @@ class CosmosDbSqlApiSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, 'page_size': {'key': 'pageSize', 'type': 'object'}, 'preferred_regions': {'key': 'preferredRegions', 'type': 'object'}, + 'detect_datetime': {'key': 'detectDatetime', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( @@ -8050,6 +9261,8 @@ def __init__( self.query = kwargs.get('query', None) self.page_size = kwargs.get('page_size', None) self.preferred_regions = kwargs.get('preferred_regions', None) + self.detect_datetime = kwargs.get('detect_datetime', None) + self.additional_columns = kwargs.get('additional_columns', None) class CouchbaseLinkedService(LinkedService): @@ -8130,6 +9343,9 @@ class CouchbaseSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -8146,6 +9362,7 @@ class CouchbaseSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -8338,6 +9555,9 @@ class CustomActivity(ExecutionActivity): :param retention_time_in_days: The retention time for the files submitted for custom activity. Type: double (or Expression with resultType double). :type retention_time_in_days: object + :param auto_user_specification: Elevation level and scope for the user, default is nonadmin + task. 
Type: string (or Expression with resultType double). + :type auto_user_specification: object """ _validation = { @@ -8361,6 +9581,7 @@ class CustomActivity(ExecutionActivity): 'reference_objects': {'key': 'typeProperties.referenceObjects', 'type': 'CustomActivityReferenceObject'}, 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': '{object}'}, 'retention_time_in_days': {'key': 'typeProperties.retentionTimeInDays', 'type': 'object'}, + 'auto_user_specification': {'key': 'typeProperties.autoUserSpecification', 'type': 'object'}, } def __init__( @@ -8375,6 +9596,7 @@ def __init__( self.reference_objects = kwargs.get('reference_objects', None) self.extended_properties = kwargs.get('extended_properties', None) self.retention_time_in_days = kwargs.get('retention_time_in_days', None) + self.auto_user_specification = kwargs.get('auto_user_specification', None) class CustomActivityReferenceObject(msrest.serialization.Model): @@ -8533,6 +9755,71 @@ def __init__( self.key = kwargs.get('key', None) +class CustomEventsTrigger(MultiplePipelineTrigger): + """Trigger that runs every time a custom event is received. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Trigger type.Constant filled by server. + :type type: str + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. + :type annotations: list[object] + :param pipelines: Pipelines that need to be started. + :type pipelines: list[~azure.synapse.artifacts.models.TriggerPipelineReference] + :param subject_begins_with: The event subject must begin with the pattern provided for trigger + to fire. At least one of these must be provided: subjectBeginsWith, subjectEndsWith. + :type subject_begins_with: str + :param subject_ends_with: The event subject must end with the pattern provided for trigger to + fire. At least one of these must be provided: subjectBeginsWith, subjectEndsWith. + :type subject_ends_with: str + :param events: Required. The list of event types that cause this trigger to fire. + :type events: list[object] + :param scope: Required. The ARM resource ID of the Azure Event Grid Topic. 
+ :type scope: str + """ + + _validation = { + 'type': {'required': True}, + 'runtime_state': {'readonly': True}, + 'events': {'required': True}, + 'scope': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + 'subject_begins_with': {'key': 'typeProperties.subjectBeginsWith', 'type': 'str'}, + 'subject_ends_with': {'key': 'typeProperties.subjectEndsWith', 'type': 'str'}, + 'events': {'key': 'typeProperties.events', 'type': '[object]'}, + 'scope': {'key': 'typeProperties.scope', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(CustomEventsTrigger, self).__init__(**kwargs) + self.type = 'CustomEventsTrigger' # type: str + self.subject_begins_with = kwargs.get('subject_begins_with', None) + self.subject_ends_with = kwargs.get('subject_ends_with', None) + self.events = kwargs['events'] + self.scope = kwargs['scope'] + + class CustomSetupBase(msrest.serialization.Model): """The base definition of the custom setup. @@ -9348,6 +10635,10 @@ class DataFlowSink(Transformation): :type description: str :param dataset: Dataset reference. :type dataset: ~azure.synapse.artifacts.models.DatasetReference + :param linked_service: Linked service reference. + :type linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference + :param schema_linked_service: Schema linked service reference. + :type schema_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference """ _validation = { @@ -9358,6 +10649,8 @@ class DataFlowSink(Transformation): 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, + 'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'}, + 'schema_linked_service': {'key': 'schemaLinkedService', 'type': 'LinkedServiceReference'}, } def __init__( @@ -9366,6 +10659,8 @@ def __init__( ): super(DataFlowSink, self).__init__(**kwargs) self.dataset = kwargs.get('dataset', None) + self.linked_service = kwargs.get('linked_service', None) + self.schema_linked_service = kwargs.get('schema_linked_service', None) class DataFlowSource(Transformation): @@ -9379,6 +10674,10 @@ class DataFlowSource(Transformation): :type description: str :param dataset: Dataset reference. :type dataset: ~azure.synapse.artifacts.models.DatasetReference + :param linked_service: Linked service reference. + :type linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference + :param schema_linked_service: Schema linked service reference. 
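# --- Editorial aside, not part of the generated diff: a minimal sketch of the new
# CustomEventsTrigger model defined above. The event type and Event Grid topic resource
# ID are placeholders.
from azure.synapse.artifacts import models

custom_events_trigger = models.CustomEventsTrigger(
    events=["Contoso.Orders.Created"],   # required: event types that cause the trigger to fire
    scope=(
        "/subscriptions/<subscription-id>/resourceGroups/<rg>"
        "/providers/Microsoft.EventGrid/topics/<topic-name>"
    ),                                   # required: ARM resource ID of the Event Grid topic
    subject_begins_with="orders/",       # optional subject filter
)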
+ :type schema_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference """ _validation = { @@ -9389,6 +10688,8 @@ class DataFlowSource(Transformation): 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, + 'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'}, + 'schema_linked_service': {'key': 'schemaLinkedService', 'type': 'LinkedServiceReference'}, } def __init__( @@ -9397,6 +10698,8 @@ def __init__( ): super(DataFlowSource, self).__init__(**kwargs) self.dataset = kwargs.get('dataset', None) + self.linked_service = kwargs.get('linked_service', None) + self.schema_linked_service = kwargs.get('schema_linked_service', None) class DataFlowSourceSetting(msrest.serialization.Model): @@ -9561,7 +10864,7 @@ class DatasetCompression(msrest.serialization.Model): """The compression method used on a dataset. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DatasetBZip2Compression, DatasetDeflateCompression, DatasetGZipCompression, DatasetZipDeflateCompression. + sub-classes are: DatasetBZip2Compression, DatasetDeflateCompression, DatasetGZipCompression, DatasetTarCompression, DatasetTarGZipCompression, DatasetZipDeflateCompression. All required parameters must be populated in order to send to Azure. @@ -9582,7 +10885,7 @@ class DatasetCompression(msrest.serialization.Model): } _subtype_map = { - 'type': {'BZip2': 'DatasetBZip2Compression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'ZipDeflate': 'DatasetZipDeflateCompression'} + 'type': {'BZip2': 'DatasetBZip2Compression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'Tar': 'DatasetTarCompression', 'TarGZip': 'DatasetTarGZipCompression', 'ZipDeflate': 'DatasetZipDeflateCompression'} } def __init__( @@ -9684,8 +10987,8 @@ class DatasetDeflateCompression(DatasetCompression): :type additional_properties: dict[str, object] :param type: Required. Type of dataset compression.Constant filled by server. :type type: str - :param level: The Deflate compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~azure.synapse.artifacts.models.DatasetCompressionLevel + :param level: The Deflate compression level. + :type level: object """ _validation = { @@ -9695,7 +10998,7 @@ class DatasetDeflateCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__( @@ -9736,8 +11039,8 @@ class DatasetGZipCompression(DatasetCompression): :type additional_properties: dict[str, object] :param type: Required. Type of dataset compression.Constant filled by server. :type type: str - :param level: The GZip compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~azure.synapse.artifacts.models.DatasetCompressionLevel + :param level: The GZip compression level. 
+ :type level: object """ _validation = { @@ -9747,7 +11050,7 @@ class DatasetGZipCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__( @@ -9895,6 +11198,68 @@ def __init__( self.type = kwargs.get('type', None) +class DatasetTarCompression(DatasetCompression): + """The Tar archive method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset compression.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DatasetTarCompression, self).__init__(**kwargs) + self.type = 'Tar' # type: str + + +class DatasetTarGZipCompression(DatasetCompression): + """The TarGZip compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset compression.Constant filled by server. + :type type: str + :param level: The TarGZip compression level. + :type level: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DatasetTarGZipCompression, self).__init__(**kwargs) + self.type = 'TarGZip' # type: str + self.level = kwargs.get('level', None) + + class DatasetZipDeflateCompression(DatasetCompression): """The ZipDeflate compression method used on a dataset. @@ -9905,8 +11270,8 @@ class DatasetZipDeflateCompression(DatasetCompression): :type additional_properties: dict[str, object] :param type: Required. Type of dataset compression.Constant filled by server. :type type: str - :param level: The ZipDeflate compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~azure.synapse.artifacts.models.DatasetCompressionLevel + :param level: The ZipDeflate compression level. + :type level: object """ _validation = { @@ -9916,7 +11281,7 @@ class DatasetZipDeflateCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__( @@ -9946,29 +11311,34 @@ class Db2LinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param server: Required. Server name for connection. Type: string (or Expression with - resultType string). + :param connection_string: The connection string. It is mutually exclusive with server, + database, authenticationType, userName, packageCollection and certificateCommonName property. 
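# --- Editorial aside, not part of the generated diff: the Tar/TarGZip compression subtypes
# registered above attach to a dataset's `compression` property like the existing ones.
# A minimal, illustrative sketch; the level value is an assumption.
from azure.synapse.artifacts import models

tar_compression = models.DatasetTarCompression()       # plain tar archive, no compression level
targzip_compression = models.DatasetTarGZipCompression(
    level="Optimal",                                    # level is now a free-form object
)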
+ Type: string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param server: Required. Server name for connection. It is mutually exclusive with + connectionString property. Type: string (or Expression with resultType string). :type server: object - :param database: Required. Database name for connection. Type: string (or Expression with - resultType string). + :param database: Required. Database name for connection. It is mutually exclusive with + connectionString property. Type: string (or Expression with resultType string). :type database: object - :param authentication_type: AuthenticationType to be used for connection. Possible values - include: "Basic". + :param authentication_type: AuthenticationType to be used for connection. It is mutually + exclusive with connectionString property. Possible values include: "Basic". :type authentication_type: str or ~azure.synapse.artifacts.models.Db2AuthenticationType - :param username: Username for authentication. Type: string (or Expression with resultType - string). + :param username: Username for authentication. It is mutually exclusive with connectionString + property. Type: string (or Expression with resultType string). :type username: object :param password: Password for authentication. :type password: ~azure.synapse.artifacts.models.SecretBase - :param package_collection: Under where packages are created when querying database. Type: - string (or Expression with resultType string). + :param package_collection: Under where packages are created when querying database. It is + mutually exclusive with connectionString property. Type: string (or Expression with resultType + string). :type package_collection: object - :param certificate_common_name: Certificate Common Name when TLS is enabled. Type: string (or - Expression with resultType string). + :param certificate_common_name: Certificate Common Name when TLS is enabled. It is mutually + exclusive with connectionString property. Type: string (or Expression with resultType string). :type certificate_common_name: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). + encrypted using the integration runtime credential manager. It is mutually exclusive with + connectionString property. Type: string (or Expression with resultType string). :type encrypted_credential: object """ @@ -9985,6 +11355,7 @@ class Db2LinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, 'server': {'key': 'typeProperties.server', 'type': 'object'}, 'database': {'key': 'typeProperties.database', 'type': 'object'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, @@ -10001,6 +11372,7 @@ def __init__( ): super(Db2LinkedService, self).__init__(**kwargs) self.type = 'Db2' # type: str + self.connection_string = kwargs.get('connection_string', None) self.server = kwargs['server'] self.database = kwargs['database'] self.authentication_type = kwargs.get('authentication_type', None) @@ -10033,6 +11405,9 @@ class Db2Source(TabularSource): :param query_timeout: Query timeout. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object """ @@ -10048,6 +11423,7 @@ class Db2Source(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -10164,6 +11540,8 @@ class DeleteActivity(ExecutionActivity): :type log_storage_settings: ~azure.synapse.artifacts.models.LogStorageSettings :param dataset: Required. Delete activity dataset reference. :type dataset: ~azure.synapse.artifacts.models.DatasetReference + :param store_settings: Delete activity store settings. + :type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings """ _validation = { @@ -10187,6 +11565,7 @@ class DeleteActivity(ExecutionActivity): 'enable_logging': {'key': 'typeProperties.enableLogging', 'type': 'object'}, 'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'}, 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, + 'store_settings': {'key': 'typeProperties.storeSettings', 'type': 'StoreReadSettings'}, } def __init__( @@ -10200,6 +11579,7 @@ def __init__( self.enable_logging = kwargs.get('enable_logging', None) self.log_storage_settings = kwargs.get('log_storage_settings', None) self.dataset = kwargs['dataset'] + self.store_settings = kwargs.get('store_settings', None) class DeleteDataFlowDebugSessionRequest(msrest.serialization.Model): @@ -10266,11 +11646,10 @@ class DelimitedTextDataset(Dataset): resultType string). :type encoding_name: object :param compression_codec: Possible values include: "bzip2", "gzip", "deflate", "zipDeflate", - "snappy", "lz4". - :type compression_codec: str or ~azure.synapse.artifacts.models.DelimitedTextCompressionCodec - :param compression_level: The data compression method used for DelimitedText. Possible values - include: "Optimal", "Fastest". - :type compression_level: str or ~azure.synapse.artifacts.models.DatasetCompressionLevel + "snappy", "lz4", "tar", "tarGZip". + :type compression_codec: str or ~azure.synapse.artifacts.models.CompressionCodec + :param compression_level: The data compression method used for DelimitedText. + :type compression_level: object :param quote_char: The quote character. Type: string (or Expression with resultType string). :type quote_char: object :param escape_char: The escape character. Type: string (or Expression with resultType string). 
@@ -10303,7 +11682,7 @@ class DelimitedTextDataset(Dataset): 'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'}, 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'}, - 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'str'}, + 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'object'}, 'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'}, 'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'}, 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, @@ -10328,43 +11707,6 @@ def __init__( self.null_value = kwargs.get('null_value', None) -class FormatReadSettings(msrest.serialization.Model): - """Format read settings. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DelimitedTextReadSettings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'DelimitedTextReadSettings': 'DelimitedTextReadSettings'} - } - - def __init__( - self, - **kwargs - ): - super(FormatReadSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'FormatReadSettings' # type: str - - class DelimitedTextReadSettings(FormatReadSettings): """Delimited text read settings. @@ -10378,6 +11720,8 @@ class DelimitedTextReadSettings(FormatReadSettings): :param skip_line_count: Indicates the number of non-empty rows to skip when reading data from input files. Type: integer (or Expression with resultType integer). :type skip_line_count: object + :param compression_properties: Compression settings. + :type compression_properties: ~azure.synapse.artifacts.models.CompressionReadSettings """ _validation = { @@ -10388,6 +11732,7 @@ class DelimitedTextReadSettings(FormatReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, + 'compression_properties': {'key': 'compressionProperties', 'type': 'CompressionReadSettings'}, } def __init__( @@ -10397,6 +11742,7 @@ def __init__( super(DelimitedTextReadSettings, self).__init__(**kwargs) self.type = 'DelimitedTextReadSettings' # type: str self.skip_line_count = kwargs.get('skip_line_count', None) + self.compression_properties = kwargs.get('compression_properties', None) class DelimitedTextSink(CopySink): @@ -10479,6 +11825,9 @@ class DelimitedTextSource(CopySource): :type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings :param format_settings: DelimitedText format settings. :type format_settings: ~azure.synapse.artifacts.models.DelimitedTextReadSettings + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). 
+ :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -10493,6 +11842,7 @@ class DelimitedTextSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSettings'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( @@ -10503,6 +11853,7 @@ def __init__( self.type = 'DelimitedTextSource' # type: str self.store_settings = kwargs.get('store_settings', None) self.format_settings = kwargs.get('format_settings', None) + self.additional_columns = kwargs.get('additional_columns', None) class DelimitedTextWriteSettings(FormatWriteSettings): @@ -10521,6 +11872,13 @@ class DelimitedTextWriteSettings(FormatWriteSettings): :param file_extension: Required. The file extension used to create the files. Type: string (or Expression with resultType string). :type file_extension: object + :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + specified count. Type: integer (or Expression with resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). + :type file_name_prefix: object """ _validation = { @@ -10533,6 +11891,8 @@ class DelimitedTextWriteSettings(FormatWriteSettings): 'type': {'key': 'type', 'type': 'str'}, 'quote_all_text': {'key': 'quoteAllText', 'type': 'object'}, 'file_extension': {'key': 'fileExtension', 'type': 'object'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, } def __init__( @@ -10543,6 +11903,8 @@ def __init__( self.type = 'DelimitedTextWriteSettings' # type: str self.quote_all_text = kwargs.get('quote_all_text', None) self.file_extension = kwargs['file_extension'] + self.max_rows_per_file = kwargs.get('max_rows_per_file', None) + self.file_name_prefix = kwargs.get('file_name_prefix', None) class DependencyReference(msrest.serialization.Model): @@ -10761,6 +12123,9 @@ class DocumentDbCollectionSource(CopySource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). 
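# --- Editorial aside, not part of the generated diff: a minimal sketch of the new
# maxRowsPerFile / fileNamePrefix options on DelimitedTextWriteSettings shown above.
# The extension and prefix values are illustrative.
from azure.synapse.artifacts import models

write_settings = models.DelimitedTextWriteSettings(
    file_extension=".csv",        # required
    max_rows_per_file=1000000,    # split output once a file reaches this row count
    file_name_prefix="part",      # used when copying from a non-file-based store without partitionOptions
)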
+ :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -10776,6 +12141,7 @@ class DocumentDbCollectionSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( @@ -10787,6 +12153,7 @@ def __init__( self.query = kwargs.get('query', None) self.nesting_separator = kwargs.get('nesting_separator', None) self.query_timeout = kwargs.get('query_timeout', None) + self.additional_columns = kwargs.get('additional_columns', None) class DrillLinkedService(LinkedService): @@ -10867,6 +12234,9 @@ class DrillSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -10883,6 +12253,7 @@ class DrillSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -11195,9 +12566,17 @@ class DynamicsAXSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object + :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout + to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string + (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type http_request_timeout: object """ _validation = { @@ -11211,7 +12590,9 @@ class DynamicsAXSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } def __init__( @@ -11221,6 +12602,7 @@ def __init__( super(DynamicsAXSource, self).__init__(**kwargs) self.type = 'DynamicsAXSource' # type: str self.query = kwargs.get('query', None) + self.http_request_timeout = kwargs.get('http_request_timeout', None) class DynamicsCrmEntityDataset(Dataset): @@ -11334,12 +12716,9 @@ class DynamicsCrmLinkedService(LinkedService): :param service_principal_id: The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). :type service_principal_id: object - :param service_principal_credential_type: The service principal credential type to use in - Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). Possible values include: - "ServicePrincipalKey", "ServicePrincipalCert". - :type service_principal_credential_type: str or - ~azure.synapse.artifacts.models.DynamicsServicePrincipalCredentialType + :param service_principal_credential_type: A string from ServicePrincipalCredentialEnum or an + expression. + :type service_principal_credential_type: object :param service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If @@ -11374,7 +12753,7 @@ class DynamicsCrmLinkedService(LinkedService): 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'object'}, 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -11487,6 +12866,9 @@ class DynamicsCrmSource(CopySource): :param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics CRM (online & on-premises). Type: string (or Expression with resultType string). :type query: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). 
+ :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -11500,6 +12882,7 @@ class DynamicsCrmSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( @@ -11509,6 +12892,7 @@ def __init__( super(DynamicsCrmSource, self).__init__(**kwargs) self.type = 'DynamicsCrmSource' # type: str self.query = kwargs.get('query', None) + self.additional_columns = kwargs.get('additional_columns', None) class DynamicsEntityDataset(Dataset): @@ -11594,18 +12978,18 @@ class DynamicsLinkedService(LinkedService): :type deployment_type: str or ~azure.synapse.artifacts.models.DynamicsDeploymentType :param host_name: The host name of the on-premises Dynamics server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). - :type host_name: str + :type host_name: object :param port: The port of on-premises Dynamics server. The property is required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. - :type port: str + :type port: object :param service_uri: The URL to the Microsoft Dynamics server. The property is required for on- line and not allowed for on-prem. Type: string (or Expression with resultType string). - :type service_uri: str + :type service_uri: object :param organization_name: The organization name of the Dynamics instance. The property is required for on-prem and required for online when there are more than one Dynamics instances associated with the user. Type: string (or Expression with resultType string). - :type organization_name: str + :type organization_name: object :param authentication_type: Required. The authentication type to connect to Dynamics server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or Expression with @@ -11651,10 +13035,10 @@ class DynamicsLinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, - 'host_name': {'key': 'typeProperties.hostName', 'type': 'str'}, - 'port': {'key': 'typeProperties.port', 'type': 'str'}, - 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'str'}, - 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'str'}, + 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, + 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, @@ -11772,6 +13156,9 @@ class DynamicsSource(CopySource): :param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics (online & on-premises). Type: string (or Expression with resultType string). 
:type query: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -11785,6 +13172,7 @@ class DynamicsSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( @@ -11794,6 +13182,7 @@ def __init__( super(DynamicsSource, self).__init__(**kwargs) self.type = 'DynamicsSource' # type: str self.query = kwargs.get('query', None) + self.additional_columns = kwargs.get('additional_columns', None) class EloquaLinkedService(LinkedService): @@ -11954,6 +13343,9 @@ class EloquaSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -11970,6 +13362,7 @@ class EloquaSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -12164,6 +13557,138 @@ def __init__( self.expression = kwargs.get('expression', None) +class ExcelDataset(Dataset): + """Excel dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param location: The location of the excel storage. 
+ :type location: ~azure.synapse.artifacts.models.DatasetLocation + :param sheet_name: The sheet of excel file. Type: string (or Expression with resultType + string). + :type sheet_name: object + :param range: The partial data of one sheet. Type: string (or Expression with resultType + string). + :type range: object + :param first_row_as_header: When used as input, treat the first row of data as headers. When + used as output,write the headers into the output as the first row of data. The default value is + false. Type: boolean (or Expression with resultType boolean). + :type first_row_as_header: object + :param compression: The data compression method used for the json dataset. + :type compression: ~azure.synapse.artifacts.models.DatasetCompression + :param null_value: The null value string. Type: string (or Expression with resultType string). + :type null_value: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'sheet_name': {'key': 'typeProperties.sheetName', 'type': 'object'}, + 'range': {'key': 'typeProperties.range', 'type': 'object'}, + 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + 'null_value': {'key': 'typeProperties.nullValue', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(ExcelDataset, self).__init__(**kwargs) + self.type = 'Excel' # type: str + self.location = kwargs.get('location', None) + self.sheet_name = kwargs.get('sheet_name', None) + self.range = kwargs.get('range', None) + self.first_row_as_header = kwargs.get('first_row_as_header', None) + self.compression = kwargs.get('compression', None) + self.null_value = kwargs.get('null_value', None) + + +class ExcelSource(CopySource): + """A copy activity excel source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: Excel store settings. 
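# --- Editorial aside, not part of the generated diff: a minimal sketch of the new
# ExcelDataset model defined above. The LinkedServiceReference kwargs are assumed from the
# wider SDK surface, and all names below are placeholders.
from azure.synapse.artifacts import models

excel_dataset = models.ExcelDataset(
    linked_service_name=models.LinkedServiceReference(
        type="LinkedServiceReference",      # assumed reference shape
        reference_name="MyAzureBlobStorage",
    ),
    sheet_name="Sheet1",
    range="A1:D100",            # optional partial range of the sheet
    first_row_as_header=True,
)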
+ :type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + } + + def __init__( + self, + **kwargs + ): + super(ExcelSource, self).__init__(**kwargs) + self.type = 'ExcelSource' # type: str + self.store_settings = kwargs.get('store_settings', None) + self.additional_columns = kwargs.get('additional_columns', None) + + class ExecuteDataFlowActivity(ExecutionActivity): """Execute data flow activity. @@ -12186,20 +13711,29 @@ class ExecuteDataFlowActivity(ExecutionActivity): :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param data_flow: Required. Data flow reference. - :type data_flow: ~azure.synapse.artifacts.models.DataFlowReference + :param dataflow: Data flow reference. + :type dataflow: ~azure.synapse.artifacts.models.DataFlowReference :param staging: Staging info for execute data flow activity. :type staging: ~azure.synapse.artifacts.models.DataFlowStagingInfo :param integration_runtime: The integration runtime reference. :type integration_runtime: ~azure.synapse.artifacts.models.IntegrationRuntimeReference :param compute: Compute properties for data flow activity. :type compute: ~azure.synapse.artifacts.models.ExecuteDataFlowActivityTypePropertiesCompute + :param trace_level: Trace level setting used for data flow monitoring output. Supported values + are: 'coarse', 'fine', and 'none'. Type: string (or Expression with resultType string). + :type trace_level: object + :param continue_on_error: Continue on error setting used for data flow execution. Enables + processing to continue if a sink fails. Type: boolean (or Expression with resultType boolean). + :type continue_on_error: object + :param run_concurrently: Concurrent run setting used for data flow execution. Allows sinks with + the same save order to be processed concurrently. Type: boolean (or Expression with resultType + boolean). 
+ :type run_concurrently: object """ _validation = { 'name': {'required': True}, 'type': {'required': True}, - 'data_flow': {'required': True}, } _attribute_map = { @@ -12211,10 +13745,13 @@ class ExecuteDataFlowActivity(ExecutionActivity): 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'data_flow': {'key': 'typeProperties.dataFlow', 'type': 'DataFlowReference'}, + 'dataflow': {'key': 'typeProperties.dataflow', 'type': 'DataFlowReference'}, 'staging': {'key': 'typeProperties.staging', 'type': 'DataFlowStagingInfo'}, 'integration_runtime': {'key': 'typeProperties.integrationRuntime', 'type': 'IntegrationRuntimeReference'}, 'compute': {'key': 'typeProperties.compute', 'type': 'ExecuteDataFlowActivityTypePropertiesCompute'}, + 'trace_level': {'key': 'typeProperties.traceLevel', 'type': 'object'}, + 'continue_on_error': {'key': 'typeProperties.continueOnError', 'type': 'object'}, + 'run_concurrently': {'key': 'typeProperties.runConcurrently', 'type': 'object'}, } def __init__( @@ -12223,10 +13760,13 @@ def __init__( ): super(ExecuteDataFlowActivity, self).__init__(**kwargs) self.type = 'ExecuteDataFlow' # type: str - self.data_flow = kwargs['data_flow'] + self.dataflow = kwargs.get('dataflow', None) self.staging = kwargs.get('staging', None) self.integration_runtime = kwargs.get('integration_runtime', None) self.compute = kwargs.get('compute', None) + self.trace_level = kwargs.get('trace_level', None) + self.continue_on_error = kwargs.get('continue_on_error', None) + self.run_concurrently = kwargs.get('run_concurrently', None) class ExecuteDataFlowActivityTypePropertiesCompute(msrest.serialization.Model): @@ -12619,14 +14159,27 @@ class FileServerReadSettings(StoreReadSettings): :param wildcard_file_name: FileServer wildcardFileName. Type: string (or Expression with resultType string). :type wildcard_file_name: object + :param file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition discovery starts from. Type: + string (or Expression with resultType string). + :type partition_root_path: object + :param delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :type delete_files_after_completion: object :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). :type modified_datetime_start: object :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). :type modified_datetime_end: object + :param file_filter: Specify a filter to be used to select a subset of files in the folderPath + rather than all files. Type: string (or Expression with resultType string). 
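# --- Editorial aside, not part of the generated diff: a minimal sketch of the renamed
# `dataflow` property and the new traceLevel / continueOnError / runConcurrently settings
# on ExecuteDataFlowActivity shown above. The DataFlowReference kwargs are assumed from the
# wider SDK surface; names are placeholders.
from azure.synapse.artifacts import models

execute_data_flow = models.ExecuteDataFlowActivity(
    name="RunMyDataFlow",
    dataflow=models.DataFlowReference(
        type="DataFlowReference",       # assumed reference shape
        reference_name="MyDataFlow",    # placeholder
    ),
    trace_level="fine",                 # 'coarse', 'fine', or 'none'
    continue_on_error=True,
    run_concurrently=False,
)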
+ :type file_filter: object """ _validation = { @@ -12640,9 +14193,13 @@ class FileServerReadSettings(StoreReadSettings): 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, + 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + 'file_filter': {'key': 'fileFilter', 'type': 'object'}, } def __init__( @@ -12654,9 +14211,13 @@ def __init__( self.recursive = kwargs.get('recursive', None) self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.file_list_path = kwargs.get('file_list_path', None) self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.partition_root_path = kwargs.get('partition_root_path', None) + self.delete_files_after_completion = kwargs.get('delete_files_after_completion', None) self.modified_datetime_start = kwargs.get('modified_datetime_start', None) self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + self.file_filter = kwargs.get('file_filter', None) class FileServerWriteSettings(StoreWriteSettings): @@ -12695,6 +14256,93 @@ def __init__( self.type = 'FileServerWriteSettings' # type: str +class FileShareDataset(Dataset): + """An on-premises file system dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param folder_path: The path of the on-premises file system. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: The name of the on-premises file system. Type: string (or Expression with + resultType string). + :type file_name: object + :param modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). 
+ :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + with resultType string). + :type modified_datetime_end: object + :param format: The format of the files. + :type format: ~azure.synapse.artifacts.models.DatasetStorageFormat + :param file_filter: Specify a filter to be used to select a subset of files in the folderPath + rather than all files. Type: string (or Expression with resultType string). + :type file_filter: object + :param compression: The data compression method used for the file system. + :type compression: ~azure.synapse.artifacts.models.DatasetCompression + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'file_filter': {'key': 'typeProperties.fileFilter', 'type': 'object'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__( + self, + **kwargs + ): + super(FileShareDataset, self).__init__(**kwargs) + self.type = 'FileShare' # type: str + self.folder_path = kwargs.get('folder_path', None) + self.file_name = kwargs.get('file_name', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + self.format = kwargs.get('format', None) + self.file_filter = kwargs.get('file_filter', None) + self.compression = kwargs.get('compression', None) + + class FileSystemSink(CopySink): """A copy activity file system sink. @@ -12770,6 +14418,9 @@ class FileSystemSource(CopySource): :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). 
+ :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -12783,6 +14434,7 @@ class FileSystemSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( @@ -12792,6 +14444,7 @@ def __init__( super(FileSystemSource, self).__init__(**kwargs) self.type = 'FileSystemSource' # type: str self.recursive = kwargs.get('recursive', None) + self.additional_columns = kwargs.get('additional_columns', None) class FilterActivity(ControlActivity): @@ -12930,6 +14583,18 @@ class FtpReadSettings(StoreReadSettings): :param wildcard_file_name: Ftp wildcardFileName. Type: string (or Expression with resultType string). :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition discovery. + :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition discovery starts from. Type: + string (or Expression with resultType string). + :type partition_root_path: object + :param delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :type delete_files_after_completion: object + :param file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :type file_list_path: object :param use_binary_transfer: Specify whether to use binary transfer mode for FTP stores. :type use_binary_transfer: bool """ @@ -12945,6 +14610,10 @@ class FtpReadSettings(StoreReadSettings): 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, + 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'use_binary_transfer': {'key': 'useBinaryTransfer', 'type': 'bool'}, } @@ -12957,6 +14626,10 @@ def __init__( self.recursive = kwargs.get('recursive', None) self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.partition_root_path = kwargs.get('partition_root_path', None) + self.delete_files_after_completion = kwargs.get('delete_files_after_completion', None) + self.file_list_path = kwargs.get('file_list_path', None) self.use_binary_transfer = kwargs.get('use_binary_transfer', None) @@ -13106,6 +14779,10 @@ class GetMetadataActivity(ExecutionActivity): :type dataset: ~azure.synapse.artifacts.models.DatasetReference :param field_list: Fields of metadata to get from dataset. :type field_list: list[object] + :param store_settings: GetMetadata activity store settings. + :type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings + :param format_settings: GetMetadata activity format settings. 
+ :type format_settings: ~azure.synapse.artifacts.models.FormatReadSettings """ _validation = { @@ -13125,6 +14802,8 @@ class GetMetadataActivity(ExecutionActivity): 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, 'field_list': {'key': 'typeProperties.fieldList', 'type': '[object]'}, + 'store_settings': {'key': 'typeProperties.storeSettings', 'type': 'StoreReadSettings'}, + 'format_settings': {'key': 'typeProperties.formatSettings', 'type': 'FormatReadSettings'}, } def __init__( @@ -13135,6 +14814,8 @@ def __init__( self.type = 'GetMetadata' # type: str self.dataset = kwargs['dataset'] self.field_list = kwargs.get('field_list', None) + self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) class GetSsisObjectMetadataRequest(msrest.serialization.Model): @@ -13393,6 +15074,9 @@ class GoogleAdWordsSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -13409,6 +15093,7 @@ class GoogleAdWordsSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -13618,6 +15303,9 @@ class GoogleBigQuerySource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -13634,6 +15322,7 @@ class GoogleBigQuerySource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -13782,8 +15471,18 @@ class GoogleCloudStorageReadSettings(StoreReadSettings): :param prefix: The prefix filter for the Google Cloud Storage object name. Type: string (or Expression with resultType string). :type prefix: object + :param file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. 
:type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition discovery starts from. Type: + string (or Expression with resultType string). + :type partition_root_path: object + :param delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :type delete_files_after_completion: object :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). :type modified_datetime_start: object @@ -13804,7 +15503,10 @@ class GoogleCloudStorageReadSettings(StoreReadSettings): 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, 'prefix': {'key': 'prefix', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, + 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } @@ -13819,7 +15521,10 @@ def __init__( self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) self.prefix = kwargs.get('prefix', None) + self.file_list_path = kwargs.get('file_list_path', None) self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.partition_root_path = kwargs.get('partition_root_path', None) + self.delete_files_after_completion = kwargs.get('delete_files_after_completion', None) self.modified_datetime_start = kwargs.get('modified_datetime_start', None) self.modified_datetime_end = kwargs.get('modified_datetime_end', None) @@ -13902,6 +15607,9 @@ class GreenplumSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -13918,6 +15626,7 @@ class GreenplumSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -14175,6 +15884,9 @@ class HBaseSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). 
+ :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -14191,6 +15903,7 @@ class HBaseSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -14329,8 +16042,15 @@ class HdfsReadSettings(StoreReadSettings): :param wildcard_file_name: HDFS wildcardFileName. Type: string (or Expression with resultType string). :type wildcard_file_name: object + :param file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition discovery starts from. Type: + string (or Expression with resultType string). + :type partition_root_path: object :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). :type modified_datetime_start: object @@ -14339,6 +16059,9 @@ class HdfsReadSettings(StoreReadSettings): :type modified_datetime_end: object :param distcp_settings: Specifies Distcp-related settings. :type distcp_settings: ~azure.synapse.artifacts.models.DistcpSettings + :param delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). 
+ :type delete_files_after_completion: object """ _validation = { @@ -14352,10 +16075,13 @@ class HdfsReadSettings(StoreReadSettings): 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, + 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, } def __init__( @@ -14367,10 +16093,13 @@ def __init__( self.recursive = kwargs.get('recursive', None) self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.file_list_path = kwargs.get('file_list_path', None) self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.partition_root_path = kwargs.get('partition_root_path', None) self.modified_datetime_start = kwargs.get('modified_datetime_start', None) self.modified_datetime_end = kwargs.get('modified_datetime_end', None) self.distcp_settings = kwargs.get('distcp_settings', None) + self.delete_files_after_completion = kwargs.get('delete_files_after_completion', None) class HdfsSource(CopySource): @@ -15361,6 +17090,9 @@ class HiveSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -15377,6 +17109,7 @@ class HiveSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -15389,6 +17122,91 @@ def __init__( self.query = kwargs.get('query', None) +class HttpDataset(Dataset): + """A file in an HTTP web server. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. 
+ :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param relative_url: The relative URL based on the URL in the HttpLinkedService refers to an + HTTP file Type: string (or Expression with resultType string). + :type relative_url: object + :param request_method: The HTTP method for the HTTP request. Type: string (or Expression with + resultType string). + :type request_method: object + :param request_body: The body for the HTTP request. Type: string (or Expression with resultType + string). + :type request_body: object + :param additional_headers: The headers for the HTTP Request. e.g. request-header- + name-1:request-header-value-1 + ... + request-header-name-n:request-header-value-n Type: string (or Expression with resultType + string). + :type additional_headers: object + :param format: The format of files. + :type format: ~azure.synapse.artifacts.models.DatasetStorageFormat + :param compression: The data compression method used on files. + :type compression: ~azure.synapse.artifacts.models.DatasetCompression + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'relative_url': {'key': 'typeProperties.relativeUrl', 'type': 'object'}, + 'request_method': {'key': 'typeProperties.requestMethod', 'type': 'object'}, + 'request_body': {'key': 'typeProperties.requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'typeProperties.additionalHeaders', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__( + self, + **kwargs + ): + super(HttpDataset, self).__init__(**kwargs) + self.type = 'HttpFile' # type: str + self.relative_url = kwargs.get('relative_url', None) + self.request_method = kwargs.get('request_method', None) + self.request_body = kwargs.get('request_body', None) + self.additional_headers = kwargs.get('additional_headers', None) + self.format = kwargs.get('format', None) + self.compression = kwargs.get('compression', None) + + class HttpLinkedService(LinkedService): """Linked service for an HTTP source. @@ -15501,6 +17319,11 @@ class HttpReadSettings(StoreReadSettings): :param request_timeout: Specifies the timeout for a HTTP client to get HTTP response from HTTP server. :type request_timeout: object + :param enable_partition_discovery: Indicates whether to enable partition discovery. 
+ :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition discovery starts from. Type: + string (or Expression with resultType string). + :type partition_root_path: object """ _validation = { @@ -15515,6 +17338,8 @@ class HttpReadSettings(StoreReadSettings): 'request_body': {'key': 'requestBody', 'type': 'object'}, 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, 'request_timeout': {'key': 'requestTimeout', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, } def __init__( @@ -15527,6 +17352,8 @@ def __init__( self.request_body = kwargs.get('request_body', None) self.additional_headers = kwargs.get('additional_headers', None) self.request_timeout = kwargs.get('request_timeout', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.partition_root_path = kwargs.get('partition_root_path', None) class HttpServerLocation(DatasetLocation): @@ -15781,6 +17608,9 @@ class HubspotSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -15797,6 +17627,7 @@ class HubspotSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -16058,6 +17889,9 @@ class ImpalaSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -16074,6 +17908,7 @@ class ImpalaSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -16236,6 +18071,9 @@ class InformixSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). 
+ :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object """ @@ -16251,6 +18089,7 @@ class InformixSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -16908,6 +18747,9 @@ class JiraSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -16924,6 +18766,7 @@ class JiraSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -17073,6 +18916,39 @@ def __init__( self.json_path_definition = kwargs.get('json_path_definition', None) +class JsonReadSettings(FormatReadSettings): + """Json read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The read setting type.Constant filled by server. + :type type: str + :param compression_properties: Compression settings. + :type compression_properties: ~azure.synapse.artifacts.models.CompressionReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'compression_properties': {'key': 'compressionProperties', 'type': 'CompressionReadSettings'}, + } + + def __init__( + self, + **kwargs + ): + super(JsonReadSettings, self).__init__(**kwargs) + self.type = 'JsonReadSettings' # type: str + self.compression_properties = kwargs.get('compression_properties', None) + + class JsonSink(CopySink): """A copy activity Json sink. @@ -17151,6 +19027,11 @@ class JsonSource(CopySource): :type max_concurrent_connections: object :param store_settings: Json store settings. :type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings + :param format_settings: Json format settings. + :type format_settings: ~azure.synapse.artifacts.models.JsonReadSettings + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). 
+ :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -17164,6 +19045,8 @@ class JsonSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'JsonReadSettings'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( @@ -17173,6 +19056,8 @@ def __init__( super(JsonSource, self).__init__(**kwargs) self.type = 'JsonSource' # type: str self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) + self.additional_columns = kwargs.get('additional_columns', None) class JsonWriteSettings(FormatWriteSettings): @@ -17721,8 +19606,73 @@ def __init__( self.properties = kwargs['properties'] +class LogLocationSettings(msrest.serialization.Model): + """Log location settings. + + All required parameters must be populated in order to send to Azure. + + :param linked_service_name: Required. Log storage linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param path: The path to storage for storing detailed logs of activity execution. Type: string + (or Expression with resultType string). + :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'path': {'key': 'path', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(LogLocationSettings, self).__init__(**kwargs) + self.linked_service_name = kwargs['linked_service_name'] + self.path = kwargs.get('path', None) + + +class LogSettings(msrest.serialization.Model): + """Log settings. + + All required parameters must be populated in order to send to Azure. + + :param enable_copy_activity_log: Specifies whether to enable copy activity log. Type: boolean + (or Expression with resultType boolean). + :type enable_copy_activity_log: object + :param copy_activity_log_settings: Specifies settings for copy activity log. + :type copy_activity_log_settings: ~azure.synapse.artifacts.models.CopyActivityLogSettings + :param log_location_settings: Required. Log location settings customer needs to provide when + enabling log. + :type log_location_settings: ~azure.synapse.artifacts.models.LogLocationSettings + """ + + _validation = { + 'log_location_settings': {'required': True}, + } + + _attribute_map = { + 'enable_copy_activity_log': {'key': 'enableCopyActivityLog', 'type': 'object'}, + 'copy_activity_log_settings': {'key': 'copyActivityLogSettings', 'type': 'CopyActivityLogSettings'}, + 'log_location_settings': {'key': 'logLocationSettings', 'type': 'LogLocationSettings'}, + } + + def __init__( + self, + **kwargs + ): + super(LogSettings, self).__init__(**kwargs) + self.enable_copy_activity_log = kwargs.get('enable_copy_activity_log', None) + self.copy_activity_log_settings = kwargs.get('copy_activity_log_settings', None) + self.log_location_settings = kwargs['log_location_settings'] + + class LogStorageSettings(msrest.serialization.Model): - """Log storage settings. + """(Deprecated. Please use LogSettings) Log storage settings. All required parameters must be populated in order to send to Azure. 
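# --- Editor's aside (not part of the patch): a minimal usage sketch for the new
# --- LogSettings / LogLocationSettings models added above. The linked service name
# --- and path are hypothetical placeholders, and the CopyActivityLogSettings kwargs
# --- (log_level, enable_reliable_logging) are assumed to mirror the ADF copy-activity
# --- log schema rather than taken from this patch.
from azure.synapse.artifacts.models import (
    CopyActivityLogSettings,
    LinkedServiceReference,
    LogLocationSettings,
    LogSettings,
)

log_settings = LogSettings(
    enable_copy_activity_log=True,
    copy_activity_log_settings=CopyActivityLogSettings(
        log_level="Info",                # assumed to accept "Info" or "Warning"
        enable_reliable_logging=True,
    ),
    # Log location settings are required when copy activity logging is enabled.
    log_location_settings=LogLocationSettings(
        linked_service_name=LinkedServiceReference(
            type="LinkedServiceReference",
            reference_name="MyBlobStorageLinkedService",  # hypothetical linked service
        ),
        path="copy-activity-logs/",
    ),
)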
@@ -17734,6 +19684,12 @@ class LogStorageSettings(msrest.serialization.Model): :param path: The path to storage for storing detailed logs of activity execution. Type: string (or Expression with resultType string). :type path: object + :param log_level: Gets or sets the log level, support: Info, Warning. Type: string (or + Expression with resultType string). + :type log_level: object + :param enable_reliable_logging: Specifies whether to enable reliable logging. Type: boolean (or + Expression with resultType boolean). + :type enable_reliable_logging: object """ _validation = { @@ -17744,6 +19700,8 @@ class LogStorageSettings(msrest.serialization.Model): 'additional_properties': {'key': '', 'type': '{object}'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'path': {'key': 'path', 'type': 'object'}, + 'log_level': {'key': 'logLevel', 'type': 'object'}, + 'enable_reliable_logging': {'key': 'enableReliableLogging', 'type': 'object'}, } def __init__( @@ -17754,6 +19712,8 @@ def __init__( self.additional_properties = kwargs.get('additional_properties', None) self.linked_service_name = kwargs['linked_service_name'] self.path = kwargs.get('path', None) + self.log_level = kwargs.get('log_level', None) + self.enable_reliable_logging = kwargs.get('enable_reliable_logging', None) class LookupActivity(ExecutionActivity): @@ -17971,6 +19931,9 @@ class MagentoSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -17987,6 +19950,7 @@ class MagentoSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -18053,6 +20017,8 @@ class ManagedIntegrationRuntime(IntegrationRuntime): Possible values include: "Initial", "Stopped", "Started", "Starting", "Stopping", "NeedRegistration", "Online", "Limited", "Offline", "AccessDenied". :vartype state: str or ~azure.synapse.artifacts.models.IntegrationRuntimeState + :param managed_virtual_network: Managed Virtual Network reference. + :type managed_virtual_network: ~azure.synapse.artifacts.models.ManagedVirtualNetworkReference :param compute_properties: The compute resource for managed integration runtime. :type compute_properties: ~azure.synapse.artifacts.models.IntegrationRuntimeComputeProperties :param ssis_properties: SSIS properties for managed integration runtime. 
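# --- Editor's aside (not part of the patch): the additionalColumns option added to the
# --- copy sources above might be used roughly as below. The AdditionalColumns kwargs
# --- ('name', 'value') are assumed from the ADF copy-activity schema, and the query is
# --- only illustrative.
from azure.synapse.artifacts.models import AdditionalColumns, MagentoSource

source = MagentoSource(
    query="SELECT * FROM sales_order",
    additional_columns=[
        # Adds a constant column to every row read from the source.
        AdditionalColumns(name="source_system", value="magento"),
    ],
)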
@@ -18069,6 +20035,7 @@ class ManagedIntegrationRuntime(IntegrationRuntime): 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'state': {'key': 'state', 'type': 'str'}, + 'managed_virtual_network': {'key': 'managedVirtualNetwork', 'type': 'ManagedVirtualNetworkReference'}, 'compute_properties': {'key': 'typeProperties.computeProperties', 'type': 'IntegrationRuntimeComputeProperties'}, 'ssis_properties': {'key': 'typeProperties.ssisProperties', 'type': 'IntegrationRuntimeSsisProperties'}, } @@ -18080,10 +20047,45 @@ def __init__( super(ManagedIntegrationRuntime, self).__init__(**kwargs) self.type = 'Managed' # type: str self.state = None + self.managed_virtual_network = kwargs.get('managed_virtual_network', None) self.compute_properties = kwargs.get('compute_properties', None) self.ssis_properties = kwargs.get('ssis_properties', None) +class ManagedVirtualNetworkReference(msrest.serialization.Model): + """Managed Virtual Network reference type. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Managed Virtual Network reference type. Default value: + "ManagedVirtualNetworkReference". + :vartype type: str + :param reference_name: Required. Reference ManagedVirtualNetwork name. + :type reference_name: str + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + type = "ManagedVirtualNetworkReference" + + def __init__( + self, + **kwargs + ): + super(ManagedVirtualNetworkReference, self).__init__(**kwargs) + self.reference_name = kwargs['reference_name'] + + class ManagedVirtualNetworkSettings(msrest.serialization.Model): """Managed Virtual Network Settings. @@ -18240,6 +20242,9 @@ class MariaDBSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -18256,6 +20261,7 @@ class MariaDBSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -18483,6 +20489,9 @@ class MarketoSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. 
Type: string (or Expression with resultType string). :type query: object @@ -18499,6 +20508,7 @@ class MarketoSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -18660,6 +20670,9 @@ class MicrosoftAccessSource(CopySource): :type max_concurrent_connections: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -18673,6 +20686,7 @@ class MicrosoftAccessSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( @@ -18682,6 +20696,7 @@ def __init__( super(MicrosoftAccessSource, self).__init__(**kwargs) self.type = 'MicrosoftAccessSource' # type: str self.query = kwargs.get('query', None) + self.additional_columns = kwargs.get('additional_columns', None) class MicrosoftAccessTableDataset(Dataset): @@ -18743,6 +20758,188 @@ def __init__( self.table_name = kwargs.get('table_name', None) +class MongoDbAtlasCollectionDataset(Dataset): + """The MongoDB Atlas database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param collection: Required. The collection name of the MongoDB Atlas database. Type: string + (or Expression with resultType string). 
+ :type collection: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + 'collection': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MongoDbAtlasCollectionDataset, self).__init__(**kwargs) + self.type = 'MongoDbAtlasCollection' # type: str + self.collection = kwargs['collection'] + + +class MongoDbAtlasLinkedService(LinkedService): + """Linked service for MongoDB Atlas data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: Required. The MongoDB Atlas connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param database: Required. The name of the MongoDB Atlas database that you want to access. + Type: string (or Expression with resultType string). + :type database: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MongoDbAtlasLinkedService, self).__init__(**kwargs) + self.type = 'MongoDbAtlas' # type: str + self.connection_string = kwargs['connection_string'] + self.database = kwargs['database'] + + +class MongoDbAtlasSource(CopySource): + """A copy activity source for a MongoDB Atlas database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param filter: Specifies selection filter using query operators. To return all documents in a + collection, omit this parameter or pass an empty document ({}). Type: string (or Expression + with resultType string). + :type filter: object + :param cursor_methods: Cursor methods for Mongodb query. + :type cursor_methods: ~azure.synapse.artifacts.models.MongoDbCursorMethodsProperties + :param batch_size: Specifies the number of documents to return in each batch of the response + from MongoDB Atlas instance. In most cases, modifying the batch size will not affect the user + or the application. This property's main purpose is to avoid hit the limitation of response + size. Type: integer (or Expression with resultType integer). + :type batch_size: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'filter': {'key': 'filter', 'type': 'object'}, + 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + } + + def __init__( + self, + **kwargs + ): + super(MongoDbAtlasSource, self).__init__(**kwargs) + self.type = 'MongoDbAtlasSource' # type: str + self.filter = kwargs.get('filter', None) + self.cursor_methods = kwargs.get('cursor_methods', None) + self.batch_size = kwargs.get('batch_size', None) + self.query_timeout = kwargs.get('query_timeout', None) + self.additional_columns = kwargs.get('additional_columns', None) + + class MongoDbCollectionDataset(Dataset): """The MongoDB database dataset. @@ -18961,6 +21158,9 @@ class MongoDbSource(CopySource): :param query: Database query. Should be a SQL-92 query expression. Type: string (or Expression with resultType string). :type query: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). 
+ :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -18974,6 +21174,7 @@ class MongoDbSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( @@ -18983,6 +21184,7 @@ def __init__( super(MongoDbSource, self).__init__(**kwargs) self.type = 'MongoDbSource' # type: str self.query = kwargs.get('query', None) + self.additional_columns = kwargs.get('additional_columns', None) class MongoDbV2CollectionDataset(Dataset): @@ -19131,6 +21333,9 @@ class MongoDbV2Source(CopySource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -19147,6 +21352,7 @@ class MongoDbV2Source(CopySource): 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( @@ -19159,6 +21365,7 @@ def __init__( self.cursor_methods = kwargs.get('cursor_methods', None) self.batch_size = kwargs.get('batch_size', None) self.query_timeout = kwargs.get('query_timeout', None) + self.additional_columns = kwargs.get('additional_columns', None) class MySqlLinkedService(LinkedService): @@ -19239,6 +21446,9 @@ class MySqlSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object """ @@ -19254,6 +21464,7 @@ class MySqlSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -19434,6 +21645,9 @@ class NetezzaSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -19455,6 +21669,7 @@ class NetezzaSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'NetezzaPartitionSettings'}, @@ -19950,6 +22165,10 @@ class ODataLinkedService(LinkedService): :param service_principal_id: Specify the application id of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). :type service_principal_id: object + :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data + factory regions’ cloud type. Type: string (or Expression with resultType string). + :type azure_cloud_type: object :param aad_resource_id: Specify the resource you are requesting authorization to use Directory. Type: string (or Expression with resultType string). :type aad_resource_id: object @@ -19992,6 +22211,7 @@ class ODataLinkedService(LinkedService): 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, 'aad_service_principal_credential_type': {'key': 'typeProperties.aadServicePrincipalCredentialType', 'type': 'str'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, @@ -20012,6 +22232,7 @@ def __init__( self.password = kwargs.get('password', None) self.tenant = kwargs.get('tenant', None) self.service_principal_id = kwargs.get('service_principal_id', None) + self.azure_cloud_type = kwargs.get('azure_cloud_type', None) self.aad_resource_id = kwargs.get('aad_resource_id', None) self.aad_service_principal_credential_type = kwargs.get('aad_service_principal_credential_type', None) self.service_principal_key = kwargs.get('service_principal_key', None) @@ -20100,6 +22321,14 @@ class ODataSource(CopySource): :param query: OData query. For example, "$top=1". Type: string (or Expression with resultType string). :type query: object + :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout + to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string + (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type http_request_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). 
+ :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -20113,6 +22342,8 @@ class ODataSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( @@ -20122,6 +22353,8 @@ def __init__( super(ODataSource, self).__init__(**kwargs) self.type = 'ODataSource' # type: str self.query = kwargs.get('query', None) + self.http_request_timeout = kwargs.get('http_request_timeout', None) + self.additional_columns = kwargs.get('additional_columns', None) class OdbcLinkedService(LinkedService): @@ -20273,6 +22506,9 @@ class OdbcSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object """ @@ -20288,6 +22524,7 @@ class OdbcSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -20859,6 +23096,9 @@ class OracleServiceCloudSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -20875,6 +23115,7 @@ class OracleServiceCloudSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -20971,6 +23212,9 @@ class OracleSource(CopySource): :type partition_option: str or ~azure.synapse.artifacts.models.OraclePartitionOption :param partition_settings: The settings that will be leveraged for Oracle source partitioning. :type partition_settings: ~azure.synapse.artifacts.models.OraclePartitionSettings + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). 
+ :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -20987,6 +23231,7 @@ class OracleSource(CopySource): 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( @@ -20999,6 +23244,7 @@ def __init__( self.query_timeout = kwargs.get('query_timeout', None) self.partition_option = kwargs.get('partition_option', None) self.partition_settings = kwargs.get('partition_settings', None) + self.additional_columns = kwargs.get('additional_columns', None) class OracleTableDataset(Dataset): @@ -21099,7 +23345,7 @@ class OrcDataset(Dataset): :type folder: ~azure.synapse.artifacts.models.DatasetFolder :param location: The location of the ORC data storage. :type location: ~azure.synapse.artifacts.models.DatasetLocation - :param orc_compression_codec: Possible values include: "none", "zlib", "snappy". + :param orc_compression_codec: Possible values include: "none", "zlib", "snappy", "lzo". :type orc_compression_codec: str or ~azure.synapse.artifacts.models.OrcCompressionCodec """ @@ -21194,6 +23440,8 @@ class OrcSink(CopySink): :type max_concurrent_connections: object :param store_settings: ORC store settings. :type store_settings: ~azure.synapse.artifacts.models.StoreWriteSettings + :param format_settings: ORC format settings. + :type format_settings: ~azure.synapse.artifacts.models.OrcWriteSettings """ _validation = { @@ -21209,6 +23457,7 @@ class OrcSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'OrcWriteSettings'}, } def __init__( @@ -21218,6 +23467,7 @@ def __init__( super(OrcSink, self).__init__(**kwargs) self.type = 'OrcSink' # type: str self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) class OrcSource(CopySource): @@ -21241,6 +23491,9 @@ class OrcSource(CopySource): :type max_concurrent_connections: object :param store_settings: ORC store settings. :type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -21254,6 +23507,7 @@ class OrcSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( @@ -21263,6 +23517,47 @@ def __init__( super(OrcSource, self).__init__(**kwargs) self.type = 'OrcSource' # type: str self.store_settings = kwargs.get('store_settings', None) + self.additional_columns = kwargs.get('additional_columns', None) + + +class OrcWriteSettings(FormatWriteSettings): + """Orc write settings. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. + :type type: str + :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + specified count. Type: integer (or Expression with resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). + :type file_name_prefix: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(OrcWriteSettings, self).__init__(**kwargs) + self.type = 'OrcWriteSettings' # type: str + self.max_rows_per_file = kwargs.get('max_rows_per_file', None) + self.file_name_prefix = kwargs.get('file_name_prefix', None) class ParameterSpecification(msrest.serialization.Model): @@ -21324,8 +23619,8 @@ class ParquetDataset(Dataset): :type folder: ~azure.synapse.artifacts.models.DatasetFolder :param location: The location of the parquet storage. :type location: ~azure.synapse.artifacts.models.DatasetLocation - :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo". - :type compression_codec: str or ~azure.synapse.artifacts.models.ParquetCompressionCodec + :param compression_codec: A string from ParquetCompressionCodecEnum or an expression. + :type compression_codec: object """ _validation = { @@ -21344,7 +23639,7 @@ class ParquetDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, } def __init__( @@ -21419,6 +23714,8 @@ class ParquetSink(CopySink): :type max_concurrent_connections: object :param store_settings: Parquet store settings. :type store_settings: ~azure.synapse.artifacts.models.StoreWriteSettings + :param format_settings: Parquet format settings. + :type format_settings: ~azure.synapse.artifacts.models.ParquetWriteSettings """ _validation = { @@ -21434,6 +23731,7 @@ class ParquetSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'ParquetWriteSettings'}, } def __init__( @@ -21443,6 +23741,7 @@ def __init__( super(ParquetSink, self).__init__(**kwargs) self.type = 'ParquetSink' # type: str self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) class ParquetSource(CopySource): @@ -21466,6 +23765,9 @@ class ParquetSource(CopySource): :type max_concurrent_connections: object :param store_settings: Parquet store settings. 
:type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -21479,6 +23781,7 @@ class ParquetSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( @@ -21488,6 +23791,47 @@ def __init__( super(ParquetSource, self).__init__(**kwargs) self.type = 'ParquetSource' # type: str self.store_settings = kwargs.get('store_settings', None) + self.additional_columns = kwargs.get('additional_columns', None) + + +class ParquetWriteSettings(FormatWriteSettings): + """Parquet write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. + :type type: str + :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + specified count. Type: integer (or Expression with resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). + :type file_name_prefix: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(ParquetWriteSettings, self).__init__(**kwargs) + self.type = 'ParquetWriteSettings' # type: str + self.max_rows_per_file = kwargs.get('max_rows_per_file', None) + self.file_name_prefix = kwargs.get('file_name_prefix', None) class PaypalLinkedService(LinkedService): @@ -21647,6 +23991,9 @@ class PaypalSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
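A sketch of the new format_settings hook on ParquetSink using the ParquetWriteSettings defined above (names follow the attribute maps shown in this hunk; file_name_prefix is understood to yield names of the form prefix_fileIndex.fileExtension when copying from a non-file-based store):

from azure.synapse.artifacts.models import ParquetSink, ParquetWriteSettings

# Cap each written file at one million rows and prefix the generated file names.
sink = ParquetSink(
    format_settings=ParquetWriteSettings(
        max_rows_per_file=1000000,
        file_name_prefix="sales_extract",
    ),
)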
:type query: object @@ -21663,6 +24010,7 @@ class PaypalSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -21871,6 +24219,9 @@ class PhoenixSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -21887,6 +24238,7 @@ class PhoenixSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -22328,6 +24680,9 @@ class PostgreSqlSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object """ @@ -22343,6 +24698,7 @@ class PostgreSqlSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -22627,6 +24983,9 @@ class PrestoSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -22643,6 +25002,7 @@ class PrestoSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -22856,6 +25216,9 @@ class QuickBooksLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] + :param connection_properties: Properties used to connect to QuickBooks. It is mutually + exclusive with any other properties in the linked service. Type: object. + :type connection_properties: object :param endpoint: Required. The endpoint of the QuickBooks server. (i.e. quickbooks.api.intuit.com). :type endpoint: object @@ -22895,6 +25258,7 @@ class QuickBooksLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, 'company_id': {'key': 'typeProperties.companyId', 'type': 'object'}, 'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'object'}, @@ -22911,6 +25275,7 @@ def __init__( ): super(QuickBooksLinkedService, self).__init__(**kwargs) self.type = 'QuickBooks' # type: str + self.connection_properties = kwargs.get('connection_properties', None) self.endpoint = kwargs['endpoint'] self.company_id = kwargs['company_id'] self.consumer_key = kwargs['consumer_key'] @@ -23001,6 +25366,9 @@ class QuickBooksSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -23017,6 +25385,7 @@ class QuickBooksSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -23040,7 +25409,7 @@ class RecurrenceSchedule(msrest.serialization.Model): :param hours: The hours. :type hours: list[int] :param week_days: The days of the week. - :type week_days: list[str or ~azure.synapse.artifacts.models.DayOfWeek] + :type week_days: list[str or ~azure.synapse.artifacts.models.DaysOfWeek] :param month_days: The month days. :type month_days: list[int] :param monthly_occurrences: The monthly occurrences. @@ -23190,6 +25559,9 @@ class RelationalSource(CopySource): :type max_concurrent_connections: object :param query: Database query. Type: string (or Expression with resultType string). 
:type query: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -23203,6 +25575,7 @@ class RelationalSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( @@ -23212,6 +25585,7 @@ def __init__( super(RelationalSource, self).__init__(**kwargs) self.type = 'RelationalSource' # type: str self.query = kwargs.get('query', None) + self.additional_columns = kwargs.get('additional_columns', None) class RelationalTableDataset(Dataset): @@ -23370,7 +25744,7 @@ class RerunTumblingWindowTrigger(Trigger): :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState :param annotations: List of tags that can be used for describing the trigger. :type annotations: list[object] - :param parent_trigger: The parent trigger reference. + :param parent_trigger: Required. The parent trigger reference. :type parent_trigger: object :param requested_start_time: Required. The start time for the time period for which restatement is initiated. Only UTC time is currently supported. @@ -23378,17 +25752,18 @@ class RerunTumblingWindowTrigger(Trigger): :param requested_end_time: Required. The end time for the time period for which restatement is initiated. Only UTC time is currently supported. :type requested_end_time: ~datetime.datetime - :param max_concurrency: Required. The max number of parallel time windows (ready for execution) - for which a rerun is triggered. - :type max_concurrency: int + :param rerun_concurrency: Required. The max number of parallel time windows (ready for + execution) for which a rerun is triggered. 
+ :type rerun_concurrency: int """ _validation = { 'type': {'required': True}, 'runtime_state': {'readonly': True}, + 'parent_trigger': {'required': True}, 'requested_start_time': {'required': True}, 'requested_end_time': {'required': True}, - 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + 'rerun_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, } _attribute_map = { @@ -23400,7 +25775,7 @@ class RerunTumblingWindowTrigger(Trigger): 'parent_trigger': {'key': 'typeProperties.parentTrigger', 'type': 'object'}, 'requested_start_time': {'key': 'typeProperties.requestedStartTime', 'type': 'iso-8601'}, 'requested_end_time': {'key': 'typeProperties.requestedEndTime', 'type': 'iso-8601'}, - 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + 'rerun_concurrency': {'key': 'typeProperties.rerunConcurrency', 'type': 'int'}, } def __init__( @@ -23409,10 +25784,10 @@ def __init__( ): super(RerunTumblingWindowTrigger, self).__init__(**kwargs) self.type = 'RerunTumblingWindowTrigger' # type: str - self.parent_trigger = kwargs.get('parent_trigger', None) + self.parent_trigger = kwargs['parent_trigger'] self.requested_start_time = kwargs['requested_start_time'] self.requested_end_time = kwargs['requested_end_time'] - self.max_concurrency = kwargs['max_concurrency'] + self.rerun_concurrency = kwargs['rerun_concurrency'] class RerunTumblingWindowTriggerActionParameters(msrest.serialization.Model): @@ -23613,6 +25988,9 @@ class ResponsysSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -23629,6 +26007,7 @@ class ResponsysSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -23761,6 +26140,10 @@ class RestServiceLinkedService(LinkedService): :param tenant: The tenant information (domain name or tenant ID) used in AadServicePrincipal authentication type under which your application resides. :type tenant: object + :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data + factory regions’ cloud type. Type: string (or Expression with resultType string). + :type azure_cloud_type: object :param aad_resource_id: The resource you are requesting authorization to use. :type aad_resource_id: object :param encrypted_credential: The encrypted credential used for authentication. 
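The rerun trigger above now requires parent_trigger and takes rerun_concurrency in place of max_concurrency; a sketch under the assumption that parent_trigger accepts a trigger-reference-shaped object (the parameter is typed as a plain object here):

from datetime import datetime, timezone
from azure.synapse.artifacts.models import RerunTumblingWindowTrigger

# Restate two days of windows, up to 10 in parallel; only UTC times are supported.
rerun = RerunTumblingWindowTrigger(
    parent_trigger={"referenceName": "DailyWindowTrigger", "type": "TriggerReference"},
    requested_start_time=datetime(2021, 3, 1, tzinfo=timezone.utc),
    requested_end_time=datetime(2021, 3, 3, tzinfo=timezone.utc),
    rerun_concurrency=10,
)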
Credentials are @@ -23790,6 +26173,7 @@ class RestServiceLinkedService(LinkedService): 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -23808,10 +26192,86 @@ def __init__( self.service_principal_id = kwargs.get('service_principal_id', None) self.service_principal_key = kwargs.get('service_principal_key', None) self.tenant = kwargs.get('tenant', None) + self.azure_cloud_type = kwargs.get('azure_cloud_type', None) self.aad_resource_id = kwargs.get('aad_resource_id', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) +class RestSink(CopySink): + """A copy activity Rest service Sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param request_method: The HTTP method used to call the RESTful API. The default is POST. Type: + string (or Expression with resultType string). + :type request_method: object + :param additional_headers: The additional HTTP headers in the request to the RESTful API. Type: + string (or Expression with resultType string). + :type additional_headers: object + :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout + to get a response, not the timeout to read response data. Default value: 00:01:40. Type: string + (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type http_request_timeout: object + :param request_interval: The time to await before sending next request, in milliseconds. + :type request_interval: object + :param http_compression_type: Http Compression Type to Send data in compressed format with + Optimal Compression Level, Default is None. And The Only Supported option is Gzip. 
+ :type http_compression_type: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'request_method': {'key': 'requestMethod', 'type': 'object'}, + 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, + 'request_interval': {'key': 'requestInterval', 'type': 'object'}, + 'http_compression_type': {'key': 'httpCompressionType', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(RestSink, self).__init__(**kwargs) + self.type = 'RestSink' # type: str + self.request_method = kwargs.get('request_method', None) + self.additional_headers = kwargs.get('additional_headers', None) + self.http_request_timeout = kwargs.get('http_request_timeout', None) + self.request_interval = kwargs.get('request_interval', None) + self.http_compression_type = kwargs.get('http_compression_type', None) + + class RestSource(CopySource): """A copy activity Rest service source. @@ -23850,6 +26310,9 @@ class RestSource(CopySource): :type http_request_timeout: object :param request_interval: The time to await before sending next page request. :type request_interval: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -23868,6 +26331,7 @@ class RestSource(CopySource): 'pagination_rules': {'key': 'paginationRules', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, 'request_interval': {'key': 'requestInterval', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( @@ -23882,6 +26346,7 @@ def __init__( self.pagination_rules = kwargs.get('pagination_rules', None) self.http_request_timeout = kwargs.get('http_request_timeout', None) self.request_interval = kwargs.get('request_interval', None) + self.additional_columns = kwargs.get('additional_columns', None) class RetryPolicy(msrest.serialization.Model): @@ -24061,8 +26526,11 @@ class SalesforceLinkedService(LinkedService): :type username: object :param password: The password for Basic authentication of the Salesforce instance. :type password: ~azure.synapse.artifacts.models.SecretBase - :param security_token: The security token is required to remotely access Salesforce instance. + :param security_token: The security token is optional to remotely access Salesforce instance. :type security_token: ~azure.synapse.artifacts.models.SecretBase + :param api_version: The Salesforce API version used in ADF. Type: string (or Expression with + resultType string). + :type api_version: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
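A sketch of the new RestSink defined above, matching its attribute map; all values are plain examples:

from azure.synapse.artifacts.models import RestSink

# POST copied rows to a REST endpoint, gzip-compressed, waiting 10 ms between requests.
sink = RestSink(
    request_method="POST",
    additional_headers="Content-Type: application/json",
    http_request_timeout="00:02:00",
    request_interval=10,
    http_compression_type="gzip",
)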
@@ -24084,6 +26552,7 @@ class SalesforceLinkedService(LinkedService): 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, + 'api_version': {'key': 'typeProperties.apiVersion', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -24097,6 +26566,7 @@ def __init__( self.username = kwargs.get('username', None) self.password = kwargs.get('password', None) self.security_token = kwargs.get('security_token', None) + self.api_version = kwargs.get('api_version', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -24118,6 +26588,9 @@ class SalesforceMarketingCloudLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] + :param connection_properties: Properties used to connect to Salesforce Marketing Cloud. It is + mutually exclusive with any other properties in the linked service. Type: object. + :type connection_properties: object :param client_id: Required. The client ID associated with the Salesforce Marketing Cloud application. Type: string (or Expression with resultType string). :type client_id: object @@ -24153,6 +26626,7 @@ class SalesforceMarketingCloudLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, @@ -24167,6 +26641,7 @@ def __init__( ): super(SalesforceMarketingCloudLinkedService, self).__init__(**kwargs) self.type = 'SalesforceMarketingCloud' # type: str + self.connection_properties = kwargs.get('connection_properties', None) self.client_id = kwargs['client_id'] self.client_secret = kwargs.get('client_secret', None) self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) @@ -24255,6 +26730,9 @@ class SalesforceMarketingCloudSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
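The Salesforce linked services gain an api_version property; a sketch assuming SecureString is the usual SecretBase implementation in this package, with placeholder values:

from azure.synapse.artifacts.models import SalesforceLinkedService, SecureString

# Pin the Salesforce API version used for copies; the security token stays optional.
ls = SalesforceLinkedService(
    username="copy-user@contoso.com",
    password=SecureString(value="<password>"),
    api_version="47.0",
)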
:type query: object @@ -24271,6 +26749,7 @@ class SalesforceMarketingCloudSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -24370,8 +26849,11 @@ class SalesforceServiceCloudLinkedService(LinkedService): :type username: object :param password: The password for Basic authentication of the Salesforce instance. :type password: ~azure.synapse.artifacts.models.SecretBase - :param security_token: The security token is required to remotely access Salesforce instance. + :param security_token: The security token is optional to remotely access Salesforce instance. :type security_token: ~azure.synapse.artifacts.models.SecretBase + :param api_version: The Salesforce API version used in ADF. Type: string (or Expression with + resultType string). + :type api_version: object :param extended_properties: Extended properties appended to the connection string. Type: string (or Expression with resultType string). :type extended_properties: object @@ -24396,6 +26878,7 @@ class SalesforceServiceCloudLinkedService(LinkedService): 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, + 'api_version': {'key': 'typeProperties.apiVersion', 'type': 'object'}, 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -24410,6 +26893,7 @@ def __init__( self.username = kwargs.get('username', None) self.password = kwargs.get('password', None) self.security_token = kwargs.get('security_token', None) + self.api_version = kwargs.get('api_version', None) self.extended_properties = kwargs.get('extended_properties', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -24565,6 +27049,9 @@ class SalesforceServiceCloudSource(CopySource): :param read_behavior: The read behavior for the operation. Default is Query. Possible values include: "Query", "QueryAll". :type read_behavior: str or ~azure.synapse.artifacts.models.SalesforceSourceReadBehavior + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -24579,6 +27066,7 @@ class SalesforceServiceCloudSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( @@ -24589,6 +27077,7 @@ def __init__( self.type = 'SalesforceServiceCloudSource' # type: str self.query = kwargs.get('query', None) self.read_behavior = kwargs.get('read_behavior', None) + self.additional_columns = kwargs.get('additional_columns', None) class SalesforceSink(CopySink): @@ -24681,6 +27170,9 @@ class SalesforceSource(TabularSource): :param query_timeout: Query timeout. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object :param read_behavior: The read behavior for the operation. Default is Query. Possible values @@ -24699,6 +27191,7 @@ class SalesforceSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, } @@ -24863,6 +27356,9 @@ class SapBwSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: MDX query. Type: string (or Expression with resultType string). :type query: object """ @@ -24878,6 +27374,7 @@ class SapBwSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -25042,6 +27539,11 @@ class SapCloudForCustomerSink(CopySink): values include: "Insert", "Update". :type write_behavior: str or ~azure.synapse.artifacts.models.SapCloudForCustomerSinkWriteBehavior + :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout + to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string + (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type http_request_timeout: object """ _validation = { @@ -25057,6 +27559,7 @@ class SapCloudForCustomerSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } def __init__( @@ -25066,6 +27569,7 @@ def __init__( super(SapCloudForCustomerSink, self).__init__(**kwargs) self.type = 'SapCloudForCustomerSink' # type: str self.write_behavior = kwargs.get('write_behavior', None) + self.http_request_timeout = kwargs.get('http_request_timeout', None) class SapCloudForCustomerSource(TabularSource): @@ -25090,9 +27594,17 @@ class SapCloudForCustomerSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
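A sketch of the SapCloudForCustomerSink above with the new http_request_timeout (a TimeSpan string; the documented default is 00:05:00):

from azure.synapse.artifacts.models import SapCloudForCustomerSink

# Allow slow OData batches up to ten minutes before the HTTP call is treated as failed.
sink = SapCloudForCustomerSink(
    write_behavior="Insert",
    http_request_timeout="00:10:00",
)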
:type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or Expression with resultType string). :type query: object + :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout + to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string + (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type http_request_timeout: object """ _validation = { @@ -25106,7 +27618,9 @@ class SapCloudForCustomerSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } def __init__( @@ -25116,6 +27630,7 @@ def __init__( super(SapCloudForCustomerSource, self).__init__(**kwargs) self.type = 'SapCloudForCustomerSource' # type: str self.query = kwargs.get('query', None) + self.http_request_timeout = kwargs.get('http_request_timeout', None) class SapEccLinkedService(LinkedService): @@ -25263,9 +27778,17 @@ class SapEccSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: SAP ECC OData query. For example, "$top=1". Type: string (or Expression with resultType string). :type query: object + :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout + to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string + (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type http_request_timeout: object """ _validation = { @@ -25279,7 +27802,9 @@ class SapEccSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } def __init__( @@ -25289,6 +27814,7 @@ def __init__( super(SapEccSource, self).__init__(**kwargs) self.type = 'SapEccSource' # type: str self.query = kwargs.get('query', None) + self.http_request_timeout = kwargs.get('http_request_timeout', None) class SapHanaLinkedService(LinkedService): @@ -25405,6 +27931,9 @@ class SapHanaSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: SAP HANA Sql query. Type: string (or Expression with resultType string). :type query: object :param packet_size: The packet size of data read from SAP HANA. Type: integer(or Expression @@ -25429,6 +27958,7 @@ class SapHanaSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'packet_size': {'key': 'packetSize', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, @@ -25542,12 +28072,24 @@ class SapOpenHubLinkedService(LinkedService): :param language: Language of the BW system where the open hub destination is located. The default value is EN. Type: string (or Expression with resultType string). :type language: object + :param system_id: SystemID of the SAP system where the table is located. Type: string (or + Expression with resultType string). + :type system_id: object :param user_name: Username to access the SAP BW server where the open hub destination is located. Type: string (or Expression with resultType string). :type user_name: object :param password: Password to access the SAP BW server where the open hub destination is located. :type password: ~azure.synapse.artifacts.models.SecretBase + :param message_server: The hostname of the SAP Message Server. Type: string (or Expression with + resultType string). + :type message_server: object + :param message_server_service: The service name or port number of the Message Server. Type: + string (or Expression with resultType string). + :type message_server_service: object + :param logon_group: The Logon Group for the SAP System. Type: string (or Expression with + resultType string). + :type logon_group: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
@@ -25572,8 +28114,12 @@ class SapOpenHubLinkedService(LinkedService): 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, 'language': {'key': 'typeProperties.language', 'type': 'object'}, + 'system_id': {'key': 'typeProperties.systemId', 'type': 'object'}, 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'message_server': {'key': 'typeProperties.messageServer', 'type': 'object'}, + 'message_server_service': {'key': 'typeProperties.messageServerService', 'type': 'object'}, + 'logon_group': {'key': 'typeProperties.logonGroup', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -25587,8 +28133,12 @@ def __init__( self.system_number = kwargs['system_number'] self.client_id = kwargs['client_id'] self.language = kwargs.get('language', None) + self.system_id = kwargs.get('system_id', None) self.user_name = kwargs.get('user_name', None) self.password = kwargs.get('password', None) + self.message_server = kwargs.get('message_server', None) + self.message_server_service = kwargs.get('message_server_service', None) + self.logon_group = kwargs.get('logon_group', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -25614,6 +28164,9 @@ class SapOpenHubSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param exclude_last_request: Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with resultType boolean). :type exclude_last_request: object @@ -25621,6 +28174,13 @@ class SapOpenHubSource(TabularSource): requestId larger than the value of this property will be retrieved. The default value is 0. Type: integer (or Expression with resultType integer ). :type base_request_id: object + :param custom_rfc_read_table_function_module: Specifies the custom RFC function module that + will be used to read data from SAP Table. Type: string (or Expression with resultType string). + :type custom_rfc_read_table_function_module: object + :param sap_data_column_delimiter: The single character that will be used as delimiter passed to + SAP RFC as well as splitting the output data retrieved. Type: string (or Expression with + resultType string). 
+ :type sap_data_column_delimiter: object """ _validation = { @@ -25634,8 +28194,11 @@ class SapOpenHubSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'exclude_last_request': {'key': 'excludeLastRequest', 'type': 'object'}, 'base_request_id': {'key': 'baseRequestId', 'type': 'object'}, + 'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'}, + 'sap_data_column_delimiter': {'key': 'sapDataColumnDelimiter', 'type': 'object'}, } def __init__( @@ -25646,6 +28209,8 @@ def __init__( self.type = 'SapOpenHubSource' # type: str self.exclude_last_request = kwargs.get('exclude_last_request', None) self.base_request_id = kwargs.get('base_request_id', None) + self.custom_rfc_read_table_function_module = kwargs.get('custom_rfc_read_table_function_module', None) + self.sap_data_column_delimiter = kwargs.get('sap_data_column_delimiter', None) class SapOpenHubTableDataset(Dataset): @@ -25961,6 +28526,9 @@ class SapTableSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param row_count: The number of rows to be retrieved. Type: integer(or Expression with resultType integer). :type row_count: object @@ -25979,6 +28547,10 @@ class SapTableSource(TabularSource): :param custom_rfc_read_table_function_module: Specifies the custom RFC function module that will be used to read data from SAP Table. Type: string (or Expression with resultType string). :type custom_rfc_read_table_function_module: object + :param sap_data_column_delimiter: The single character that will be used as delimiter passed to + SAP RFC as well as splitting the output data retrieved. Type: string (or Expression with + resultType string). + :type sap_data_column_delimiter: object :param partition_option: The partition mechanism that will be used for SAP table read in parallel. Possible values include: "None", "PartitionOnInt", "PartitionOnCalendarYear", "PartitionOnCalendarMonth", "PartitionOnCalendarDate", "PartitionOnTime". 
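A sketch of SapOpenHubSource with the custom RFC module and column delimiter knobs added above; the module name is a placeholder:

from azure.synapse.artifacts.models import SapOpenHubSource

# Read via a custom RFC function module and split the returned rows on '|'.
source = SapOpenHubSource(
    exclude_last_request=True,
    custom_rfc_read_table_function_module="Z_CUSTOM_RFC_READ_TABLE",
    sap_data_column_delimiter="|",
)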
@@ -25999,12 +28571,14 @@ class SapTableSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'row_count': {'key': 'rowCount', 'type': 'object'}, 'row_skips': {'key': 'rowSkips', 'type': 'object'}, 'rfc_table_fields': {'key': 'rfcTableFields', 'type': 'object'}, 'rfc_table_options': {'key': 'rfcTableOptions', 'type': 'object'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'}, + 'sap_data_column_delimiter': {'key': 'sapDataColumnDelimiter', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'SapTablePartitionSettings'}, } @@ -26021,6 +28595,7 @@ def __init__( self.rfc_table_options = kwargs.get('rfc_table_options', None) self.batch_size = kwargs.get('batch_size', None) self.custom_rfc_read_table_function_module = kwargs.get('custom_rfc_read_table_function_module', None) + self.sap_data_column_delimiter = kwargs.get('sap_data_column_delimiter', None) self.partition_option = kwargs.get('partition_option', None) self.partition_settings = kwargs.get('partition_settings', None) @@ -26207,7 +28782,7 @@ class SelfDependencyTumblingWindowTriggerReference(DependencyReference): _validation = { 'type': {'required': True}, - 'offset': {'required': True, 'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + 'offset': {'required': True, 'max_length': 15, 'min_length': 8, 'pattern': r'-((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, 'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, } @@ -26437,6 +29012,9 @@ class ServiceNowSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -26453,6 +29031,7 @@ class ServiceNowSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -26574,6 +29153,18 @@ class SftpReadSettings(StoreReadSettings): :param wildcard_file_name: Sftp wildcardFileName. Type: string (or Expression with resultType string). :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition discovery. + :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition discovery starts from. Type: + string (or Expression with resultType string). 
+ :type partition_root_path: object + :param file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :type file_list_path: object + :param delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :type delete_files_after_completion: object :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). :type modified_datetime_start: object @@ -26593,6 +29184,10 @@ class SftpReadSettings(StoreReadSettings): 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, + 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } @@ -26606,6 +29201,10 @@ def __init__( self.recursive = kwargs.get('recursive', None) self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.partition_root_path = kwargs.get('partition_root_path', None) + self.file_list_path = kwargs.get('file_list_path', None) + self.delete_files_after_completion = kwargs.get('delete_files_after_completion', None) self.modified_datetime_start = kwargs.get('modified_datetime_start', None) self.modified_datetime_end = kwargs.get('modified_datetime_end', None) @@ -26729,6 +29328,10 @@ class SftpWriteSettings(StoreWriteSettings): :param operation_timeout: Specifies the timeout for writing each chunk to SFTP server. Default value: 01:00:00 (one hour). Type: string (or Expression with resultType string). :type operation_timeout: object + :param use_temp_file_rename: Upload to temporary file(s) and rename. Disable this option if + your SFTP server doesn't support rename operation. Type: boolean (or Expression with resultType + boolean). + :type use_temp_file_rename: object """ _validation = { @@ -26741,6 +29344,7 @@ class SftpWriteSettings(StoreWriteSettings): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'operation_timeout': {'key': 'operationTimeout', 'type': 'object'}, + 'use_temp_file_rename': {'key': 'useTempFileRename', 'type': 'object'}, } def __init__( @@ -26750,6 +29354,192 @@ def __init__( super(SftpWriteSettings, self).__init__(**kwargs) self.type = 'SftpWriteSettings' # type: str self.operation_timeout = kwargs.get('operation_timeout', None) + self.use_temp_file_rename = kwargs.get('use_temp_file_rename', None) + + +class SharePointOnlineListLinkedService(LinkedService): + """SharePoint Online List linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. 
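A sketch of the new SFTP read and write options defined above; paths are placeholders:

from azure.synapse.artifacts.models import SftpReadSettings, SftpWriteSettings

# Drive the copy from an explicit file list and clean up the source afterwards.
read_settings = SftpReadSettings(
    recursive=False,
    file_list_path="config/files-to-copy.txt",
    delete_files_after_completion=True,
)

# Some SFTP servers cannot rename; write directly instead of via a temporary file.
write_settings = SftpWriteSettings(use_temp_file_rename=False)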
+ :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param site_url: Required. The URL of the SharePoint Online site. For example, + https://contoso.sharepoint.com/sites/siteName. Type: string (or Expression with resultType + string). + :type site_url: object + :param tenant_id: Required. The tenant ID under which your application resides. You can find it + from Azure portal Active Directory overview page. Type: string (or Expression with resultType + string). + :type tenant_id: object + :param service_principal_id: Required. The application (client) ID of your application + registered in Azure Active Directory. Make sure to grant SharePoint site permission to this + application. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. The client secret of your application registered in + Azure Active Directory. Type: string (or Expression with resultType string). + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'site_url': {'required': True}, + 'tenant_id': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'site_url': {'key': 'typeProperties.siteUrl', 'type': 'object'}, + 'tenant_id': {'key': 'typeProperties.tenantId', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(SharePointOnlineListLinkedService, self).__init__(**kwargs) + self.type = 'SharePointOnlineList' # type: str + self.site_url = kwargs['site_url'] + self.tenant_id = kwargs['tenant_id'] + self.service_principal_id = kwargs['service_principal_id'] + self.service_principal_key = kwargs['service_principal_key'] + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class SharePointOnlineListResourceDataset(Dataset): + """The sharepoint online list resource dataset. + + All required parameters must be populated in order to send to Azure. 
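A sketch of the new SharePoint Online List linked service defined above; SecureString is assumed as the SecretBase implementation, and the identifiers are placeholders:

from azure.synapse.artifacts.models import (
    SecureString,
    SharePointOnlineListLinkedService,
)

# Service-principal auth against a SharePoint Online site; all four properties are required.
ls = SharePointOnlineListLinkedService(
    site_url="https://contoso.sharepoint.com/sites/siteName",
    tenant_id="<tenant-guid>",
    service_principal_id="<app-client-id>",
    service_principal_key=SecureString(value="<client-secret>"),
)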
+ + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param list_name: The name of the SharePoint Online list. Type: string (or Expression with + resultType string). + :type list_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'list_name': {'key': 'typeProperties.listName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(SharePointOnlineListResourceDataset, self).__init__(**kwargs) + self.type = 'SharePointOnlineListResource' # type: str + self.list_name = kwargs.get('list_name', None) + + +class SharePointOnlineListSource(CopySource): + """A copy activity source for sharePoint online list source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: The OData query to filter the data in SharePoint Online list. For example, + "$top=1". Type: string (or Expression with resultType string). 
+ :type query: object + :param http_request_timeout: The wait time to get a response from SharePoint Online. Default + value is 5 minutes (00:05:00). Type: string (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type http_request_timeout: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(SharePointOnlineListSource, self).__init__(**kwargs) + self.type = 'SharePointOnlineListSource' # type: str + self.query = kwargs.get('query', None) + self.http_request_timeout = kwargs.get('http_request_timeout', None) class ShopifyLinkedService(LinkedService): @@ -26905,6 +29695,9 @@ class ShopifySource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -26921,6 +29714,7 @@ class ShopifySource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -26933,6 +29727,31 @@ def __init__( self.query = kwargs.get('query', None) +class SkipErrorFile(msrest.serialization.Model): + """Skip error file. + + :param file_missing: Skip if file is deleted by other client during copy. Default is true. + Type: boolean (or Expression with resultType boolean). + :type file_missing: object + :param data_inconsistency: Skip if source/sink file changed by other concurrent write. Default + is false. Type: boolean (or Expression with resultType boolean). + :type data_inconsistency: object + """ + + _attribute_map = { + 'file_missing': {'key': 'fileMissing', 'type': 'object'}, + 'data_inconsistency': {'key': 'dataInconsistency', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(SkipErrorFile, self).__init__(**kwargs) + self.file_missing = kwargs.get('file_missing', None) + self.data_inconsistency = kwargs.get('data_inconsistency', None) + + class Sku(msrest.serialization.Model): """SQL pool SKU. @@ -26961,6 +29780,320 @@ def __init__( self.capacity = kwargs.get('capacity', None) +class SnowflakeDataset(Dataset): + """The snowflake dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. 
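# Usage sketch (illustrative only): pairing the SharePointOnlineListResourceDataset and
# SharePointOnlineListSource defined above. The linked service reference name is a placeholder,
# and the OData query / timeout values follow the formats documented in the docstrings.
from azure.synapse.artifacts.models import (
    LinkedServiceReference,
    SharePointOnlineListResourceDataset,
    SharePointOnlineListSource,
)

list_dataset = SharePointOnlineListResourceDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference", reference_name="SharePointOnlineList1"
    ),
    list_name="Announcements",            # string (or Expression with resultType string)
)

list_source = SharePointOnlineListSource(
    query="$top=100",                     # OData filter pushed to SharePoint Online
    http_request_timeout="00:05:00",      # default value shown in the docstring above
)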
Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param schema_type_properties_schema: The schema name of the Snowflake database. Type: string + (or Expression with resultType string). + :type schema_type_properties_schema: object + :param table: The table name of the Snowflake database. Type: string (or Expression with + resultType string). + :type table: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(SnowflakeDataset, self).__init__(**kwargs) + self.type = 'SnowflakeTable' # type: str + self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) + self.table = kwargs.get('table', None) + + +class SnowflakeExportCopyCommand(ExportSettings): + """Snowflake export command settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The export setting type.Constant filled by server. + :type type: str + :param additional_copy_options: Additional copy options directly passed to snowflake Copy + Command. Type: key value pairs (value should be string type) (or Expression with resultType + object). Example: "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": + "'HH24:MI:SS.FF'" }. + :type additional_copy_options: dict[str, object] + :param additional_format_options: Additional format options directly passed to snowflake Copy + Command. Type: key value pairs (value should be string type) (or Expression with resultType + object). Example: "additionalFormatOptions": { "OVERWRITE": "TRUE", "MAX_FILE_SIZE": "'FALSE'" + }. 
+ :type additional_format_options: dict[str, object] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'additional_copy_options': {'key': 'additionalCopyOptions', 'type': '{object}'}, + 'additional_format_options': {'key': 'additionalFormatOptions', 'type': '{object}'}, + } + + def __init__( + self, + **kwargs + ): + super(SnowflakeExportCopyCommand, self).__init__(**kwargs) + self.type = 'SnowflakeExportCopyCommand' # type: str + self.additional_copy_options = kwargs.get('additional_copy_options', None) + self.additional_format_options = kwargs.get('additional_format_options', None) + + +class SnowflakeImportCopyCommand(ImportSettings): + """Snowflake import command settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The import setting type.Constant filled by server. + :type type: str + :param additional_copy_options: Additional copy options directly passed to snowflake Copy + Command. Type: key value pairs (value should be string type) (or Expression with resultType + object). Example: "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": + "'HH24:MI:SS.FF'" }. + :type additional_copy_options: dict[str, object] + :param additional_format_options: Additional format options directly passed to snowflake Copy + Command. Type: key value pairs (value should be string type) (or Expression with resultType + object). Example: "additionalFormatOptions": { "FORCE": "TRUE", "LOAD_UNCERTAIN_FILES": + "'FALSE'" }. + :type additional_format_options: dict[str, object] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'additional_copy_options': {'key': 'additionalCopyOptions', 'type': '{object}'}, + 'additional_format_options': {'key': 'additionalFormatOptions', 'type': '{object}'}, + } + + def __init__( + self, + **kwargs + ): + super(SnowflakeImportCopyCommand, self).__init__(**kwargs) + self.type = 'SnowflakeImportCopyCommand' # type: str + self.additional_copy_options = kwargs.get('additional_copy_options', None) + self.additional_format_options = kwargs.get('additional_format_options', None) + + +class SnowflakeLinkedService(LinkedService): + """Snowflake linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: Required. The connection string of snowflake. Type: string, + SecureString. 
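# Usage sketch (illustrative only) of the two Snowflake COPY command settings objects added
# above. The option dictionaries mirror the examples in the docstrings and are passed through to
# the Snowflake COPY command unchanged.
from azure.synapse.artifacts.models import (
    SnowflakeExportCopyCommand,
    SnowflakeImportCopyCommand,
)

export_settings = SnowflakeExportCopyCommand(
    additional_copy_options={"DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": "'HH24:MI:SS.FF'"},
    additional_format_options={"OVERWRITE": "TRUE", "MAX_FILE_SIZE": "'FALSE'"},
)

import_settings = SnowflakeImportCopyCommand(
    additional_copy_options={"DATE_FORMAT": "MM/DD/YYYY"},
    additional_format_options={"FORCE": "TRUE", "LOAD_UNCERTAIN_FILES": "'FALSE'"},
)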
+ :type connection_string: object + :param password: The Azure key vault secret reference of password in connection string. + :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(SnowflakeLinkedService, self).__init__(**kwargs) + self.type = 'Snowflake' # type: str + self.connection_string = kwargs['connection_string'] + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + + +class SnowflakeSink(CopySink): + """A copy activity snowflake sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :type pre_copy_script: object + :param import_settings: Snowflake import settings. 
+ :type import_settings: ~azure.synapse.artifacts.models.SnowflakeImportCopyCommand + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'import_settings': {'key': 'importSettings', 'type': 'SnowflakeImportCopyCommand'}, + } + + def __init__( + self, + **kwargs + ): + super(SnowflakeSink, self).__init__(**kwargs) + self.type = 'SnowflakeSink' # type: str + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.import_settings = kwargs.get('import_settings', None) + + +class SnowflakeSource(CopySource): + """A copy activity snowflake source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: Snowflake Sql query. Type: string (or Expression with resultType string). + :type query: object + :param export_settings: Snowflake export settings. + :type export_settings: ~azure.synapse.artifacts.models.SnowflakeExportCopyCommand + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'export_settings': {'key': 'exportSettings', 'type': 'SnowflakeExportCopyCommand'}, + } + + def __init__( + self, + **kwargs + ): + super(SnowflakeSource, self).__init__(**kwargs) + self.type = 'SnowflakeSource' # type: str + self.query = kwargs.get('query', None) + self.export_settings = kwargs.get('export_settings', None) + + class SparkBatchJob(msrest.serialization.Model): """SparkBatchJob. @@ -27688,6 +30821,9 @@ class SparkSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). 
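# Usage sketch (illustrative only) tying the new Snowflake pieces together as a copy activity
# source/sink pair. The connection string is a placeholder; the export/import settings reuse the
# command objects documented earlier in this file.
from azure.synapse.artifacts.models import (
    SnowflakeExportCopyCommand,
    SnowflakeImportCopyCommand,
    SnowflakeLinkedService,
    SnowflakeSink,
    SnowflakeSource,
)

snowflake_ls = SnowflakeLinkedService(
    connection_string="jdbc:snowflake://<account>.snowflakecomputing.com/?db=MYDB&warehouse=WH",
)

snowflake_source = SnowflakeSource(
    query="SELECT * FROM PUBLIC.ORDERS",
    export_settings=SnowflakeExportCopyCommand(),    # COPY options may be added here
)

snowflake_sink = SnowflakeSink(
    pre_copy_script="TRUNCATE TABLE PUBLIC.ORDERS_STAGE",
    import_settings=SnowflakeImportCopyCommand(),    # COPY options may be added here
)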
+ :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -27704,6 +30840,7 @@ class SparkSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -27852,6 +30989,9 @@ class SqlDWSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param sql_reader_query: SQL Data Warehouse reader query. Type: string (or Expression with resultType string). :type sql_reader_query: object @@ -27863,6 +31003,11 @@ class SqlDWSource(TabularSource): Example: "{Parameter1: {value: "1", type: "int"}}". Type: object (or Expression with resultType object), itemType: StoredProcedureParameter. :type stored_procedure_parameters: object + :param partition_option: The partition mechanism that will be used for Sql read in parallel. + Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". + :type partition_option: object + :param partition_settings: The settings that will be leveraged for Sql source partitioning. + :type partition_settings: ~azure.synapse.artifacts.models.SqlPartitionSettings """ _validation = { @@ -27876,9 +31021,12 @@ class SqlDWSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, } def __init__( @@ -27890,6 +31038,8 @@ def __init__( self.sql_reader_query = kwargs.get('sql_reader_query', None) self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) class SqlMISink(CopySink): @@ -27993,6 +31143,9 @@ class SqlMISource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). 
+ :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). :type sql_reader_query: object :param sql_reader_stored_procedure_name: Name of the stored procedure for a Azure SQL Managed @@ -28005,6 +31158,11 @@ class SqlMISource(TabularSource): ~azure.synapse.artifacts.models.StoredProcedureParameter] :param produce_additional_types: Which additional types to produce. :type produce_additional_types: object + :param partition_option: The partition mechanism that will be used for Sql read in parallel. + Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". + :type partition_option: object + :param partition_settings: The settings that will be leveraged for Sql source partitioning. + :type partition_settings: ~azure.synapse.artifacts.models.SqlPartitionSettings """ _validation = { @@ -28018,10 +31176,13 @@ class SqlMISource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, } def __init__( @@ -28034,6 +31195,43 @@ def __init__( self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) self.produce_additional_types = kwargs.get('produce_additional_types', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) + + +class SqlPartitionSettings(msrest.serialization.Model): + """The settings that will be leveraged for Sql source partitioning. + + :param partition_column_name: The name of the column in integer or datetime type that will be + used for proceeding partitioning. If not specified, the primary key of the table is auto- + detected and used as the partition column. Type: string (or Expression with resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of the partition column for partition range + splitting. This value is used to decide the partition stride, not for filtering the rows in + table. All rows in the table or query result will be partitioned and copied. Type: string (or + Expression with resultType string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of the partition column for partition range + splitting. This value is used to decide the partition stride, not for filtering the rows in + table. All rows in the table or query result will be partitioned and copied. Type: string (or + Expression with resultType string). 
+ :type partition_lower_bound: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(SqlPartitionSettings, self).__init__(**kwargs) + self.partition_column_name = kwargs.get('partition_column_name', None) + self.partition_upper_bound = kwargs.get('partition_upper_bound', None) + self.partition_lower_bound = kwargs.get('partition_lower_bound', None) class SqlPool(TrackedResource): @@ -28573,6 +31771,9 @@ class SqlServerSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). :type sql_reader_query: object :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database @@ -28585,6 +31786,11 @@ class SqlServerSource(TabularSource): ~azure.synapse.artifacts.models.StoredProcedureParameter] :param produce_additional_types: Which additional types to produce. :type produce_additional_types: object + :param partition_option: The partition mechanism that will be used for Sql read in parallel. + Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". + :type partition_option: object + :param partition_settings: The settings that will be leveraged for Sql source partitioning. + :type partition_settings: ~azure.synapse.artifacts.models.SqlPartitionSettings """ _validation = { @@ -28598,10 +31804,13 @@ class SqlServerSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, } def __init__( @@ -28614,6 +31823,8 @@ def __init__( self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) self.produce_additional_types = kwargs.get('produce_additional_types', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) class SqlServerStoredProcedureActivity(ExecutionActivity): @@ -28846,6 +32057,9 @@ class SqlSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
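# Usage sketch (illustrative only) of the new parallel-read options on the SQL sources above.
# The column name and bounds are placeholders; per the SqlPartitionSettings docstring the bounds
# decide the partition stride only and do not filter rows.
from azure.synapse.artifacts.models import SqlDWSource, SqlPartitionSettings

dw_source = SqlDWSource(
    sql_reader_query="SELECT * FROM dbo.FactSales",
    partition_option="DynamicRange",                 # or "PhysicalPartitionsOfTable" / "None"
    partition_settings=SqlPartitionSettings(
        partition_column_name="SalesOrderId",
        partition_lower_bound="1",
        partition_upper_bound="1000000",
    ),
)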
:type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). :type sql_reader_query: object :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database @@ -28856,6 +32070,15 @@ class SqlSource(TabularSource): Example: "{Parameter1: {value: "1", type: "int"}}". :type stored_procedure_parameters: dict[str, ~azure.synapse.artifacts.models.StoredProcedureParameter] + :param isolation_level: Specifies the transaction locking behavior for the SQL source. Allowed + values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value + is ReadCommitted. Type: string (or Expression with resultType string). + :type isolation_level: object + :param partition_option: The partition mechanism that will be used for Sql read in parallel. + Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". + :type partition_option: object + :param partition_settings: The settings that will be leveraged for Sql source partitioning. + :type partition_settings: ~azure.synapse.artifacts.models.SqlPartitionSettings """ _validation = { @@ -28869,9 +32092,13 @@ class SqlSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'isolation_level': {'key': 'isolationLevel', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, } def __init__( @@ -28883,6 +32110,9 @@ def __init__( self.sql_reader_query = kwargs.get('sql_reader_query', None) self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.isolation_level = kwargs.get('isolation_level', None) + self.partition_option = kwargs.get('partition_option', None) + self.partition_settings = kwargs.get('partition_settings', None) class SquareLinkedService(LinkedService): @@ -28903,6 +32133,9 @@ class SquareLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] + :param connection_properties: Properties used to connect to Square. It is mutually exclusive + with any other properties in the linked service. Type: object. + :type connection_properties: object :param host: Required. The URL of the Square instance. (i.e. mystore.mysquare.com). :type host: object :param client_id: Required. The client ID associated with your Square application. 
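# Usage sketch (illustrative only) combining two additions documented above: per-source
# additional columns and the SQL isolation level. AdditionalColumns is the small name/value
# model introduced by this patch; the column values below are hypothetical examples.
from azure.synapse.artifacts.models import AdditionalColumns, SqlSource

sql_source = SqlSource(
    sql_reader_query="SELECT * FROM dbo.Customers",
    isolation_level="ReadCommitted",                        # default per the docstring
    additional_columns=[
        AdditionalColumns(name="source_system", value="crm"),      # static value
        AdditionalColumns(name="batch_id", value="2021-04-06"),    # could also be an expression
    ],
)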
@@ -28942,6 +32175,7 @@ class SquareLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, 'host': {'key': 'typeProperties.host', 'type': 'object'}, 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, @@ -28958,6 +32192,7 @@ def __init__( ): super(SquareLinkedService, self).__init__(**kwargs) self.type = 'Square' # type: str + self.connection_properties = kwargs.get('connection_properties', None) self.host = kwargs['host'] self.client_id = kwargs['client_id'] self.client_secret = kwargs.get('client_secret', None) @@ -29048,6 +32283,9 @@ class SquareSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -29064,6 +32302,7 @@ class SquareSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -29291,7 +32530,7 @@ class SSISPackageLocation(msrest.serialization.Model): string). :type package_path: object :param type: The type of SSIS package location. Possible values include: "SSISDB", "File", - "InlinePackage". + "InlinePackage", "PackageStore". :type type: str or ~azure.synapse.artifacts.models.SsisPackageLocationType :param package_password: Password of the package. :type package_password: ~azure.synapse.artifacts.models.SecretBase @@ -29300,6 +32539,8 @@ class SSISPackageLocation(msrest.serialization.Model): :param configuration_path: The configuration file of the package execution. Type: string (or Expression with resultType string). :type configuration_path: object + :param configuration_access_credential: The configuration file access credential. + :type configuration_access_credential: ~azure.synapse.artifacts.models.SSISAccessCredential :param package_name: The package name. :type package_name: str :param package_content: The embedded package content. 
Type: string (or Expression with @@ -29317,6 +32558,7 @@ class SSISPackageLocation(msrest.serialization.Model): 'package_password': {'key': 'typeProperties.packagePassword', 'type': 'SecretBase'}, 'access_credential': {'key': 'typeProperties.accessCredential', 'type': 'SSISAccessCredential'}, 'configuration_path': {'key': 'typeProperties.configurationPath', 'type': 'object'}, + 'configuration_access_credential': {'key': 'typeProperties.configurationAccessCredential', 'type': 'SSISAccessCredential'}, 'package_name': {'key': 'typeProperties.packageName', 'type': 'str'}, 'package_content': {'key': 'typeProperties.packageContent', 'type': 'object'}, 'package_last_modified_date': {'key': 'typeProperties.packageLastModifiedDate', 'type': 'str'}, @@ -29333,6 +32575,7 @@ def __init__( self.package_password = kwargs.get('package_password', None) self.access_credential = kwargs.get('access_credential', None) self.configuration_path = kwargs.get('configuration_path', None) + self.configuration_access_credential = kwargs.get('configuration_access_credential', None) self.package_name = kwargs.get('package_name', None) self.package_content = kwargs.get('package_content', None) self.package_last_modified_date = kwargs.get('package_last_modified_date', None) @@ -29678,6 +32921,9 @@ class SybaseSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object """ @@ -29693,6 +32939,7 @@ class SybaseSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -29966,6 +33213,11 @@ class TabularTranslator(CopyTranslator): [{"source":{"path":"$.CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"path":"$.CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. Type: object (or Expression with resultType object). :type mappings: object + :param type_conversion: Whether to enable the advanced type conversion feature in the Copy + activity. Type: boolean (or Expression with resultType boolean). + :type type_conversion: object + :param type_conversion_settings: Type conversion settings. 
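# Usage sketch (illustrative only) of the extended SSISPackageLocation above, using the newly
# allowed "PackageStore" location type and the new configuration_access_credential. The
# SSISAccessCredential fields (domain/user_name/password) are assumed to mirror the Data Factory
# shape of that model, and all values are placeholders.
from azure.synapse.artifacts.models import (
    SSISAccessCredential,
    SSISPackageLocation,
    SecureString,
)

package_location = SSISPackageLocation(
    package_path="MyPackageStore/folder/Package.dtsx",
    type="PackageStore",                                    # newly allowed value in this patch
    configuration_path="\\\\share\\configs\\package.dtsConfig",
    configuration_access_credential=SSISAccessCredential(
        domain="CONTOSO",
        user_name="ssis-runner",
        password=SecureString(value="<password>"),
    ),
)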
+ :type type_conversion_settings: ~azure.synapse.artifacts.models.TypeConversionSettings """ _validation = { @@ -29980,6 +33232,8 @@ class TabularTranslator(CopyTranslator): 'collection_reference': {'key': 'collectionReference', 'type': 'object'}, 'map_complex_values_to_string': {'key': 'mapComplexValuesToString', 'type': 'object'}, 'mappings': {'key': 'mappings', 'type': 'object'}, + 'type_conversion': {'key': 'typeConversion', 'type': 'object'}, + 'type_conversion_settings': {'key': 'typeConversionSettings', 'type': 'TypeConversionSettings'}, } def __init__( @@ -29993,6 +33247,76 @@ def __init__( self.collection_reference = kwargs.get('collection_reference', None) self.map_complex_values_to_string = kwargs.get('map_complex_values_to_string', None) self.mappings = kwargs.get('mappings', None) + self.type_conversion = kwargs.get('type_conversion', None) + self.type_conversion_settings = kwargs.get('type_conversion_settings', None) + + +class TarGZipReadSettings(CompressionReadSettings): + """The TarGZip compression read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The Compression setting type.Constant filled by server. + :type type: str + :param preserve_compression_file_name_as_folder: Preserve the compression file name as folder + path. Type: boolean (or Expression with resultType boolean). + :type preserve_compression_file_name_as_folder: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'preserve_compression_file_name_as_folder': {'key': 'preserveCompressionFileNameAsFolder', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(TarGZipReadSettings, self).__init__(**kwargs) + self.type = 'TarGZipReadSettings' # type: str + self.preserve_compression_file_name_as_folder = kwargs.get('preserve_compression_file_name_as_folder', None) + + +class TarReadSettings(CompressionReadSettings): + """The Tar compression read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The Compression setting type.Constant filled by server. + :type type: str + :param preserve_compression_file_name_as_folder: Preserve the compression file name as folder + path. Type: boolean (or Expression with resultType boolean). + :type preserve_compression_file_name_as_folder: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'preserve_compression_file_name_as_folder': {'key': 'preserveCompressionFileNameAsFolder', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(TarReadSettings, self).__init__(**kwargs) + self.type = 'TarReadSettings' # type: str + self.preserve_compression_file_name_as_folder = kwargs.get('preserve_compression_file_name_as_folder', None) class TeradataLinkedService(LinkedService): @@ -30119,6 +33443,9 @@ class TeradataSource(TabularSource): :param query_timeout: Query timeout. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: Teradata query. Type: string (or Expression with resultType string). :type query: object :param partition_option: The partition mechanism that will be used for teradata read in @@ -30140,6 +33467,7 @@ class TeradataSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'}, @@ -30643,7 +33971,7 @@ class TumblingWindowTrigger(Trigger): trigger window that is ready. :type pipeline: ~azure.synapse.artifacts.models.TriggerPipelineReference :param frequency: Required. The frequency of the time windows. Possible values include: - "Minute", "Hour". + "Minute", "Hour", "Month". :type frequency: str or ~azure.synapse.artifacts.models.TumblingWindowFrequency :param interval: Required. The interval of the time windows. The minimum interval allowed is 15 Minutes. @@ -30732,7 +34060,7 @@ class TumblingWindowTriggerDependencyReference(TriggerDependencyReference): _validation = { 'type': {'required': True}, 'reference_trigger': {'required': True}, - 'offset': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + 'offset': {'max_length': 15, 'min_length': 8, 'pattern': r'-?((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, 'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, } @@ -30753,6 +34081,51 @@ def __init__( self.size = kwargs.get('size', None) +class TypeConversionSettings(msrest.serialization.Model): + """Type conversion settings. + + :param allow_data_truncation: Whether to allow data truncation when converting the data. Type: + boolean (or Expression with resultType boolean). + :type allow_data_truncation: object + :param treat_boolean_as_number: Whether to treat boolean values as numbers. Type: boolean (or + Expression with resultType boolean). + :type treat_boolean_as_number: object + :param date_time_format: The format for DateTime values. Type: string (or Expression with + resultType string). + :type date_time_format: object + :param date_time_offset_format: The format for DateTimeOffset values. Type: string (or + Expression with resultType string). + :type date_time_offset_format: object + :param time_span_format: The format for TimeSpan values. Type: string (or Expression with + resultType string). + :type time_span_format: object + :param culture: The culture used to convert data from/to string. Type: string (or Expression + with resultType string). 
+ :type culture: object + """ + + _attribute_map = { + 'allow_data_truncation': {'key': 'allowDataTruncation', 'type': 'object'}, + 'treat_boolean_as_number': {'key': 'treatBooleanAsNumber', 'type': 'object'}, + 'date_time_format': {'key': 'dateTimeFormat', 'type': 'object'}, + 'date_time_offset_format': {'key': 'dateTimeOffsetFormat', 'type': 'object'}, + 'time_span_format': {'key': 'timeSpanFormat', 'type': 'object'}, + 'culture': {'key': 'culture', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(TypeConversionSettings, self).__init__(**kwargs) + self.allow_data_truncation = kwargs.get('allow_data_truncation', None) + self.treat_boolean_as_number = kwargs.get('treat_boolean_as_number', None) + self.date_time_format = kwargs.get('date_time_format', None) + self.date_time_offset_format = kwargs.get('date_time_offset_format', None) + self.time_span_format = kwargs.get('time_span_format', None) + self.culture = kwargs.get('culture', None) + + class UntilActivity(ControlActivity): """This activity executes inner activities until the specified boolean expression results to true or timeout is reached, whichever is earlier. @@ -31023,6 +34396,9 @@ class VerticaSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -31039,6 +34415,7 @@ class VerticaSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -31158,7 +34535,7 @@ class WaitActivity(ControlActivity): :param user_properties: Activity user properties. :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] :param wait_time_in_seconds: Required. Duration in seconds. - :type wait_time_in_seconds: int + :type wait_time_in_seconds: object """ _validation = { @@ -31174,7 +34551,7 @@ class WaitActivity(ControlActivity): 'description': {'key': 'description', 'type': 'str'}, 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'wait_time_in_seconds': {'key': 'typeProperties.waitTimeInSeconds', 'type': 'int'}, + 'wait_time_in_seconds': {'key': 'typeProperties.waitTimeInSeconds', 'type': 'object'}, } def __init__( @@ -31508,10 +34885,10 @@ class WebHookActivity(ControlActivity): :type body: object :param authentication: Authentication method used for calling the endpoint. :type authentication: ~azure.synapse.artifacts.models.WebActivityAuthentication - :param report_status_on_call_back: When set to true, - statusCode, output and error in callback request body will be - consumed by activity. The activity can be marked as failed by setting statusCode >= 400 in - callback request. Default is false. Type: boolean (or Expression with resultType boolean). 
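# Usage sketch (illustrative only) of the new type-conversion knobs on TabularTranslator, using
# the TypeConversionSettings model defined just above. The format strings are .NET-style
# examples and are hypothetical.
from azure.synapse.artifacts.models import TabularTranslator, TypeConversionSettings

translator = TabularTranslator(
    type_conversion=True,
    type_conversion_settings=TypeConversionSettings(
        allow_data_truncation=False,
        treat_boolean_as_number=False,
        date_time_format="yyyy-MM-dd HH:mm:ss",
        culture="en-us",
    ),
)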
+ :param report_status_on_call_back: When set to true, statusCode, output and error in callback + request body will be consumed by activity. The activity can be marked as failed by setting + statusCode >= 400 in callback request. Default is false. Type: boolean (or Expression with + resultType boolean). :type report_status_on_call_back: object """ @@ -31618,6 +34995,9 @@ class WebSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -31630,6 +35010,7 @@ class WebSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( @@ -31638,6 +35019,7 @@ def __init__( ): super(WebSource, self).__init__(**kwargs) self.type = 'WebSource' # type: str + self.additional_columns = kwargs.get('additional_columns', None) class WebTableDataset(Dataset): @@ -31982,6 +35364,9 @@ class XeroLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] + :param connection_properties: Properties used to connect to Xero. It is mutually exclusive with + any other properties in the linked service. Type: object. + :type connection_properties: object :param host: Required. The endpoint of the Xero server. (i.e. api.xero.com). :type host: object :param consumer_key: The consumer key associated with the Xero application. @@ -32018,6 +35403,7 @@ class XeroLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, 'host': {'key': 'typeProperties.host', 'type': 'object'}, 'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'SecretBase'}, 'private_key': {'key': 'typeProperties.privateKey', 'type': 'SecretBase'}, @@ -32033,6 +35419,7 @@ def __init__( ): super(XeroLinkedService, self).__init__(**kwargs) self.type = 'Xero' # type: str + self.connection_properties = kwargs.get('connection_properties', None) self.host = kwargs['host'] self.consumer_key = kwargs.get('consumer_key', None) self.private_key = kwargs.get('private_key', None) @@ -32122,6 +35509,9 @@ class XeroSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. 
Type: string (or Expression with resultType string). :type query: object @@ -32138,6 +35528,7 @@ class XeroSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -32150,6 +35541,223 @@ def __init__( self.query = kwargs.get('query', None) +class XmlDataset(Dataset): + """Xml dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param location: The location of the json data storage. + :type location: ~azure.synapse.artifacts.models.DatasetLocation + :param encoding_name: The code page name of the preferred encoding. If not specified, the + default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column + of the table in the following link to set supported values: + https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with + resultType string). + :type encoding_name: object + :param null_value: The null value string. Type: string (or Expression with resultType string). + :type null_value: object + :param compression: The data compression method used for the json dataset. 
+ :type compression: ~azure.synapse.artifacts.models.DatasetCompression + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, + 'null_value': {'key': 'typeProperties.nullValue', 'type': 'object'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__( + self, + **kwargs + ): + super(XmlDataset, self).__init__(**kwargs) + self.type = 'Xml' # type: str + self.location = kwargs.get('location', None) + self.encoding_name = kwargs.get('encoding_name', None) + self.null_value = kwargs.get('null_value', None) + self.compression = kwargs.get('compression', None) + + +class XmlReadSettings(FormatReadSettings): + """Xml read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The read setting type.Constant filled by server. + :type type: str + :param compression_properties: Compression settings. + :type compression_properties: ~azure.synapse.artifacts.models.CompressionReadSettings + :param validation_mode: Indicates what validation method is used when reading the xml files. + Allowed values: 'none', 'xsd', or 'dtd'. Type: string (or Expression with resultType string). + :type validation_mode: object + :param detect_data_type: Indicates whether type detection is enabled when reading the xml + files. Type: boolean (or Expression with resultType boolean). + :type detect_data_type: object + :param namespaces: Indicates whether namespace is enabled when reading the xml files. Type: + boolean (or Expression with resultType boolean). + :type namespaces: object + :param namespace_prefixes: Namespace uri to prefix mappings to override the prefixes in column + names when namespace is enabled, if no prefix is defined for a namespace uri, the prefix of xml + element/attribute name in the xml data file will be used. Example: + "{"http://www.example.com/xml":"prefix"}" Type: object (or Expression with resultType object). 
+ :type namespace_prefixes: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'compression_properties': {'key': 'compressionProperties', 'type': 'CompressionReadSettings'}, + 'validation_mode': {'key': 'validationMode', 'type': 'object'}, + 'detect_data_type': {'key': 'detectDataType', 'type': 'object'}, + 'namespaces': {'key': 'namespaces', 'type': 'object'}, + 'namespace_prefixes': {'key': 'namespacePrefixes', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(XmlReadSettings, self).__init__(**kwargs) + self.type = 'XmlReadSettings' # type: str + self.compression_properties = kwargs.get('compression_properties', None) + self.validation_mode = kwargs.get('validation_mode', None) + self.detect_data_type = kwargs.get('detect_data_type', None) + self.namespaces = kwargs.get('namespaces', None) + self.namespace_prefixes = kwargs.get('namespace_prefixes', None) + + +class XmlSource(CopySource): + """A copy activity Xml source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: Xml store settings. + :type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings + :param format_settings: Xml format settings. + :type format_settings: ~azure.synapse.artifacts.models.XmlReadSettings + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'XmlReadSettings'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + } + + def __init__( + self, + **kwargs + ): + super(XmlSource, self).__init__(**kwargs) + self.type = 'XmlSource' # type: str + self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) + self.additional_columns = kwargs.get('additional_columns', None) + + +class ZipDeflateReadSettings(CompressionReadSettings): + """The ZipDeflate compression read settings. 
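# Usage sketch (illustrative only) wiring together the new XML models above: an XmlDataset
# pointing at a blob file-system location, read settings with validation and namespace handling,
# and an XmlSource that consumes them. Container/path names and the linked service reference are
# placeholders; the compression settings reuse the TarGZipReadSettings type added earlier in
# this patch.
from azure.synapse.artifacts.models import (
    AzureBlobFSLocation,
    LinkedServiceReference,
    TarGZipReadSettings,
    XmlDataset,
    XmlReadSettings,
    XmlSource,
)

xml_dataset = XmlDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference", reference_name="AzureBlobFS1"
    ),
    location=AzureBlobFSLocation(
        file_system="raw", folder_path="xml", file_name="orders.xml.tar.gz"
    ),
    encoding_name="UTF-8",
)

xml_read_settings = XmlReadSettings(
    validation_mode="xsd",                 # 'none', 'xsd' or 'dtd'
    detect_data_type=True,
    namespaces=True,
    namespace_prefixes={"http://www.example.com/xml": "prefix"},
    compression_properties=TarGZipReadSettings(preserve_compression_file_name_as_folder=False),
)

xml_source = XmlSource(format_settings=xml_read_settings)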
+ + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The Compression setting type.Constant filled by server. + :type type: str + :param preserve_zip_file_name_as_folder: Preserve the zip file name as folder path. Type: + boolean (or Expression with resultType boolean). + :type preserve_zip_file_name_as_folder: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'preserve_zip_file_name_as_folder': {'key': 'preserveZipFileNameAsFolder', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(ZipDeflateReadSettings, self).__init__(**kwargs) + self.type = 'ZipDeflateReadSettings' # type: str + self.preserve_zip_file_name_as_folder = kwargs.get('preserve_zip_file_name_as_folder', None) + + class ZohoLinkedService(LinkedService): """Zoho server linked service. @@ -32168,6 +35776,9 @@ class ZohoLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] + :param connection_properties: Properties used to connect to Zoho. It is mutually exclusive with + any other properties in the linked service. Type: object. + :type connection_properties: object :param endpoint: Required. The endpoint of the Zoho server. (i.e. crm.zoho.com/crm/private). :type endpoint: object :param access_token: The access token for Zoho authentication. @@ -32200,6 +35811,7 @@ class ZohoLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, @@ -32214,6 +35826,7 @@ def __init__( ): super(ZohoLinkedService, self).__init__(**kwargs) self.type = 'Zoho' # type: str + self.connection_properties = kwargs.get('connection_properties', None) self.endpoint = kwargs['endpoint'] self.access_token = kwargs.get('access_token', None) self.use_encrypted_endpoints = kwargs.get('use_encrypted_endpoints', None) @@ -32302,6 +35915,9 @@ class ZohoSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
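The XML read path added above composes as follows: a CompressionReadSettings (here ZipDeflateReadSettings) plugs into XmlReadSettings.compression_properties, and XmlSource carries the store and format settings for a copy activity. A minimal sketch with illustrative values; the blob store settings are only an example pairing, not part of this change.

    from azure.synapse.artifacts.models import (
        AzureBlobStorageReadSettings,
        XmlReadSettings,
        XmlSource,
        ZipDeflateReadSettings,
    )

    # Unzip on the fly and validate each document against its XSD (illustrative values).
    format_settings = XmlReadSettings(
        compression_properties=ZipDeflateReadSettings(preserve_zip_file_name_as_folder=False),
        validation_mode="xsd",
        detect_data_type=True,
    )
    source = XmlSource(
        store_settings=AzureBlobStorageReadSettings(recursive=True, wildcard_file_name="*.zip"),
        format_settings=format_settings,
    )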
:type query: object @@ -32318,6 +35934,7 @@ class ZohoSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models_py3.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models_py3.py index 891f75314d1e..8f4bd00d92bd 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models_py3.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models_py3.py @@ -317,11 +317,37 @@ def __init__( self.job_version = job_version +class AdditionalColumns(msrest.serialization.Model): + """Specify the column name and value of additional columns. + + :param name: Additional column name. Type: string (or Expression with resultType string). + :type name: object + :param value: Additional column value. Type: string (or Expression with resultType string). + :type value: object + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'object'}, + 'value': {'key': 'value', 'type': 'object'}, + } + + def __init__( + self, + *, + name: Optional[object] = None, + value: Optional[object] = None, + **kwargs + ): + super(AdditionalColumns, self).__init__(**kwargs) + self.name = name + self.value = value + + class LinkedService(msrest.serialization.Model): """The Azure Synapse nested object which contains the information and credential which can be used to connect with related store or compute resource. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: AmazonMWSLinkedService, AmazonRedshiftLinkedService, AmazonS3LinkedService, AzureBatchLinkedService, AzureBlobFSLinkedService, AzureBlobStorageLinkedService, AzureDataExplorerLinkedService, AzureDataLakeAnalyticsLinkedService, AzureDataLakeStoreLinkedService, AzureDatabricksLinkedService, AzureFileStorageLinkedService, AzureFunctionLinkedService, AzureKeyVaultLinkedService, AzureMLLinkedService, AzureMLServiceLinkedService, AzureMariaDBLinkedService, AzureMySqlLinkedService, AzurePostgreSqlLinkedService, AzureSearchLinkedService, AzureSqlDWLinkedService, AzureSqlDatabaseLinkedService, AzureSqlMILinkedService, AzureStorageLinkedService, AzureTableStorageLinkedService, CassandraLinkedService, CommonDataServiceForAppsLinkedService, ConcurLinkedService, CosmosDbLinkedService, CosmosDbMongoDbApiLinkedService, CouchbaseLinkedService, CustomDataSourceLinkedService, Db2LinkedService, DrillLinkedService, DynamicsLinkedService, DynamicsAXLinkedService, DynamicsCrmLinkedService, EloquaLinkedService, FileServerLinkedService, FtpServerLinkedService, GoogleAdWordsLinkedService, GoogleBigQueryLinkedService, GoogleCloudStorageLinkedService, GreenplumLinkedService, HBaseLinkedService, HDInsightLinkedService, HDInsightOnDemandLinkedService, HdfsLinkedService, HiveLinkedService, HttpLinkedService, HubspotLinkedService, ImpalaLinkedService, InformixLinkedService, JiraLinkedService, MagentoLinkedService, MariaDBLinkedService, MarketoLinkedService, MicrosoftAccessLinkedService, MongoDbLinkedService, MongoDbV2LinkedService, MySqlLinkedService, NetezzaLinkedService, ODataLinkedService, OdbcLinkedService, Office365LinkedService, OracleLinkedService, OracleServiceCloudLinkedService, PaypalLinkedService, PhoenixLinkedService, PostgreSqlLinkedService, PrestoLinkedService, QuickBooksLinkedService, ResponsysLinkedService, RestServiceLinkedService, SalesforceLinkedService, SalesforceMarketingCloudLinkedService, SalesforceServiceCloudLinkedService, SapBWLinkedService, SapCloudForCustomerLinkedService, SapEccLinkedService, SapHanaLinkedService, SapOpenHubLinkedService, SapTableLinkedService, ServiceNowLinkedService, SftpServerLinkedService, ShopifyLinkedService, SparkLinkedService, SqlServerLinkedService, SquareLinkedService, SybaseLinkedService, TeradataLinkedService, VerticaLinkedService, WebLinkedService, XeroLinkedService, ZohoLinkedService. 
+ sub-classes are: AmazonMWSLinkedService, AmazonRedshiftLinkedService, AmazonS3LinkedService, AzureBatchLinkedService, AzureBlobFSLinkedService, AzureBlobStorageLinkedService, AzureDataExplorerLinkedService, AzureDataLakeAnalyticsLinkedService, AzureDataLakeStoreLinkedService, AzureDatabricksLinkedService, AzureDatabricksDeltaLakeLinkedService, AzureFileStorageLinkedService, AzureFunctionLinkedService, AzureKeyVaultLinkedService, AzureMLLinkedService, AzureMLServiceLinkedService, AzureMariaDBLinkedService, AzureMySqlLinkedService, AzurePostgreSqlLinkedService, AzureSearchLinkedService, AzureSqlDWLinkedService, AzureSqlDatabaseLinkedService, AzureSqlMILinkedService, AzureStorageLinkedService, AzureTableStorageLinkedService, CassandraLinkedService, CommonDataServiceForAppsLinkedService, ConcurLinkedService, CosmosDbLinkedService, CosmosDbMongoDbApiLinkedService, CouchbaseLinkedService, CustomDataSourceLinkedService, Db2LinkedService, DrillLinkedService, DynamicsLinkedService, DynamicsAXLinkedService, DynamicsCrmLinkedService, EloquaLinkedService, FileServerLinkedService, FtpServerLinkedService, GoogleAdWordsLinkedService, GoogleBigQueryLinkedService, GoogleCloudStorageLinkedService, GreenplumLinkedService, HBaseLinkedService, HDInsightLinkedService, HDInsightOnDemandLinkedService, HdfsLinkedService, HiveLinkedService, HttpLinkedService, HubspotLinkedService, ImpalaLinkedService, InformixLinkedService, JiraLinkedService, MagentoLinkedService, MariaDBLinkedService, MarketoLinkedService, MicrosoftAccessLinkedService, MongoDbLinkedService, MongoDbAtlasLinkedService, MongoDbV2LinkedService, MySqlLinkedService, NetezzaLinkedService, ODataLinkedService, OdbcLinkedService, Office365LinkedService, OracleLinkedService, OracleServiceCloudLinkedService, PaypalLinkedService, PhoenixLinkedService, PostgreSqlLinkedService, PrestoLinkedService, QuickBooksLinkedService, ResponsysLinkedService, RestServiceLinkedService, SalesforceLinkedService, SalesforceMarketingCloudLinkedService, SalesforceServiceCloudLinkedService, SapBWLinkedService, SapCloudForCustomerLinkedService, SapEccLinkedService, SapHanaLinkedService, SapOpenHubLinkedService, SapTableLinkedService, ServiceNowLinkedService, SftpServerLinkedService, SharePointOnlineListLinkedService, ShopifyLinkedService, SnowflakeLinkedService, SparkLinkedService, SqlServerLinkedService, SquareLinkedService, SybaseLinkedService, TeradataLinkedService, VerticaLinkedService, WebLinkedService, XeroLinkedService, ZohoLinkedService. All required parameters must be populated in order to send to Azure. 
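Every copy source in this change gains an optional additional_columns list whose entries are the AdditionalColumns objects defined above. A small illustrative sketch; the expression dict shape used for the dynamic value is an assumption, not something introduced by this patch.

    from azure.synapse.artifacts.models import AdditionalColumns

    additional_columns = [
        # Static value stamped onto every row.
        AdditionalColumns(name="source_system", value="zoho"),
        # Dynamic value; the expression payload shape here is illustrative.
        AdditionalColumns(name="load_time", value={"value": "@utcnow()", "type": "Expression"}),
    ]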
@@ -354,7 +380,7 @@ class LinkedService(msrest.serialization.Model): } _subtype_map = { - 'type': {'AmazonMWS': 'AmazonMWSLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'AzureFunction': 'AzureFunctionLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureML': 'AzureMLLinkedService', 'AzureMLService': 'AzureMLServiceLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureStorage': 'AzureStorageLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'Cassandra': 'CassandraLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'Concur': 'ConcurLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'Db2': 'Db2LinkedService', 'Drill': 'DrillLinkedService', 'Dynamics': 'DynamicsLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Eloqua': 'EloquaLinkedService', 'FileServer': 'FileServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'Greenplum': 'GreenplumLinkedService', 'HBase': 'HBaseLinkedService', 'HDInsight': 'HDInsightLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'Hdfs': 'HdfsLinkedService', 'Hive': 'HiveLinkedService', 'HttpServer': 'HttpLinkedService', 'Hubspot': 'HubspotLinkedService', 'Impala': 'ImpalaLinkedService', 'Informix': 'InformixLinkedService', 'Jira': 'JiraLinkedService', 'Magento': 'MagentoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Marketo': 'MarketoLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'MongoDb': 'MongoDbLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MySql': 'MySqlLinkedService', 'Netezza': 'NetezzaLinkedService', 'OData': 'ODataLinkedService', 'Odbc': 'OdbcLinkedService', 'Office365': 'Office365LinkedService', 'Oracle': 'OracleLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'Paypal': 'PaypalLinkedService', 'Phoenix': 'PhoenixLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'Presto': 'PrestoLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Responsys': 'ResponsysLinkedService', 'RestService': 'RestServiceLinkedService', 'Salesforce': 'SalesforceLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'SapBW': 'SapBWLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapTable': 
'SapTableLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'Sftp': 'SftpServerLinkedService', 'Shopify': 'ShopifyLinkedService', 'Spark': 'SparkLinkedService', 'SqlServer': 'SqlServerLinkedService', 'Square': 'SquareLinkedService', 'Sybase': 'SybaseLinkedService', 'Teradata': 'TeradataLinkedService', 'Vertica': 'VerticaLinkedService', 'Web': 'WebLinkedService', 'Xero': 'XeroLinkedService', 'Zoho': 'ZohoLinkedService'} + 'type': {'AmazonMWS': 'AmazonMWSLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDatabricksDeltaLake': 'AzureDatabricksDeltaLakeLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'AzureFunction': 'AzureFunctionLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureML': 'AzureMLLinkedService', 'AzureMLService': 'AzureMLServiceLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureStorage': 'AzureStorageLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'Cassandra': 'CassandraLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'Concur': 'ConcurLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'Db2': 'Db2LinkedService', 'Drill': 'DrillLinkedService', 'Dynamics': 'DynamicsLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Eloqua': 'EloquaLinkedService', 'FileServer': 'FileServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'Greenplum': 'GreenplumLinkedService', 'HBase': 'HBaseLinkedService', 'HDInsight': 'HDInsightLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'Hdfs': 'HdfsLinkedService', 'Hive': 'HiveLinkedService', 'HttpServer': 'HttpLinkedService', 'Hubspot': 'HubspotLinkedService', 'Impala': 'ImpalaLinkedService', 'Informix': 'InformixLinkedService', 'Jira': 'JiraLinkedService', 'Magento': 'MagentoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Marketo': 'MarketoLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'MongoDb': 'MongoDbLinkedService', 'MongoDbAtlas': 'MongoDbAtlasLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MySql': 'MySqlLinkedService', 'Netezza': 'NetezzaLinkedService', 'OData': 'ODataLinkedService', 'Odbc': 'OdbcLinkedService', 'Office365': 'Office365LinkedService', 'Oracle': 'OracleLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'Paypal': 'PaypalLinkedService', 'Phoenix': 'PhoenixLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'Presto': 'PrestoLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Responsys': 
'ResponsysLinkedService', 'RestService': 'RestServiceLinkedService', 'Salesforce': 'SalesforceLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'SapBW': 'SapBWLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapTable': 'SapTableLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'Sftp': 'SftpServerLinkedService', 'SharePointOnlineList': 'SharePointOnlineListLinkedService', 'Shopify': 'ShopifyLinkedService', 'Snowflake': 'SnowflakeLinkedService', 'Spark': 'SparkLinkedService', 'SqlServer': 'SqlServerLinkedService', 'Square': 'SquareLinkedService', 'Sybase': 'SybaseLinkedService', 'Teradata': 'TeradataLinkedService', 'Vertica': 'VerticaLinkedService', 'Web': 'WebLinkedService', 'Xero': 'XeroLinkedService', 'Zoho': 'ZohoLinkedService'} } def __init__( @@ -490,7 +516,7 @@ class Dataset(msrest.serialization.Model): """The Azure Data Factory nested object which identifies data within different data stores, such as tables, files, folders, and documents. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmazonMWSObjectDataset, AmazonRedshiftTableDataset, AvroDataset, AzureDataExplorerTableDataset, AzureMariaDBTableDataset, AzureMySqlTableDataset, AzurePostgreSqlTableDataset, AzureSearchIndexDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, AzureSqlTableDataset, AzureTableDataset, BinaryDataset, CassandraTableDataset, CommonDataServiceForAppsEntityDataset, ConcurObjectDataset, CosmosDbMongoDbApiCollectionDataset, CosmosDbSqlApiCollectionDataset, CouchbaseTableDataset, CustomDataset, Db2TableDataset, DelimitedTextDataset, DocumentDbCollectionDataset, DrillTableDataset, DynamicsAXResourceDataset, DynamicsCrmEntityDataset, DynamicsEntityDataset, EloquaObjectDataset, GoogleAdWordsObjectDataset, GoogleBigQueryObjectDataset, GreenplumTableDataset, HBaseObjectDataset, HiveObjectDataset, HubspotObjectDataset, ImpalaObjectDataset, InformixTableDataset, JiraObjectDataset, JsonDataset, MagentoObjectDataset, MariaDBTableDataset, MarketoObjectDataset, MicrosoftAccessTableDataset, MongoDbCollectionDataset, MongoDbV2CollectionDataset, MySqlTableDataset, NetezzaTableDataset, ODataResourceDataset, OdbcTableDataset, Office365Dataset, OracleServiceCloudObjectDataset, OracleTableDataset, OrcDataset, ParquetDataset, PaypalObjectDataset, PhoenixObjectDataset, PostgreSqlTableDataset, PrestoObjectDataset, QuickBooksObjectDataset, RelationalTableDataset, ResponsysObjectDataset, RestResourceDataset, SalesforceMarketingCloudObjectDataset, SalesforceObjectDataset, SalesforceServiceCloudObjectDataset, SapBwCubeDataset, SapCloudForCustomerResourceDataset, SapEccResourceDataset, SapHanaTableDataset, SapOpenHubTableDataset, SapTableResourceDataset, ServiceNowObjectDataset, ShopifyObjectDataset, SparkObjectDataset, SqlServerTableDataset, SquareObjectDataset, SybaseTableDataset, TeradataTableDataset, VerticaTableDataset, WebTableDataset, XeroObjectDataset, ZohoObjectDataset. 
+ sub-classes are: AmazonMWSObjectDataset, AmazonRedshiftTableDataset, AmazonS3Dataset, AvroDataset, AzureBlobDataset, AzureBlobFSDataset, AzureDataExplorerTableDataset, AzureDataLakeStoreDataset, AzureDatabricksDeltaLakeDataset, AzureMariaDBTableDataset, AzureMySqlTableDataset, AzurePostgreSqlTableDataset, AzureSearchIndexDataset, AzureSqlDWTableDataset, AzureSqlMITableDataset, AzureSqlTableDataset, AzureTableDataset, BinaryDataset, CassandraTableDataset, CommonDataServiceForAppsEntityDataset, ConcurObjectDataset, CosmosDbMongoDbApiCollectionDataset, CosmosDbSqlApiCollectionDataset, CouchbaseTableDataset, CustomDataset, Db2TableDataset, DelimitedTextDataset, DocumentDbCollectionDataset, DrillTableDataset, DynamicsAXResourceDataset, DynamicsCrmEntityDataset, DynamicsEntityDataset, EloquaObjectDataset, ExcelDataset, FileShareDataset, GoogleAdWordsObjectDataset, GoogleBigQueryObjectDataset, GreenplumTableDataset, HBaseObjectDataset, HiveObjectDataset, HttpDataset, HubspotObjectDataset, ImpalaObjectDataset, InformixTableDataset, JiraObjectDataset, JsonDataset, MagentoObjectDataset, MariaDBTableDataset, MarketoObjectDataset, MicrosoftAccessTableDataset, MongoDbAtlasCollectionDataset, MongoDbCollectionDataset, MongoDbV2CollectionDataset, MySqlTableDataset, NetezzaTableDataset, ODataResourceDataset, OdbcTableDataset, Office365Dataset, OracleServiceCloudObjectDataset, OracleTableDataset, OrcDataset, ParquetDataset, PaypalObjectDataset, PhoenixObjectDataset, PostgreSqlTableDataset, PrestoObjectDataset, QuickBooksObjectDataset, RelationalTableDataset, ResponsysObjectDataset, RestResourceDataset, SalesforceMarketingCloudObjectDataset, SalesforceObjectDataset, SalesforceServiceCloudObjectDataset, SapBwCubeDataset, SapCloudForCustomerResourceDataset, SapEccResourceDataset, SapHanaTableDataset, SapOpenHubTableDataset, SapTableResourceDataset, ServiceNowObjectDataset, SharePointOnlineListResourceDataset, ShopifyObjectDataset, SnowflakeDataset, SparkObjectDataset, SqlServerTableDataset, SquareObjectDataset, SybaseTableDataset, TeradataTableDataset, VerticaTableDataset, WebTableDataset, XeroObjectDataset, XmlDataset, ZohoObjectDataset. All required parameters must be populated in order to send to Azure. 
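The newly registered dataset types are constructed like any other Dataset subclass. For example, a sketch of the XmlDataset defined earlier; the LinkedServiceReference and AzureBlobStorageLocation values are placeholders used only for illustration.

    from azure.synapse.artifacts.models import (
        AzureBlobStorageLocation,
        LinkedServiceReference,
        XmlDataset,
    )

    xml_dataset = XmlDataset(
        linked_service_name=LinkedServiceReference(
            type="LinkedServiceReference", reference_name="MyBlobStorage"  # placeholder
        ),
        location=AzureBlobStorageLocation(
            container="raw", folder_path="feeds/orders", file_name="orders.xml"
        ),
        encoding_name="UTF-8",
    )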
@@ -536,7 +562,7 @@ class Dataset(msrest.serialization.Model): } _subtype_map = { - 'type': {'AmazonMWSObject': 'AmazonMWSObjectDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'Avro': 'AvroDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'Binary': 'BinaryDataset', 'CassandraTable': 'CassandraTableDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'ConcurObject': 'ConcurObjectDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDbSqlApiCollectionDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'CustomDataset': 'CustomDataset', 'Db2Table': 'Db2TableDataset', 'DelimitedText': 'DelimitedTextDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'DrillTable': 'DrillTableDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'EloquaObject': 'EloquaObjectDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'HBaseObject': 'HBaseObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'InformixTable': 'InformixTableDataset', 'JiraObject': 'JiraObjectDataset', 'Json': 'JsonDataset', 'MagentoObject': 'MagentoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MarketoObject': 'MarketoObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MySqlTable': 'MySqlTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ODataResource': 'ODataResourceDataset', 'OdbcTable': 'OdbcTableDataset', 'Office365Table': 'Office365Dataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'OracleTable': 'OracleTableDataset', 'Orc': 'OrcDataset', 'Parquet': 'ParquetDataset', 'PaypalObject': 'PaypalObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'PrestoObject': 'PrestoObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'RestResource': 'RestResourceDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SapBwCube': 'SapBwCubeDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'SparkObject': 'SparkObjectDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SquareObject': 'SquareObjectDataset', 'SybaseTable': 'SybaseTableDataset', 'TeradataTable': 'TeradataTableDataset', 'VerticaTable': 'VerticaTableDataset', 'WebTable': 'WebTableDataset', 
'XeroObject': 'XeroObjectDataset', 'ZohoObject': 'ZohoObjectDataset'} + 'type': {'AmazonMWSObject': 'AmazonMWSObjectDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AmazonS3Object': 'AmazonS3Dataset', 'Avro': 'AvroDataset', 'AzureBlob': 'AzureBlobDataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'AzureDatabricksDeltaLakeDataset': 'AzureDatabricksDeltaLakeDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'Binary': 'BinaryDataset', 'CassandraTable': 'CassandraTableDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'ConcurObject': 'ConcurObjectDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDbSqlApiCollectionDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'CustomDataset': 'CustomDataset', 'Db2Table': 'Db2TableDataset', 'DelimitedText': 'DelimitedTextDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'DrillTable': 'DrillTableDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'EloquaObject': 'EloquaObjectDataset', 'Excel': 'ExcelDataset', 'FileShare': 'FileShareDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'HBaseObject': 'HBaseObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HttpFile': 'HttpDataset', 'HubspotObject': 'HubspotObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'InformixTable': 'InformixTableDataset', 'JiraObject': 'JiraObjectDataset', 'Json': 'JsonDataset', 'MagentoObject': 'MagentoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MarketoObject': 'MarketoObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'MongoDbAtlasCollection': 'MongoDbAtlasCollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MySqlTable': 'MySqlTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ODataResource': 'ODataResourceDataset', 'OdbcTable': 'OdbcTableDataset', 'Office365Table': 'Office365Dataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'OracleTable': 'OracleTableDataset', 'Orc': 'OrcDataset', 'Parquet': 'ParquetDataset', 'PaypalObject': 'PaypalObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'PrestoObject': 'PrestoObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'RestResource': 'RestResourceDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SapBwCube': 'SapBwCubeDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapTableResource': 
'SapTableResourceDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'SharePointOnlineListResource': 'SharePointOnlineListResourceDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'SnowflakeTable': 'SnowflakeDataset', 'SparkObject': 'SparkObjectDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SquareObject': 'SquareObjectDataset', 'SybaseTable': 'SybaseTableDataset', 'TeradataTable': 'TeradataTableDataset', 'VerticaTable': 'VerticaTableDataset', 'WebTable': 'WebTableDataset', 'XeroObject': 'XeroObjectDataset', 'Xml': 'XmlDataset', 'ZohoObject': 'ZohoObjectDataset'} } def __init__( @@ -636,7 +662,7 @@ class CopySource(msrest.serialization.Model): """A copy activity source. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroSource, AzureBlobFSSource, AzureDataExplorerSource, AzureDataLakeStoreSource, BinarySource, BlobSource, CommonDataServiceForAppsSource, CosmosDbMongoDbApiSource, CosmosDbSqlApiSource, DelimitedTextSource, DocumentDbCollectionSource, DynamicsCrmSource, DynamicsSource, FileSystemSource, HdfsSource, HttpSource, JsonSource, MicrosoftAccessSource, MongoDbSource, MongoDbV2Source, ODataSource, Office365Source, OracleSource, OrcSource, ParquetSource, RelationalSource, RestSource, SalesforceServiceCloudSource, TabularSource, WebSource. + sub-classes are: AvroSource, AzureBlobFSSource, AzureDataExplorerSource, AzureDataLakeStoreSource, AzureDatabricksDeltaLakeSource, BinarySource, BlobSource, CommonDataServiceForAppsSource, CosmosDbMongoDbApiSource, CosmosDbSqlApiSource, DelimitedTextSource, DocumentDbCollectionSource, DynamicsCrmSource, DynamicsSource, ExcelSource, FileSystemSource, HdfsSource, HttpSource, JsonSource, MicrosoftAccessSource, MongoDbAtlasSource, MongoDbSource, MongoDbV2Source, ODataSource, Office365Source, OracleSource, OrcSource, ParquetSource, RelationalSource, RestSource, SalesforceServiceCloudSource, SharePointOnlineListSource, SnowflakeSource, TabularSource, WebSource, XmlSource. All required parameters must be populated in order to send to Azure. 
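Each CopySource subclass pins its discriminator in __init__, so callers never set type by hand; msrest then emits that value when the model is serialized. A small sanity sketch:

    from azure.synapse.artifacts.models import XmlSource

    src = XmlSource(max_concurrent_connections=4)
    assert src.type == "XmlSource"                 # set by the constructor
    assert src.serialize()["type"] == "XmlSource"  # discriminator written on serialization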
@@ -669,7 +695,7 @@ class CopySource(msrest.serialization.Model): } _subtype_map = { - 'type': {'AvroSource': 'AvroSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'BinarySource': 'BinarySource', 'BlobSource': 'BlobSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'CosmosDbSqlApiSource': 'CosmosDbSqlApiSource', 'DelimitedTextSource': 'DelimitedTextSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'FileSystemSource': 'FileSystemSource', 'HdfsSource': 'HdfsSource', 'HttpSource': 'HttpSource', 'JsonSource': 'JsonSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'MongoDbSource': 'MongoDbSource', 'MongoDbV2Source': 'MongoDbV2Source', 'ODataSource': 'ODataSource', 'Office365Source': 'Office365Source', 'OracleSource': 'OracleSource', 'OrcSource': 'OrcSource', 'ParquetSource': 'ParquetSource', 'RelationalSource': 'RelationalSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'TabularSource': 'TabularSource', 'WebSource': 'WebSource'} + 'type': {'AvroSource': 'AvroSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'AzureDatabricksDeltaLakeSource': 'AzureDatabricksDeltaLakeSource', 'BinarySource': 'BinarySource', 'BlobSource': 'BlobSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'CosmosDbSqlApiSource': 'CosmosDbSqlApiSource', 'DelimitedTextSource': 'DelimitedTextSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'ExcelSource': 'ExcelSource', 'FileSystemSource': 'FileSystemSource', 'HdfsSource': 'HdfsSource', 'HttpSource': 'HttpSource', 'JsonSource': 'JsonSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'MongoDbAtlasSource': 'MongoDbAtlasSource', 'MongoDbSource': 'MongoDbSource', 'MongoDbV2Source': 'MongoDbV2Source', 'ODataSource': 'ODataSource', 'Office365Source': 'Office365Source', 'OracleSource': 'OracleSource', 'OrcSource': 'OrcSource', 'ParquetSource': 'ParquetSource', 'RelationalSource': 'RelationalSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SharePointOnlineListSource': 'SharePointOnlineListSource', 'SnowflakeSource': 'SnowflakeSource', 'TabularSource': 'TabularSource', 'WebSource': 'WebSource', 'XmlSource': 'XmlSource'} } def __init__( @@ -714,6 +740,9 @@ class TabularSource(CopySource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). 
+ :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -727,6 +756,7 @@ class TabularSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } _subtype_map = { @@ -741,11 +771,13 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): super(TabularSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'TabularSource' # type: str self.query_timeout = query_timeout + self.additional_columns = additional_columns class AmazonMWSSource(TabularSource): @@ -770,6 +802,9 @@ class AmazonMWSSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -786,6 +821,7 @@ class AmazonMWSSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -797,10 +833,11 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(AmazonMWSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(AmazonMWSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AmazonMWSSource' # type: str self.query = query @@ -912,6 +949,9 @@ class AmazonRedshiftSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: Database query. 
Type: string (or Expression with resultType string). :type query: object :param redshift_unload_settings: The Amazon S3 settings needed for the interim Amazon S3 when @@ -931,6 +971,7 @@ class AmazonRedshiftSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'redshift_unload_settings': {'key': 'redshiftUnloadSettings', 'type': 'RedshiftUnloadSettings'}, } @@ -943,11 +984,12 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, redshift_unload_settings: Optional["RedshiftUnloadSettings"] = None, **kwargs ): - super(AmazonRedshiftSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(AmazonRedshiftSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AmazonRedshiftSource' # type: str self.query = query self.redshift_unload_settings = redshift_unload_settings @@ -1034,6 +1076,116 @@ def __init__( self.schema_type_properties_schema = schema_type_properties_schema +class AmazonS3Dataset(Dataset): + """A single Amazon Simple Storage Service (S3) object or a set of S3 objects. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param bucket_name: Required. The name of the Amazon S3 bucket. Type: string (or Expression + with resultType string). + :type bucket_name: object + :param key: The key of the Amazon S3 object. Type: string (or Expression with resultType + string). 
+ :type key: object + :param prefix: The prefix filter for the S3 object name. Type: string (or Expression with + resultType string). + :type prefix: object + :param version: The version for the S3 object. Type: string (or Expression with resultType + string). + :type version: object + :param modified_datetime_start: The start of S3 object's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of S3 object's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_end: object + :param format: The format of files. + :type format: ~azure.synapse.artifacts.models.DatasetStorageFormat + :param compression: The data compression method used for the Amazon S3 object. + :type compression: ~azure.synapse.artifacts.models.DatasetCompression + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + 'bucket_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'bucket_name': {'key': 'typeProperties.bucketName', 'type': 'object'}, + 'key': {'key': 'typeProperties.key', 'type': 'object'}, + 'prefix': {'key': 'typeProperties.prefix', 'type': 'object'}, + 'version': {'key': 'typeProperties.version', 'type': 'object'}, + 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + bucket_name: object, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + key: Optional[object] = None, + prefix: Optional[object] = None, + version: Optional[object] = None, + modified_datetime_start: Optional[object] = None, + modified_datetime_end: Optional[object] = None, + format: Optional["DatasetStorageFormat"] = None, + compression: Optional["DatasetCompression"] = None, + **kwargs + ): + super(AmazonS3Dataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'AmazonS3Object' # type: str + self.bucket_name = bucket_name + self.key = key + self.prefix = prefix + self.version = version + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + self.format = format + self.compression = compression + + class 
AmazonS3LinkedService(LinkedService): """Linked service for Amazon S3. @@ -1052,6 +1204,9 @@ class AmazonS3LinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] + :param authentication_type: The authentication type of S3. Allowed value: AccessKey (default) + or TemporarySecurityCredentials. Type: string (or Expression with resultType string). + :type authentication_type: object :param access_key_id: The access key identifier of the Amazon S3 Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). :type access_key_id: object @@ -1062,6 +1217,8 @@ class AmazonS3LinkedService(LinkedService): an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). :type service_url: object + :param session_token: The session token for the S3 temporary security credential. + :type session_token: ~azure.synapse.artifacts.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -1079,9 +1236,11 @@ class AmazonS3LinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, + 'session_token': {'key': 'typeProperties.sessionToken', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -1093,17 +1252,21 @@ def __init__( description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, + authentication_type: Optional[object] = None, access_key_id: Optional[object] = None, secret_access_key: Optional["SecretBase"] = None, service_url: Optional[object] = None, + session_token: Optional["SecretBase"] = None, encrypted_credential: Optional[object] = None, **kwargs ): super(AmazonS3LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AmazonS3' # type: str + self.authentication_type = authentication_type self.access_key_id = access_key_id self.secret_access_key = secret_access_key self.service_url = service_url + self.session_token = session_token self.encrypted_credential = encrypted_credential @@ -1281,8 +1444,18 @@ class AmazonS3ReadSettings(StoreReadSettings): :param prefix: The prefix filter for the S3 object name. Type: string (or Expression with resultType string). :type prefix: object + :param file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). 
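Taken together, the S3 additions enable temporary-security-credential authentication on the linked service and object-level addressing on the dataset. A sketch with placeholder names; SecureString is the existing SecretBase wrapper, not something introduced here.

    from azure.synapse.artifacts.models import (
        AmazonS3Dataset,
        AmazonS3LinkedService,
        LinkedServiceReference,
        SecureString,
    )

    s3_linked_service = AmazonS3LinkedService(
        authentication_type="TemporarySecurityCredentials",
        access_key_id="AKIA...",                        # placeholder
        secret_access_key=SecureString(value="<secret>"),
        session_token=SecureString(value="<session-token>"),
    )

    s3_dataset = AmazonS3Dataset(
        linked_service_name=LinkedServiceReference(
            type="LinkedServiceReference", reference_name="MyS3"  # placeholder
        ),
        bucket_name="landing-zone",
        prefix="exports/2021/",
    )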
+ :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition discovery starts from. Type: + string (or Expression with resultType string). + :type partition_root_path: object + :param delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :type delete_files_after_completion: object :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). :type modified_datetime_start: object @@ -1303,7 +1476,10 @@ class AmazonS3ReadSettings(StoreReadSettings): 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, 'prefix': {'key': 'prefix', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, + 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } @@ -1317,7 +1493,10 @@ def __init__( wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, prefix: Optional[object] = None, + file_list_path: Optional[object] = None, enable_partition_discovery: Optional[bool] = None, + partition_root_path: Optional[object] = None, + delete_files_after_completion: Optional[object] = None, modified_datetime_start: Optional[object] = None, modified_datetime_end: Optional[object] = None, **kwargs @@ -1328,7 +1507,10 @@ def __init__( self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name self.prefix = prefix + self.file_list_path = file_list_path self.enable_partition_discovery = enable_partition_discovery + self.partition_root_path = partition_root_path + self.delete_files_after_completion = delete_files_after_completion self.modified_datetime_start = modified_datetime_start self.modified_datetime_end = modified_datetime_end @@ -1558,9 +1740,8 @@ class AvroDataset(Dataset): :type folder: ~azure.synapse.artifacts.models.DatasetFolder :param location: The location of the avro storage. :type location: ~azure.synapse.artifacts.models.DatasetLocation - :param avro_compression_codec: Possible values include: "none", "deflate", "snappy", "xz", - "bzip2". - :type avro_compression_codec: str or ~azure.synapse.artifacts.models.AvroCompressionCodec + :param avro_compression_codec: A string from AvroCompressionCodecEnum or an expression. 
+ :type avro_compression_codec: object :param avro_compression_level: :type avro_compression_level: int """ @@ -1582,7 +1763,7 @@ class AvroDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'str'}, + 'avro_compression_codec': {'key': 'typeProperties.avroCompressionCodec', 'type': 'object'}, 'avro_compression_level': {'key': 'typeProperties.avroCompressionLevel', 'type': 'int'}, } @@ -1598,7 +1779,7 @@ def __init__( annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, location: Optional["DatasetLocation"] = None, - avro_compression_codec: Optional[Union[str, "AvroCompressionCodec"]] = None, + avro_compression_codec: Optional[object] = None, avro_compression_level: Optional[int] = None, **kwargs ): @@ -1701,7 +1882,7 @@ class CopySink(msrest.serialization.Model): """A copy activity sink. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroSink, AzureBlobFSSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureMySqlSink, AzurePostgreSqlSink, AzureQueueSink, AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDbMongoDbApiSink, CosmosDbSqlApiSink, DelimitedTextSink, DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, OdbcSink, OracleSink, OrcSink, ParquetSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SqlDWSink, SqlMISink, SqlServerSink, SqlSink. + sub-classes are: AvroSink, AzureBlobFSSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureDatabricksDeltaLakeSink, AzureMySqlSink, AzurePostgreSqlSink, AzureQueueSink, AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDbMongoDbApiSink, CosmosDbSqlApiSink, DelimitedTextSink, DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SqlDWSink, SqlMISink, SqlServerSink, SqlSink. All required parameters must be populated in order to send to Azure. 
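With avro_compression_codec loosened from the AvroCompressionCodec enum to object, the dataset accepts either a literal codec name or an expression. A sketch; the expression dict shape and the reference name are illustrative.

    from azure.synapse.artifacts.models import AvroDataset, LinkedServiceReference

    avro_dataset = AvroDataset(
        linked_service_name=LinkedServiceReference(
            type="LinkedServiceReference", reference_name="MyLake"  # placeholder
        ),
        # A plain string such as "snappy" still works; a parameterised expression is now valid too.
        avro_compression_codec={"value": "@dataset().codec", "type": "Expression"},
    )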
@@ -1742,7 +1923,7 @@ class CopySink(msrest.serialization.Model): } _subtype_map = { - 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSqlSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink'} + 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureDatabricksDeltaLakeSink': 'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSqlSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'RestSink': 'RestSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink'} } def __init__( @@ -1853,6 +2034,9 @@ class AvroSource(CopySource): :type max_concurrent_connections: object :param store_settings: Avro store settings. :type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). 
+ :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, store_settings: Optional["StoreReadSettings"] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): super(AvroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'AvroSource' # type: str self.store_settings = store_settings + self.additional_columns = additional_columns class FormatWriteSettings(msrest.serialization.Model): """Format write settings. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroWriteSettings, DelimitedTextWriteSettings, JsonWriteSettings. + sub-classes are: AvroWriteSettings, DelimitedTextWriteSettings, JsonWriteSettings, OrcWriteSettings, ParquetWriteSettings. All required parameters must be populated in order to send to Azure. @@ -1908,7 +2095,7 @@ class FormatWriteSettings(msrest.serialization.Model): } _subtype_map = { - 'type': {'AvroWriteSettings': 'AvroWriteSettings', 'DelimitedTextWriteSettings': 'DelimitedTextWriteSettings', 'JsonWriteSettings': 'JsonWriteSettings'} + 'type': {'AvroWriteSettings': 'AvroWriteSettings', 'DelimitedTextWriteSettings': 'DelimitedTextWriteSettings', 'JsonWriteSettings': 'JsonWriteSettings', 'OrcWriteSettings': 'OrcWriteSettings', 'ParquetWriteSettings': 'ParquetWriteSettings'} } def __init__( @@ -1936,6 +2123,13 @@ class AvroWriteSettings(FormatWriteSettings): :type record_name: str :param record_namespace: Record namespace in the write result. :type record_namespace: str + :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + specified count. Type: integer (or Expression with resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copying from non-file + based store without partitionOptions. Type: string (or Expression with resultType string).
+ :type file_name_prefix: object """ _validation = { @@ -1947,6 +2141,8 @@ class AvroWriteSettings(FormatWriteSettings): 'type': {'key': 'type', 'type': 'str'}, 'record_name': {'key': 'recordName', 'type': 'str'}, 'record_namespace': {'key': 'recordNamespace', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, } def __init__( @@ -1955,12 +2151,16 @@ def __init__( additional_properties: Optional[Dict[str, object]] = None, record_name: Optional[str] = None, record_namespace: Optional[str] = None, + max_rows_per_file: Optional[object] = None, + file_name_prefix: Optional[object] = None, **kwargs ): super(AvroWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) self.type = 'AvroWriteSettings' # type: str self.record_name = record_name self.record_namespace = record_namespace + self.max_rows_per_file = max_rows_per_file + self.file_name_prefix = file_name_prefix class AzureBatchLinkedService(LinkedService): @@ -2049,6 +2249,194 @@ def __init__( self.encrypted_credential = encrypted_credential +class AzureBlobDataset(Dataset): + """The Azure Blob storage. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param folder_path: The path of the Azure Blob storage. Type: string (or Expression with + resultType string). + :type folder_path: object + :param table_root_location: The root of blob path. Type: string (or Expression with resultType + string). + :type table_root_location: object + :param file_name: The name of the Azure Blob. Type: string (or Expression with resultType + string). + :type file_name: object + :param modified_datetime_start: The start of Azure Blob's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of Azure Blob's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_end: object + :param format: The format of the Azure Blob storage. + :type format: ~azure.synapse.artifacts.models.DatasetStorageFormat + :param compression: The data compression method used for the blob storage. 
+ :type compression: ~azure.synapse.artifacts.models.DatasetCompression + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'table_root_location': {'key': 'typeProperties.tableRootLocation', 'type': 'object'}, + 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + folder_path: Optional[object] = None, + table_root_location: Optional[object] = None, + file_name: Optional[object] = None, + modified_datetime_start: Optional[object] = None, + modified_datetime_end: Optional[object] = None, + format: Optional["DatasetStorageFormat"] = None, + compression: Optional["DatasetCompression"] = None, + **kwargs + ): + super(AzureBlobDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'AzureBlob' # type: str + self.folder_path = folder_path + self.table_root_location = table_root_location + self.file_name = file_name + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + self.format = format + self.compression = compression + + +class AzureBlobFSDataset(Dataset): + """The Azure Data Lake Storage Gen2 storage. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. 
Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param folder_path: The path of the Azure Data Lake Storage Gen2 storage. Type: string (or + Expression with resultType string). + :type folder_path: object + :param file_name: The name of the Azure Data Lake Storage Gen2. Type: string (or Expression + with resultType string). + :type file_name: object + :param format: The format of the Azure Data Lake Storage Gen2 storage. + :type format: ~azure.synapse.artifacts.models.DatasetStorageFormat + :param compression: The data compression method used for the blob storage. + :type compression: ~azure.synapse.artifacts.models.DatasetCompression + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + folder_path: Optional[object] = None, + file_name: Optional[object] = None, + format: Optional["DatasetStorageFormat"] = None, + compression: Optional["DatasetCompression"] = None, + **kwargs + ): + super(AzureBlobFSDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'AzureBlobFSFile' # type: str + self.folder_path = folder_path + self.file_name = file_name + self.format = format + self.compression = compression + + class AzureBlobFSLinkedService(LinkedService): """Azure Data Lake Storage Gen2 linked service. @@ -2082,6 +2470,10 @@ class AzureBlobFSLinkedService(LinkedService): :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). 
:type tenant: object + :param azure_cloud_type: Indicates the azure cloud type of the service principal auth. Allowed + values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data + factory regions’ cloud type. Type: string (or Expression with resultType string). + :type azure_cloud_type: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -2105,6 +2497,7 @@ class AzureBlobFSLinkedService(LinkedService): 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -2121,6 +2514,7 @@ def __init__( service_principal_id: Optional[object] = None, service_principal_key: Optional["SecretBase"] = None, tenant: Optional[object] = None, + azure_cloud_type: Optional[object] = None, encrypted_credential: Optional[object] = None, **kwargs ): @@ -2131,6 +2525,7 @@ def __init__( self.service_principal_id = service_principal_id self.service_principal_key = service_principal_key self.tenant = tenant + self.azure_cloud_type = azure_cloud_type self.encrypted_credential = encrypted_credential @@ -2203,8 +2598,18 @@ class AzureBlobFSReadSettings(StoreReadSettings): :param wildcard_file_name: Azure blobFS wildcardFileName. Type: string (or Expression with resultType string). :type wildcard_file_name: object + :param file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition discovery starts from. Type: + string (or Expression with resultType string). + :type partition_root_path: object + :param delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :type delete_files_after_completion: object :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string).
:type modified_datetime_start: object @@ -2224,7 +2629,10 @@ class AzureBlobFSReadSettings(StoreReadSettings): 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, + 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } @@ -2237,7 +2645,10 @@ def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, recursive: Optional[object] = None, wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, + file_list_path: Optional[object] = None, enable_partition_discovery: Optional[bool] = None, + partition_root_path: Optional[object] = None, + delete_files_after_completion: Optional[object] = None, modified_datetime_start: Optional[object] = None, modified_datetime_end: Optional[object] = None, **kwargs @@ -2247,7 +2658,10 @@ def __init__( self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name + self.file_list_path = file_list_path self.enable_partition_discovery = enable_partition_discovery + self.partition_root_path = partition_root_path + self.delete_files_after_completion = delete_files_after_completion self.modified_datetime_start = modified_datetime_start self.modified_datetime_end = modified_datetime_end @@ -2381,7 +2795,7 @@ class StoreWriteSettings(msrest.serialization.Model): """Connector write settings. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureBlobFSWriteSettings, AzureBlobStorageWriteSettings, AzureDataLakeStoreWriteSettings, FileServerWriteSettings, SftpWriteSettings. + sub-classes are: AzureBlobFSWriteSettings, AzureBlobStorageWriteSettings, AzureDataLakeStoreWriteSettings, AzureFileStorageWriteSettings, FileServerWriteSettings, SftpWriteSettings. All required parameters must be populated in order to send to Azure. @@ -2409,7 +2823,7 @@ class StoreWriteSettings(msrest.serialization.Model): } _subtype_map = { - 'type': {'AzureBlobFSWriteSettings': 'AzureBlobFSWriteSettings', 'AzureBlobStorageWriteSettings': 'AzureBlobStorageWriteSettings', 'AzureDataLakeStoreWriteSettings': 'AzureDataLakeStoreWriteSettings', 'FileServerWriteSettings': 'FileServerWriteSettings', 'SftpWriteSettings': 'SftpWriteSettings'} + 'type': {'AzureBlobFSWriteSettings': 'AzureBlobFSWriteSettings', 'AzureBlobStorageWriteSettings': 'AzureBlobStorageWriteSettings', 'AzureDataLakeStoreWriteSettings': 'AzureDataLakeStoreWriteSettings', 'AzureFileStorageWriteSettings': 'AzureFileStorageWriteSettings', 'FileServerWriteSettings': 'FileServerWriteSettings', 'SftpWriteSettings': 'SftpWriteSettings'} } def __init__( @@ -2514,6 +2928,10 @@ class AzureBlobStorageLinkedService(LinkedService): :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object + :param azure_cloud_type: Indicates the azure cloud type of the service principal auth. Allowed + values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data + factory regions’ cloud type.
Type: string (or Expression with resultType string). + :type azure_cloud_type: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -2539,6 +2957,7 @@ class AzureBlobStorageLinkedService(LinkedService): 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, } @@ -2558,6 +2977,7 @@ def __init__( service_principal_id: Optional[object] = None, service_principal_key: Optional["SecretBase"] = None, tenant: Optional[object] = None, + azure_cloud_type: Optional[object] = None, encrypted_credential: Optional[str] = None, **kwargs ): @@ -2571,6 +2991,7 @@ def __init__( self.service_principal_id = service_principal_id self.service_principal_key = service_principal_key self.tenant = tenant + self.azure_cloud_type = azure_cloud_type self.encrypted_credential = encrypted_credential @@ -2646,8 +3067,18 @@ class AzureBlobStorageReadSettings(StoreReadSettings): :param prefix: The prefix filter for the Azure Blob name. Type: string (or Expression with resultType string). :type prefix: object + :param file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition discovery starts from. Type: + string (or Expression with resultType string). + :type partition_root_path: object + :param delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :type delete_files_after_completion: object :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). 
:type modified_datetime_start: object @@ -2668,7 +3099,10 @@ class AzureBlobStorageReadSettings(StoreReadSettings): 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, 'prefix': {'key': 'prefix', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, + 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } @@ -2682,7 +3116,10 @@ def __init__( wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, prefix: Optional[object] = None, + file_list_path: Optional[object] = None, enable_partition_discovery: Optional[bool] = None, + partition_root_path: Optional[object] = None, + delete_files_after_completion: Optional[object] = None, modified_datetime_start: Optional[object] = None, modified_datetime_end: Optional[object] = None, **kwargs @@ -2693,7 +3130,10 @@ def __init__( self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name self.prefix = prefix + self.file_list_path = file_list_path self.enable_partition_discovery = enable_partition_discovery + self.partition_root_path = partition_root_path + self.delete_files_after_completion = delete_files_after_completion self.modified_datetime_start = modified_datetime_start self.modified_datetime_end = modified_datetime_end @@ -2744,286 +3184,246 @@ def __init__( self.block_size_in_mb = block_size_in_mb -class AzureDatabricksLinkedService(LinkedService): - """Azure Databricks linked service. +class AzureDatabricksDeltaLakeDataset(Dataset): + """Azure Databricks Delta Lake dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. + :param type: Required. Type of dataset.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. + :param description: Dataset description. :type description: str - :param parameters: Parameters for linked service. + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. + :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] - :param domain: Required. 
:code:``.azuredatabricks.net, domain name of your Databricks - deployment. Type: string (or Expression with resultType string). - :type domain: object - :param access_token: Required. Access token for databricks REST API. Refer to - https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression - with resultType string). - :type access_token: ~azure.synapse.artifacts.models.SecretBase - :param existing_cluster_id: The id of an existing interactive cluster that will be used for all - runs of this activity. Type: string (or Expression with resultType string). - :type existing_cluster_id: object - :param instance_pool_id: The id of an existing instance pool that will be used for all runs of - this activity. Type: string (or Expression with resultType string). - :type instance_pool_id: object - :param new_cluster_version: If not using an existing interactive cluster, this specifies the - Spark version of a new job cluster or instance pool nodes created for each run of this - activity. Required if instancePoolId is specified. Type: string (or Expression with resultType + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param table: The name of delta table. Type: string (or Expression with resultType string). + :type table: object + :param database: The database name of delta table. Type: string (or Expression with resultType string). - :type new_cluster_version: object - :param new_cluster_num_of_worker: If not using an existing interactive cluster, this specifies - the number of worker nodes to use for the new job cluster or instance pool. For new job - clusters, this a string-formatted Int32, like '1' means numOfWorker is 1 or '1:10' means auto- - scale from 1 (min) to 10 (max). For instance pools, this is a string-formatted Int32, and can - only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is - specified. Type: string (or Expression with resultType string). - :type new_cluster_num_of_worker: object - :param new_cluster_node_type: The node type of the new job cluster. This property is required - if newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is - specified, this property is ignored. Type: string (or Expression with resultType string). - :type new_cluster_node_type: object - :param new_cluster_spark_conf: A set of optional, user-specified Spark configuration key-value - pairs. - :type new_cluster_spark_conf: dict[str, object] - :param new_cluster_spark_env_vars: A set of optional, user-specified Spark environment - variables key-value pairs. - :type new_cluster_spark_env_vars: dict[str, object] - :param new_cluster_custom_tags: Additional tags for cluster resources. This property is ignored - in instance pool configurations. - :type new_cluster_custom_tags: dict[str, object] - :param new_cluster_driver_node_type: The driver node type for the new job cluster. This - property is ignored in instance pool configurations. Type: string (or Expression with - resultType string). - :type new_cluster_driver_node_type: object - :param new_cluster_init_scripts: User-defined initialization scripts for the new cluster. Type: - array of strings (or Expression with resultType array of strings). - :type new_cluster_init_scripts: object - :param new_cluster_enable_elastic_disk: Enable the elastic disk on the new cluster. 
This - property is now ignored, and takes the default elastic disk behavior in Databricks (elastic - disks are always enabled). Type: boolean (or Expression with resultType boolean). - :type new_cluster_enable_elastic_disk: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). - :type encrypted_credential: object + :type database: object """ _validation = { 'type': {'required': True}, - 'domain': {'required': True}, - 'access_token': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'existing_cluster_id': {'key': 'typeProperties.existingClusterId', 'type': 'object'}, - 'instance_pool_id': {'key': 'typeProperties.instancePoolId', 'type': 'object'}, - 'new_cluster_version': {'key': 'typeProperties.newClusterVersion', 'type': 'object'}, - 'new_cluster_num_of_worker': {'key': 'typeProperties.newClusterNumOfWorker', 'type': 'object'}, - 'new_cluster_node_type': {'key': 'typeProperties.newClusterNodeType', 'type': 'object'}, - 'new_cluster_spark_conf': {'key': 'typeProperties.newClusterSparkConf', 'type': '{object}'}, - 'new_cluster_spark_env_vars': {'key': 'typeProperties.newClusterSparkEnvVars', 'type': '{object}'}, - 'new_cluster_custom_tags': {'key': 'typeProperties.newClusterCustomTags', 'type': '{object}'}, - 'new_cluster_driver_node_type': {'key': 'typeProperties.newClusterDriverNodeType', 'type': 'object'}, - 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, - 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, } def __init__( self, *, - domain: object, - access_token: "SecretBase", + linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - existing_cluster_id: Optional[object] = None, - instance_pool_id: Optional[object] = None, - new_cluster_version: Optional[object] = None, - new_cluster_num_of_worker: Optional[object] = None, - new_cluster_node_type: Optional[object] = None, - new_cluster_spark_conf: Optional[Dict[str, object]] = None, - new_cluster_spark_env_vars: Optional[Dict[str, object]] = None, 
- new_cluster_custom_tags: Optional[Dict[str, object]] = None, - new_cluster_driver_node_type: Optional[object] = None, - new_cluster_init_scripts: Optional[object] = None, - new_cluster_enable_elastic_disk: Optional[object] = None, - encrypted_credential: Optional[object] = None, + folder: Optional["DatasetFolder"] = None, + table: Optional[object] = None, + database: Optional[object] = None, **kwargs ): - super(AzureDatabricksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureDatabricks' # type: str - self.domain = domain - self.access_token = access_token - self.existing_cluster_id = existing_cluster_id - self.instance_pool_id = instance_pool_id - self.new_cluster_version = new_cluster_version - self.new_cluster_num_of_worker = new_cluster_num_of_worker - self.new_cluster_node_type = new_cluster_node_type - self.new_cluster_spark_conf = new_cluster_spark_conf - self.new_cluster_spark_env_vars = new_cluster_spark_env_vars - self.new_cluster_custom_tags = new_cluster_custom_tags - self.new_cluster_driver_node_type = new_cluster_driver_node_type - self.new_cluster_init_scripts = new_cluster_init_scripts - self.new_cluster_enable_elastic_disk = new_cluster_enable_elastic_disk - self.encrypted_credential = encrypted_credential + super(AzureDatabricksDeltaLakeDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'AzureDatabricksDeltaLakeDataset' # type: str + self.table = table + self.database = database -class ExecutionActivity(Activity): - """Base class for all execution activities. +class ExportSettings(msrest.serialization.Model): + """Export command settings. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureDataExplorerCommandActivity, AzureFunctionActivity, AzureMLBatchExecutionActivity, AzureMLExecutePipelineActivity, AzureMLUpdateResourceActivity, CopyActivity, CustomActivity, DataLakeAnalyticsUSQLActivity, DatabricksNotebookActivity, DatabricksSparkJarActivity, DatabricksSparkPythonActivity, DeleteActivity, ExecuteDataFlowActivity, ExecuteSSISPackageActivity, GetMetadataActivity, HDInsightHiveActivity, HDInsightMapReduceActivity, HDInsightPigActivity, HDInsightSparkActivity, HDInsightStreamingActivity, LookupActivity, SynapseSparkJobDefinitionActivity, SqlServerStoredProcedureActivity, SynapseNotebookActivity, WebActivity. + sub-classes are: AzureDatabricksDeltaLakeExportCommand, SnowflakeExportCopyCommand. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. The export setting type.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. 
- :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, } _subtype_map = { - 'type': {'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'AzureFunctionActivity': 'AzureFunctionActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'AzureMLExecutePipeline': 'AzureMLExecutePipelineActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'Copy': 'CopyActivity', 'Custom': 'CustomActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'Delete': 'DeleteActivity', 'ExecuteDataFlow': 'ExecuteDataFlowActivity', 'ExecuteSSISPackage': 'ExecuteSSISPackageActivity', 'GetMetadata': 'GetMetadataActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'Lookup': 'LookupActivity', 'SparkJob': 'SynapseSparkJobDefinitionActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'SynapseNotebook': 'SynapseNotebookActivity', 'WebActivity': 'WebActivity'} + 'type': {'AzureDatabricksDeltaLakeExportCommand': 'AzureDatabricksDeltaLakeExportCommand', 'SnowflakeExportCopyCommand': 'SnowflakeExportCopyCommand'} } def __init__( self, *, - name: str, additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - linked_service_name: Optional["LinkedServiceReference"] = None, - policy: Optional["ActivityPolicy"] = None, **kwargs ): - super(ExecutionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) - self.type = 'Execution' # type: str - self.linked_service_name = linked_service_name - self.policy = policy + super(ExportSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = 'ExportSettings' # type: str -class AzureDataExplorerCommandActivity(ExecutionActivity): - """Azure Data Explorer command activity. +class AzureDatabricksDeltaLakeExportCommand(ExportSettings): + """Azure Databricks Delta Lake export command settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. 
Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. The export setting type.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference - :param policy: Activity policy. - :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param command: Required. A control command, according to the Azure Data Explorer command - syntax. Type: string (or Expression with resultType string). - :type command: object - :param command_timeout: Control command timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))..). - :type command_timeout: object + :param date_format: Specify the date format for the csv in Azure Databricks Delta Lake Copy. + Type: string (or Expression with resultType string). + :type date_format: object + :param timestamp_format: Specify the timestamp format for the csv in Azure Databricks Delta + Lake Copy. Type: string (or Expression with resultType string). + :type timestamp_format: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'command': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'command': {'key': 'typeProperties.command', 'type': 'object'}, - 'command_timeout': {'key': 'typeProperties.commandTimeout', 'type': 'object'}, + 'date_format': {'key': 'dateFormat', 'type': 'object'}, + 'timestamp_format': {'key': 'timestampFormat', 'type': 'object'}, } def __init__( self, *, - name: str, - command: object, additional_properties: Optional[Dict[str, object]] = None, - description: Optional[str] = None, - depends_on: Optional[List["ActivityDependency"]] = None, - user_properties: Optional[List["UserProperty"]] = None, - linked_service_name: Optional["LinkedServiceReference"] = None, - policy: Optional["ActivityPolicy"] = None, - command_timeout: Optional[object] = None, + date_format: Optional[object] = None, + timestamp_format: Optional[object] = None, **kwargs ): - super(AzureDataExplorerCommandActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) - self.type = 'AzureDataExplorerCommand' # type: str - self.command = command - self.command_timeout = command_timeout + super(AzureDatabricksDeltaLakeExportCommand, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'AzureDatabricksDeltaLakeExportCommand' # type: str + self.date_format = date_format + self.timestamp_format = 
timestamp_format -class AzureDataExplorerLinkedService(LinkedService): - """Azure Data Explorer (Kusto) linked service. +class ImportSettings(msrest.serialization.Model): + """Import command settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureDatabricksDeltaLakeImportCommand, SnowflakeImportCopyCommand. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The import setting type.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'AzureDatabricksDeltaLakeImportCommand': 'AzureDatabricksDeltaLakeImportCommand', 'SnowflakeImportCopyCommand': 'SnowflakeImportCopyCommand'} + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + **kwargs + ): + super(ImportSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = 'ImportSettings' # type: str + + +class AzureDatabricksDeltaLakeImportCommand(ImportSettings): + """Azure Databricks Delta Lake import command settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The import setting type.Constant filled by server. + :type type: str + :param date_format: Specify the date format for csv in Azure Databricks Delta Lake Copy. Type: + string (or Expression with resultType string). + :type date_format: object + :param timestamp_format: Specify the timestamp format for csv in Azure Databricks Delta Lake + Copy. Type: string (or Expression with resultType string). + :type timestamp_format: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'date_format': {'key': 'dateFormat', 'type': 'object'}, + 'timestamp_format': {'key': 'timestampFormat', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + date_format: Optional[object] = None, + timestamp_format: Optional[object] = None, + **kwargs + ): + super(AzureDatabricksDeltaLakeImportCommand, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'AzureDatabricksDeltaLakeImportCommand' # type: str + self.date_format = date_format + self.timestamp_format = timestamp_format + + +class AzureDatabricksDeltaLakeLinkedService(LinkedService): + """Azure Databricks Delta Lake linked service. All required parameters must be populated in order to send to Azure. @@ -3040,31 +3440,26 @@ class AzureDataExplorerLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param endpoint: Required. The endpoint of Azure Data Explorer (the engine's endpoint). URL - will be in the format https://:code:``.:code:``.kusto.windows.net. 
- Type: string (or Expression with resultType string). - :type endpoint: object - :param service_principal_id: Required. The ID of the service principal used to authenticate - against Azure Data Explorer. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: Required. The key of the service principal used to authenticate - against Kusto. - :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase - :param database: Required. Database name for connection. Type: string (or Expression with + :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks + deployment. Type: string (or Expression with resultType string). + :type domain: object + :param access_token: Required. Access token for databricks REST API. Refer to + https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type access_token: ~azure.synapse.artifacts.models.SecretBase + :param cluster_id: The id of an existing interactive cluster that will be used for all runs of + this job. Type: string (or Expression with resultType string). + :type cluster_id: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type database: object - :param tenant: Required. The name or ID of the tenant to which the service principal belongs. - Type: string (or Expression with resultType string). - :type tenant: object + :type encrypted_credential: object """ _validation = { 'type': {'required': True}, - 'endpoint': {'required': True}, - 'service_principal_id': {'required': True}, - 'service_principal_key': {'required': True}, - 'database': {'required': True}, - 'tenant': {'required': True}, + 'domain': {'required': True}, + 'access_token': {'required': True}, } _attribute_map = { @@ -3074,39 +3469,544 @@ class AzureDataExplorerLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'cluster_id': {'key': 'typeProperties.clusterId', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, *, - endpoint: object, - service_principal_id: object, - service_principal_key: "SecretBase", - database: object, - tenant: object, + domain: object, + access_token: "SecretBase", additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, + cluster_id: Optional[object] = None, + encrypted_credential: Optional[object] = None, **kwargs ): - 
super(AzureDataExplorerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureDataExplorer' # type: str - self.endpoint = endpoint - self.service_principal_id = service_principal_id - self.service_principal_key = service_principal_key - self.database = database - self.tenant = tenant + super(AzureDatabricksDeltaLakeLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureDatabricksDeltaLake' # type: str + self.domain = domain + self.access_token = access_token + self.cluster_id = cluster_id + self.encrypted_credential = encrypted_credential -class AzureDataExplorerSink(CopySink): - """A copy activity Azure Data Explorer sink. +class AzureDatabricksDeltaLakeSink(CopySink): + """A copy activity Azure Databricks Delta Lake sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :type pre_copy_script: object + :param import_settings: Azure Databricks Delta Lake import settings. 
+ :type import_settings: ~azure.synapse.artifacts.models.AzureDatabricksDeltaLakeImportCommand + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'import_settings': {'key': 'importSettings', 'type': 'AzureDatabricksDeltaLakeImportCommand'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + pre_copy_script: Optional[object] = None, + import_settings: Optional["AzureDatabricksDeltaLakeImportCommand"] = None, + **kwargs + ): + super(AzureDatabricksDeltaLakeSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureDatabricksDeltaLakeSink' # type: str + self.pre_copy_script = pre_copy_script + self.import_settings = import_settings + + +class AzureDatabricksDeltaLakeSource(CopySource): + """A copy activity Azure Databricks Delta Lake source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: Azure Databricks Delta Lake Sql query. Type: string (or Expression with + resultType string). + :type query: object + :param export_settings: Azure Databricks Delta Lake export settings. 
+ :type export_settings: ~azure.synapse.artifacts.models.AzureDatabricksDeltaLakeExportCommand + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'export_settings': {'key': 'exportSettings', 'type': 'AzureDatabricksDeltaLakeExportCommand'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query: Optional[object] = None, + export_settings: Optional["AzureDatabricksDeltaLakeExportCommand"] = None, + **kwargs + ): + super(AzureDatabricksDeltaLakeSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureDatabricksDeltaLakeSource' # type: str + self.query = query + self.export_settings = export_settings + + +class AzureDatabricksLinkedService(LinkedService): + """Azure Databricks linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks + deployment. Type: string (or Expression with resultType string). + :type domain: object + :param access_token: Access token for databricks REST API. Refer to + https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression + with resultType string). + :type access_token: ~azure.synapse.artifacts.models.SecretBase + :param authentication: Required to specify MSI, if using Workspace resource id for databricks + REST API. Type: string (or Expression with resultType string). + :type authentication: object + :param workspace_resource_id: Workspace resource id for databricks REST API. Type: string (or + Expression with resultType string). + :type workspace_resource_id: object + :param existing_cluster_id: The id of an existing interactive cluster that will be used for all + runs of this activity. Type: string (or Expression with resultType string). + :type existing_cluster_id: object + :param instance_pool_id: The id of an existing instance pool that will be used for all runs of + this activity. Type: string (or Expression with resultType string). 
+ :type instance_pool_id: object + :param new_cluster_version: If not using an existing interactive cluster, this specifies the + Spark version of a new job cluster or instance pool nodes created for each run of this + activity. Required if instancePoolId is specified. Type: string (or Expression with resultType + string). + :type new_cluster_version: object + :param new_cluster_num_of_worker: If not using an existing interactive cluster, this specifies + the number of worker nodes to use for the new job cluster or instance pool. For new job + clusters, this a string-formatted Int32, like '1' means numOfWorker is 1 or '1:10' means auto- + scale from 1 (min) to 10 (max). For instance pools, this is a string-formatted Int32, and can + only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is + specified. Type: string (or Expression with resultType string). + :type new_cluster_num_of_worker: object + :param new_cluster_node_type: The node type of the new job cluster. This property is required + if newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is + specified, this property is ignored. Type: string (or Expression with resultType string). + :type new_cluster_node_type: object + :param new_cluster_spark_conf: A set of optional, user-specified Spark configuration key-value + pairs. + :type new_cluster_spark_conf: dict[str, object] + :param new_cluster_spark_env_vars: A set of optional, user-specified Spark environment + variables key-value pairs. + :type new_cluster_spark_env_vars: dict[str, object] + :param new_cluster_custom_tags: Additional tags for cluster resources. This property is ignored + in instance pool configurations. + :type new_cluster_custom_tags: dict[str, object] + :param new_cluster_log_destination: Specify a location to deliver Spark driver, worker, and + event logs. Type: string (or Expression with resultType string). + :type new_cluster_log_destination: object + :param new_cluster_driver_node_type: The driver node type for the new job cluster. This + property is ignored in instance pool configurations. Type: string (or Expression with + resultType string). + :type new_cluster_driver_node_type: object + :param new_cluster_init_scripts: User-defined initialization scripts for the new cluster. Type: + array of strings (or Expression with resultType array of strings). + :type new_cluster_init_scripts: object + :param new_cluster_enable_elastic_disk: Enable the elastic disk on the new cluster. This + property is now ignored, and takes the default elastic disk behavior in Databricks (elastic + disks are always enabled). Type: boolean (or Expression with resultType boolean). + :type new_cluster_enable_elastic_disk: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + :param policy_id: The policy id for limiting the ability to configure clusters based on a user + defined set of rules. Type: string (or Expression with resultType string). 
+ :type policy_id: object + """ + + _validation = { + 'type': {'required': True}, + 'domain': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'authentication': {'key': 'typeProperties.authentication', 'type': 'object'}, + 'workspace_resource_id': {'key': 'typeProperties.workspaceResourceId', 'type': 'object'}, + 'existing_cluster_id': {'key': 'typeProperties.existingClusterId', 'type': 'object'}, + 'instance_pool_id': {'key': 'typeProperties.instancePoolId', 'type': 'object'}, + 'new_cluster_version': {'key': 'typeProperties.newClusterVersion', 'type': 'object'}, + 'new_cluster_num_of_worker': {'key': 'typeProperties.newClusterNumOfWorker', 'type': 'object'}, + 'new_cluster_node_type': {'key': 'typeProperties.newClusterNodeType', 'type': 'object'}, + 'new_cluster_spark_conf': {'key': 'typeProperties.newClusterSparkConf', 'type': '{object}'}, + 'new_cluster_spark_env_vars': {'key': 'typeProperties.newClusterSparkEnvVars', 'type': '{object}'}, + 'new_cluster_custom_tags': {'key': 'typeProperties.newClusterCustomTags', 'type': '{object}'}, + 'new_cluster_log_destination': {'key': 'typeProperties.newClusterLogDestination', 'type': 'object'}, + 'new_cluster_driver_node_type': {'key': 'typeProperties.newClusterDriverNodeType', 'type': 'object'}, + 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, + 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'policy_id': {'key': 'typeProperties.policyId', 'type': 'object'}, + } + + def __init__( + self, + *, + domain: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + access_token: Optional["SecretBase"] = None, + authentication: Optional[object] = None, + workspace_resource_id: Optional[object] = None, + existing_cluster_id: Optional[object] = None, + instance_pool_id: Optional[object] = None, + new_cluster_version: Optional[object] = None, + new_cluster_num_of_worker: Optional[object] = None, + new_cluster_node_type: Optional[object] = None, + new_cluster_spark_conf: Optional[Dict[str, object]] = None, + new_cluster_spark_env_vars: Optional[Dict[str, object]] = None, + new_cluster_custom_tags: Optional[Dict[str, object]] = None, + new_cluster_log_destination: Optional[object] = None, + new_cluster_driver_node_type: Optional[object] = None, + new_cluster_init_scripts: Optional[object] = None, + new_cluster_enable_elastic_disk: Optional[object] = None, + encrypted_credential: Optional[object] = None, + policy_id: Optional[object] = None, + **kwargs + ): + super(AzureDatabricksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, 
annotations=annotations, **kwargs) + self.type = 'AzureDatabricks' # type: str + self.domain = domain + self.access_token = access_token + self.authentication = authentication + self.workspace_resource_id = workspace_resource_id + self.existing_cluster_id = existing_cluster_id + self.instance_pool_id = instance_pool_id + self.new_cluster_version = new_cluster_version + self.new_cluster_num_of_worker = new_cluster_num_of_worker + self.new_cluster_node_type = new_cluster_node_type + self.new_cluster_spark_conf = new_cluster_spark_conf + self.new_cluster_spark_env_vars = new_cluster_spark_env_vars + self.new_cluster_custom_tags = new_cluster_custom_tags + self.new_cluster_log_destination = new_cluster_log_destination + self.new_cluster_driver_node_type = new_cluster_driver_node_type + self.new_cluster_init_scripts = new_cluster_init_scripts + self.new_cluster_enable_elastic_disk = new_cluster_enable_elastic_disk + self.encrypted_credential = encrypted_credential + self.policy_id = policy_id + + +class ExecutionActivity(Activity): + """Base class for all execution activities. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureDataExplorerCommandActivity, AzureFunctionActivity, AzureMLBatchExecutionActivity, AzureMLExecutePipelineActivity, AzureMLUpdateResourceActivity, CopyActivity, CustomActivity, DataLakeAnalyticsUSQLActivity, DatabricksNotebookActivity, DatabricksSparkJarActivity, DatabricksSparkPythonActivity, DeleteActivity, ExecuteDataFlowActivity, ExecuteSSISPackageActivity, GetMetadataActivity, HDInsightHiveActivity, HDInsightMapReduceActivity, HDInsightPigActivity, HDInsightSparkActivity, HDInsightStreamingActivity, LookupActivity, SynapseSparkJobDefinitionActivity, SqlServerStoredProcedureActivity, SynapseNotebookActivity, WebActivity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. 
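# Minimal usage sketch for the AzureDatabricksLinkedService model above. The domain and
# cluster id are placeholders; SecureString is assumed as the inline SecretBase type for the
# access token.
from azure.synapse.artifacts.models import AzureDatabricksLinkedService, SecureString

databricks_ls = AzureDatabricksLinkedService(
    domain="https://adb-1234567890123456.7.azuredatabricks.net",
    access_token=SecureString(value="<databricks-pat>"),
    existing_cluster_id="0301-123456-abcd123",
)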
+ :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + } + + _subtype_map = { + 'type': {'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'AzureFunctionActivity': 'AzureFunctionActivity', 'AzureMLBatchExecution': 'AzureMLBatchExecutionActivity', 'AzureMLExecutePipeline': 'AzureMLExecutePipelineActivity', 'AzureMLUpdateResource': 'AzureMLUpdateResourceActivity', 'Copy': 'CopyActivity', 'Custom': 'CustomActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUSQLActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'Delete': 'DeleteActivity', 'ExecuteDataFlow': 'ExecuteDataFlowActivity', 'ExecuteSSISPackage': 'ExecuteSSISPackageActivity', 'GetMetadata': 'GetMetadataActivity', 'HDInsightHive': 'HDInsightHiveActivity', 'HDInsightMapReduce': 'HDInsightMapReduceActivity', 'HDInsightPig': 'HDInsightPigActivity', 'HDInsightSpark': 'HDInsightSparkActivity', 'HDInsightStreaming': 'HDInsightStreamingActivity', 'Lookup': 'LookupActivity', 'SparkJob': 'SynapseSparkJobDefinitionActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'SynapseNotebook': 'SynapseNotebookActivity', 'WebActivity': 'WebActivity'} + } + + def __init__( + self, + *, + name: str, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + linked_service_name: Optional["LinkedServiceReference"] = None, + policy: Optional["ActivityPolicy"] = None, + **kwargs + ): + super(ExecutionActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, **kwargs) + self.type = 'Execution' # type: str + self.linked_service_name = linked_service_name + self.policy = policy + + +class AzureDataExplorerCommandActivity(ExecutionActivity): + """Azure Data Explorer command activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~azure.synapse.artifacts.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param policy: Activity policy. 
+ :type policy: ~azure.synapse.artifacts.models.ActivityPolicy + :param command: Required. A control command, according to the Azure Data Explorer command + syntax. Type: string (or Expression with resultType string). + :type command: object + :param command_timeout: Control command timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))..). + :type command_timeout: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'command': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'command': {'key': 'typeProperties.command', 'type': 'object'}, + 'command_timeout': {'key': 'typeProperties.commandTimeout', 'type': 'object'}, + } + + def __init__( + self, + *, + name: str, + command: object, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + depends_on: Optional[List["ActivityDependency"]] = None, + user_properties: Optional[List["UserProperty"]] = None, + linked_service_name: Optional["LinkedServiceReference"] = None, + policy: Optional["ActivityPolicy"] = None, + command_timeout: Optional[object] = None, + **kwargs + ): + super(AzureDataExplorerCommandActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + self.type = 'AzureDataExplorerCommand' # type: str + self.command = command + self.command_timeout = command_timeout + + +class AzureDataExplorerLinkedService(LinkedService): + """Azure Data Explorer (Kusto) linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param endpoint: Required. The endpoint of Azure Data Explorer (the engine's endpoint). URL + will be in the format https://:code:``.:code:``.kusto.windows.net. + Type: string (or Expression with resultType string). + :type endpoint: object + :param service_principal_id: Required. The ID of the service principal used to authenticate + against Azure Data Explorer. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. The key of the service principal used to authenticate + against Kusto. 
+ :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param database: Required. Database name for connection. Type: string (or Expression with + resultType string). + :type database: object + :param tenant: Required. The name or ID of the tenant to which the service principal belongs. + Type: string (or Expression with resultType string). + :type tenant: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + 'database': {'required': True}, + 'tenant': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + } + + def __init__( + self, + *, + endpoint: object, + service_principal_id: object, + service_principal_key: "SecretBase", + database: object, + tenant: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + **kwargs + ): + super(AzureDataExplorerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureDataExplorer' # type: str + self.endpoint = endpoint + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.database = database + self.tenant = tenant + + +class AzureDataExplorerSink(CopySink): + """A copy activity Azure Data Explorer sink. All required parameters must be populated in order to send to Azure. @@ -3207,6 +4107,9 @@ class AzureDataExplorerSource(CopySource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).. :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). 
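# Minimal usage sketch for the Azure Data Explorer models above: a linked service plus a
# control-command activity. Endpoint, identifiers and the secret are placeholders;
# SecureString is assumed as the SecretBase implementation.
from azure.synapse.artifacts.models import (
    AzureDataExplorerCommandActivity,
    AzureDataExplorerLinkedService,
    SecureString,
)

adx_ls = AzureDataExplorerLinkedService(
    endpoint="https://mycluster.westus2.kusto.windows.net",
    service_principal_id="<app-id>",
    service_principal_key=SecureString(value="<app-secret>"),
    database="telemetry",
    tenant="<tenant-id>",
)
adx_command = AzureDataExplorerCommandActivity(
    name="PurgeStagingTable",
    command=".drop table StagingEvents ifexists",
    command_timeout="00:20:00",
)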
+ :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -3223,6 +4126,7 @@ class AzureDataExplorerSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, 'no_truncation': {'key': 'noTruncation', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( @@ -3235,6 +4139,7 @@ def __init__( max_concurrent_connections: Optional[object] = None, no_truncation: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): super(AzureDataExplorerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) @@ -3242,6 +4147,7 @@ def __init__( self.query = query self.no_truncation = no_truncation self.query_timeout = query_timeout + self.additional_columns = additional_columns class AzureDataExplorerTableDataset(Dataset): @@ -3411,6 +4317,92 @@ def __init__( self.encrypted_credential = encrypted_credential +class AzureDataLakeStoreDataset(Dataset): + """Azure Data Lake Store dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param folder_path: Path to the folder in the Azure Data Lake Store. Type: string (or + Expression with resultType string). + :type folder_path: object + :param file_name: The name of the file in the Azure Data Lake Store. Type: string (or + Expression with resultType string). + :type file_name: object + :param format: The format of the Data Lake Store. + :type format: ~azure.synapse.artifacts.models.DatasetStorageFormat + :param compression: The data compression method used for the item(s) in the Azure Data Lake + Store. 
+ :type compression: ~azure.synapse.artifacts.models.DatasetCompression + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + folder_path: Optional[object] = None, + file_name: Optional[object] = None, + format: Optional["DatasetStorageFormat"] = None, + compression: Optional["DatasetCompression"] = None, + **kwargs + ): + super(AzureDataLakeStoreDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'AzureDataLakeStoreFile' # type: str + self.folder_path = folder_path + self.file_name = file_name + self.format = format + self.compression = compression + + class AzureDataLakeStoreLinkedService(LinkedService): """Azure Data Lake Store linked service. @@ -3441,6 +4433,10 @@ class AzureDataLakeStoreLinkedService(LinkedService): :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object + :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data + factory regions’ cloud type. Type: string (or Expression with resultType string). + :type azure_cloud_type: object :param account_name: Data Lake Store account name. Type: string (or Expression with resultType string). 
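# Minimal usage sketch for the AzureDataLakeStoreDataset model above, assuming
# LinkedServiceReference accepts the type/reference_name keywords used elsewhere in these
# models. Paths and names are placeholders.
from azure.synapse.artifacts.models import AzureDataLakeStoreDataset, LinkedServiceReference

adls_dataset = AzureDataLakeStoreDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference", reference_name="MyAdlsGen1LinkedService"
    ),
    folder_path="datalake/raw/events",
    file_name="events.csv",
)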
:type account_name: object @@ -3472,6 +4468,7 @@ class AzureDataLakeStoreLinkedService(LinkedService): 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'account_name': {'key': 'typeProperties.accountName', 'type': 'object'}, 'subscription_id': {'key': 'typeProperties.subscriptionId', 'type': 'object'}, 'resource_group_name': {'key': 'typeProperties.resourceGroupName', 'type': 'object'}, @@ -3490,6 +4487,7 @@ def __init__( service_principal_id: Optional[object] = None, service_principal_key: Optional["SecretBase"] = None, tenant: Optional[object] = None, + azure_cloud_type: Optional[object] = None, account_name: Optional[object] = None, subscription_id: Optional[object] = None, resource_group_name: Optional[object] = None, @@ -3502,6 +4500,7 @@ def __init__( self.service_principal_id = service_principal_id self.service_principal_key = service_principal_key self.tenant = tenant + self.azure_cloud_type = azure_cloud_type self.account_name = account_name self.subscription_id = subscription_id self.resource_group_name = resource_group_name @@ -3571,8 +4570,26 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings): :param wildcard_file_name: ADLS wildcardFileName. Type: string (or Expression with resultType string). :type wildcard_file_name: object + :param file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :type file_list_path: object + :param list_after: Lists files after the value (exclusive) based on file/folder names’ + lexicographical order. Applies under the folderPath in data set, and filter files/sub-folders + under the folderPath. Type: string (or Expression with resultType string). + :type list_after: object + :param list_before: Lists files before the value (inclusive) based on file/folder names’ + lexicographical order. Applies under the folderPath in data set, and filter files/sub-folders + under the folderPath. Type: string (or Expression with resultType string). + :type list_before: object :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition discovery starts from. Type: + string (or Expression with resultType string). + :type partition_root_path: object + :param delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :type delete_files_after_completion: object :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). 
:type modified_datetime_start: object @@ -3592,7 +4609,12 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings): 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, + 'list_after': {'key': 'listAfter', 'type': 'object'}, + 'list_before': {'key': 'listBefore', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, + 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } @@ -3605,7 +4627,12 @@ def __init__( recursive: Optional[object] = None, wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, + file_list_path: Optional[object] = None, + list_after: Optional[object] = None, + list_before: Optional[object] = None, enable_partition_discovery: Optional[bool] = None, + partition_root_path: Optional[object] = None, + delete_files_after_completion: Optional[object] = None, modified_datetime_start: Optional[object] = None, modified_datetime_end: Optional[object] = None, **kwargs @@ -3615,7 +4642,12 @@ def __init__( self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name + self.file_list_path = file_list_path + self.list_after = list_after + self.list_before = list_before self.enable_partition_discovery = enable_partition_discovery + self.partition_root_path = partition_root_path + self.delete_files_after_completion = delete_files_after_completion self.modified_datetime_start = modified_datetime_start self.modified_datetime_end = modified_datetime_end @@ -3743,6 +4775,372 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings): All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param copy_behavior: The type of copy behavior for copy sink. + :type copy_behavior: object + :param expiry_date_time: Specifies the expiry time of the written files. The time is applied to + the UTC time zone in the format of "2018-12-01T05:00:00Z". Default value is NULL. Type: integer + (or Expression with resultType integer). 
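# Minimal usage sketch for the extended AzureDataLakeStoreReadSettings above, combining the
# newly added listAfter/listBefore and partition-discovery knobs. Values are illustrative.
from azure.synapse.artifacts.models import AzureDataLakeStoreReadSettings

adls_read = AzureDataLakeStoreReadSettings(
    recursive=True,
    wildcard_file_name="*.parquet",
    list_after="2021/03/01",
    list_before="2021/03/31",
    enable_partition_discovery=True,
    partition_root_path="datalake/raw/events",
    delete_files_after_completion=False,
)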
+ :type expiry_date_time: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, + 'expiry_date_time': {'key': 'expiryDateTime', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + max_concurrent_connections: Optional[object] = None, + copy_behavior: Optional[object] = None, + expiry_date_time: Optional[object] = None, + **kwargs + ): + super(AzureDataLakeStoreWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + self.type = 'AzureDataLakeStoreWriteSettings' # type: str + self.expiry_date_time = expiry_date_time + + +class Resource(msrest.serialization.Model): + """Common fields that are returned in the response for all Azure Resource Manager resources. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(Resource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None + + +class AzureEntityResource(Resource): + """The resource model definition for an Azure Resource Manager resource with an etag. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureEntityResource, self).__init__(**kwargs) + self.etag = None + + +class AzureFileStorageLinkedService(LinkedService): + """Azure File Storage linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param host: Required. Host name of the server. Type: string (or Expression with resultType + string). + :type host: object + :param user_id: User ID to logon the server. Type: string (or Expression with resultType + string). + :type user_id: object + :param password: Password to logon the server. + :type password: ~azure.synapse.artifacts.models.SecretBase + :param connection_string: The connection string. It is mutually exclusive with sasUri property. + Type: string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param account_key: The Azure key vault secret reference of accountKey in connection string. + :type account_key: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param sas_uri: SAS URI of the Azure File resource. It is mutually exclusive with + connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. + :type sas_uri: object + :param sas_token: The Azure key vault secret reference of sasToken in sas uri. + :type sas_token: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param file_share: The azure file share name. It is required when auth with + accountKey/sasToken. Type: string (or Expression with resultType string). + :type file_share: object + :param snapshot: The azure file share snapshot version. Type: string (or Expression with + resultType string). + :type snapshot: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'user_id': {'key': 'typeProperties.userId', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'account_key': {'key': 'typeProperties.accountKey', 'type': 'AzureKeyVaultSecretReference'}, + 'sas_uri': {'key': 'typeProperties.sasUri', 'type': 'object'}, + 'sas_token': {'key': 'typeProperties.sasToken', 'type': 'AzureKeyVaultSecretReference'}, + 'file_share': {'key': 'typeProperties.fileShare', 'type': 'object'}, + 'snapshot': {'key': 'typeProperties.snapshot', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + host: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + user_id: Optional[object] = None, + password: Optional["SecretBase"] = None, + connection_string: Optional[object] = None, + account_key: Optional["AzureKeyVaultSecretReference"] = None, + sas_uri: Optional[object] = None, + sas_token: Optional["AzureKeyVaultSecretReference"] = None, + file_share: Optional[object] = None, + snapshot: Optional[object] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(AzureFileStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureFileStorage' # type: str + self.host = host + self.user_id = user_id + self.password = password + self.connection_string = connection_string + self.account_key = account_key + self.sas_uri = sas_uri + self.sas_token = sas_token + self.file_share = file_share + self.snapshot = snapshot + self.encrypted_credential = encrypted_credential + + +class AzureFileStorageLocation(DatasetLocation): + """The location of file server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location.Constant filled by server. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType + string). 
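# Minimal usage sketch for the expanded AzureFileStorageLinkedService above. connectionString
# and sasUri are mutually exclusive, so only the connection-string path is shown; the host,
# key and share name are placeholders.
from azure.synapse.artifacts.models import AzureFileStorageLinkedService

file_storage_ls = AzureFileStorageLinkedService(
    host=r"\\myaccount.file.core.windows.net\myshare",
    connection_string="DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=<key>",
    file_share="myshare",
    snapshot="2021-03-01T00:00:00.0000000Z",
)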
+ :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + folder_path: Optional[object] = None, + file_name: Optional[object] = None, + **kwargs + ): + super(AzureFileStorageLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) + self.type = 'AzureFileStorageLocation' # type: str + + +class AzureFileStorageReadSettings(StoreReadSettings): + """Azure File Storage read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The read setting type.Constant filled by server. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read recursively. Default is + true. Type: boolean (or Expression with resultType boolean). + :type recursive: object + :param wildcard_folder_path: Azure File Storage wildcardFolderPath. Type: string (or Expression + with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Azure File Storage wildcardFileName. Type: string (or Expression + with resultType string). + :type wildcard_file_name: object + :param prefix: The prefix filter for the Azure File name starting from root path. Type: string + (or Expression with resultType string). + :type prefix: object + :param file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :type file_list_path: object + :param enable_partition_discovery: Indicates whether to enable partition discovery. + :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition discovery starts from. Type: + string (or Expression with resultType string). + :type partition_root_path: object + :param delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :type delete_files_after_completion: object + :param modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + with resultType string). 
+ :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'prefix': {'key': 'prefix', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, + 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + max_concurrent_connections: Optional[object] = None, + recursive: Optional[object] = None, + wildcard_folder_path: Optional[object] = None, + wildcard_file_name: Optional[object] = None, + prefix: Optional[object] = None, + file_list_path: Optional[object] = None, + enable_partition_discovery: Optional[bool] = None, + partition_root_path: Optional[object] = None, + delete_files_after_completion: Optional[object] = None, + modified_datetime_start: Optional[object] = None, + modified_datetime_end: Optional[object] = None, + **kwargs + ): + super(AzureFileStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureFileStorageReadSettings' # type: str + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.prefix = prefix + self.file_list_path = file_list_path + self.enable_partition_discovery = enable_partition_discovery + self.partition_root_path = partition_root_path + self.delete_files_after_completion = delete_files_after_completion + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + + +class AzureFileStorageWriteSettings(StoreWriteSettings): + """Azure File Storage write settings. + + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] @@ -3774,268 +5172,8 @@ def __init__( copy_behavior: Optional[object] = None, **kwargs ): - super(AzureDataLakeStoreWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) - self.type = 'AzureDataLakeStoreWriteSettings' # type: str - - -class Resource(msrest.serialization.Model): - """Common fields that are returned in the response for all Azure Resource Manager resources. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. 
- :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(Resource, self).__init__(**kwargs) - self.id = None - self.name = None - self.type = None - - -class AzureEntityResource(Resource): - """The resource model definition for an Azure Resource Manager resource with an etag. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar etag: Resource Etag. - :vartype etag: str - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(AzureEntityResource, self).__init__(**kwargs) - self.etag = None - - -class AzureFileStorageLinkedService(LinkedService): - """Azure File Storage linked service. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of linked service.Constant filled by server. - :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference - :param description: Linked service description. - :type description: str - :param parameters: Parameters for linked service. - :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. - :type annotations: list[object] - :param host: Required. Host name of the server. Type: string (or Expression with resultType - string). - :type host: object - :param user_id: User ID to logon the server. Type: string (or Expression with resultType - string). - :type user_id: object - :param password: Password to logon the server. - :type password: ~azure.synapse.artifacts.models.SecretBase - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object - """ - - _validation = { - 'type': {'required': True}, - 'host': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, - 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'host': {'key': 'typeProperties.host', 'type': 'object'}, - 'user_id': {'key': 'typeProperties.userId', 'type': 'object'}, - 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, - } - - def __init__( - self, - *, - host: object, - additional_properties: Optional[Dict[str, object]] = None, - connect_via: Optional["IntegrationRuntimeReference"] = None, - description: Optional[str] = None, - parameters: Optional[Dict[str, "ParameterSpecification"]] = None, - annotations: Optional[List[object]] = None, - user_id: Optional[object] = None, - password: Optional["SecretBase"] = None, - encrypted_credential: Optional[object] = None, - **kwargs - ): - super(AzureFileStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.type = 'AzureFileStorage' # type: str - self.host = host - self.user_id = user_id - self.password = password - self.encrypted_credential = encrypted_credential - - -class AzureFileStorageLocation(DatasetLocation): - """The location of file server dataset. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. Type of dataset storage location.Constant filled by server. - :type type: str - :param folder_path: Specify the folder path of dataset. Type: string (or Expression with - resultType string). - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or Expression with resultType - string). - :type file_name: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - } - - def __init__( - self, - *, - additional_properties: Optional[Dict[str, object]] = None, - folder_path: Optional[object] = None, - file_name: Optional[object] = None, - **kwargs - ): - super(AzureFileStorageLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) - self.type = 'AzureFileStorageLocation' # type: str - - -class AzureFileStorageReadSettings(StoreReadSettings): - """Azure File Storage read settings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The read setting type.Constant filled by server. 
- :type type: str - :param max_concurrent_connections: The maximum concurrent connection count for the source data - store. Type: integer (or Expression with resultType integer). - :type max_concurrent_connections: object - :param recursive: If true, files under the folder path will be read recursively. Default is - true. Type: boolean (or Expression with resultType boolean). - :type recursive: object - :param wildcard_folder_path: Azure File Storage wildcardFolderPath. Type: string (or Expression - with resultType string). - :type wildcard_folder_path: object - :param wildcard_file_name: Azure File Storage wildcardFileName. Type: string (or Expression - with resultType string). - :type wildcard_file_name: object - :param enable_partition_discovery: Indicates whether to enable partition discovery. - :type enable_partition_discovery: bool - :param modified_datetime_start: The start of file's modified datetime. Type: string (or - Expression with resultType string). - :type modified_datetime_start: object - :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression - with resultType string). - :type modified_datetime_end: object - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'recursive': {'key': 'recursive', 'type': 'object'}, - 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, - 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, - 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, - 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, - 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, - } - - def __init__( - self, - *, - additional_properties: Optional[Dict[str, object]] = None, - max_concurrent_connections: Optional[object] = None, - recursive: Optional[object] = None, - wildcard_folder_path: Optional[object] = None, - wildcard_file_name: Optional[object] = None, - enable_partition_discovery: Optional[bool] = None, - modified_datetime_start: Optional[object] = None, - modified_datetime_end: Optional[object] = None, - **kwargs - ): - super(AzureFileStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) - self.type = 'AzureFileStorageReadSettings' # type: str - self.recursive = recursive - self.wildcard_folder_path = wildcard_folder_path - self.wildcard_file_name = wildcard_file_name - self.enable_partition_discovery = enable_partition_discovery - self.modified_datetime_start = modified_datetime_start - self.modified_datetime_end = modified_datetime_end + super(AzureFileStorageWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + self.type = 'AzureFileStorageWriteSettings' # type: str class AzureFunctionActivity(ExecutionActivity): @@ -4405,6 +5543,9 @@ class AzureMariaDBSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. 
Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -4421,6 +5562,7 @@ class AzureMariaDBSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -4432,10 +5574,11 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(AzureMariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(AzureMariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AzureMariaDBSource' # type: str self.query = query @@ -5130,6 +6273,9 @@ class AzureMySqlSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). 
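The additional_columns parameter introduced on the tabular sources above accepts a list of AdditionalColumns entries. A minimal usage sketch, assuming AdditionalColumns exposes name and value keyword arguments (that model is only referenced, not defined, in this hunk):

from azure.synapse.artifacts.models import AdditionalColumns, AzureMariaDBSource

# Hypothetical values; only the additional_columns and query keywords are
# taken directly from the constructor shown in this patch.
source = AzureMariaDBSource(
    query="SELECT id, amount FROM sales",
    additional_columns=[
        AdditionalColumns(name="source_system", value="mariadb-prod"),
    ],
)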
:type query: object """ @@ -5145,6 +6291,7 @@ class AzureMySqlSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -5156,10 +6303,11 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(AzureMySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(AzureMySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AzureMySqlSource' # type: str self.query = query @@ -5388,6 +6536,9 @@ class AzurePostgreSqlSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -5404,6 +6555,7 @@ class AzurePostgreSqlSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -5415,10 +6567,11 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(AzurePostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(AzurePostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AzurePostgreSqlSource' # type: str self.query = query @@ -5790,6 +6943,10 @@ class AzureSqlDatabaseLinkedService(LinkedService): :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). 
:type tenant: object + :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data + factory regions’ cloud type. Type: string (or Expression with resultType string). + :type azure_cloud_type: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -5813,6 +6970,7 @@ class AzureSqlDatabaseLinkedService(LinkedService): 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -5829,6 +6987,7 @@ def __init__( service_principal_id: Optional[object] = None, service_principal_key: Optional["SecretBase"] = None, tenant: Optional[object] = None, + azure_cloud_type: Optional[object] = None, encrypted_credential: Optional[object] = None, **kwargs ): @@ -5839,6 +6998,7 @@ def __init__( self.service_principal_id = service_principal_id self.service_principal_key = service_principal_key self.tenant = tenant + self.azure_cloud_type = azure_cloud_type self.encrypted_credential = encrypted_credential @@ -5874,6 +7034,10 @@ class AzureSqlDWLinkedService(LinkedService): :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). :type tenant: object + :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data + factory regions’ cloud type. Type: string (or Expression with resultType string). + :type azure_cloud_type: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -5897,6 +7061,7 @@ class AzureSqlDWLinkedService(LinkedService): 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -5913,6 +7078,7 @@ def __init__( service_principal_id: Optional[object] = None, service_principal_key: Optional["SecretBase"] = None, tenant: Optional[object] = None, + azure_cloud_type: Optional[object] = None, encrypted_credential: Optional[object] = None, **kwargs ): @@ -5923,6 +7089,7 @@ def __init__( self.service_principal_id = service_principal_id self.service_principal_key = service_principal_key self.tenant = tenant + self.azure_cloud_type = azure_cloud_type self.encrypted_credential = encrypted_credential @@ -6039,6 +7206,10 @@ class AzureSqlMILinkedService(LinkedService): :param tenant: The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). 
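The new azure_cloud_type property on the Azure SQL linked services selects the cloud used for service principal authentication. A sketch for AzureSqlDatabaseLinkedService, assuming the existing connection_string property and SecureString secret model (neither appears in this hunk):

from azure.synapse.artifacts.models import AzureSqlDatabaseLinkedService, SecureString

# azure_cloud_type accepts AzurePublic, AzureChina, AzureUsGovernment or
# AzureGermany; the remaining keywords mirror the service principal settings
# documented in the surrounding class.
linked_service = AzureSqlDatabaseLinkedService(
    connection_string="Server=tcp:myserver.database.windows.net;Database=mydb;",
    service_principal_id="<application-id>",
    service_principal_key=SecureString(value="<application-secret>"),
    tenant="<tenant-id>",
    azure_cloud_type="AzurePublic",
)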
:type tenant: object + :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data + factory regions’ cloud type. Type: string (or Expression with resultType string). + :type azure_cloud_type: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -6062,6 +7233,7 @@ class AzureSqlMILinkedService(LinkedService): 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -6078,6 +7250,7 @@ def __init__( service_principal_id: Optional[object] = None, service_principal_key: Optional["SecretBase"] = None, tenant: Optional[object] = None, + azure_cloud_type: Optional[object] = None, encrypted_credential: Optional[object] = None, **kwargs ): @@ -6088,6 +7261,7 @@ def __init__( self.service_principal_id = service_principal_id self.service_principal_key = service_principal_key self.tenant = tenant + self.azure_cloud_type = azure_cloud_type self.encrypted_credential = encrypted_credential @@ -6286,6 +7460,9 @@ class AzureSqlSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). :type sql_reader_query: object :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database @@ -6298,6 +7475,11 @@ class AzureSqlSource(TabularSource): ~azure.synapse.artifacts.models.StoredProcedureParameter] :param produce_additional_types: Which additional types to produce. :type produce_additional_types: object + :param partition_option: The partition mechanism that will be used for Sql read in parallel. + Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". + :type partition_option: object + :param partition_settings: The settings that will be leveraged for Sql source partitioning. 
+ :type partition_settings: ~azure.synapse.artifacts.models.SqlPartitionSettings """ _validation = { @@ -6311,10 +7493,13 @@ class AzureSqlSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, } def __init__( @@ -6325,18 +7510,23 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, sql_reader_query: Optional[object] = None, sql_reader_stored_procedure_name: Optional[object] = None, stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, produce_additional_types: Optional[object] = None, + partition_option: Optional[object] = None, + partition_settings: Optional["SqlPartitionSettings"] = None, **kwargs ): - super(AzureSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(AzureSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AzureSqlSource' # type: str self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters self.produce_additional_types = produce_additional_types + self.partition_option = partition_option + self.partition_settings = partition_settings class AzureSqlTableDataset(Dataset): @@ -6668,6 +7858,9 @@ class AzureTableSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param azure_table_source_query: Azure Table source query. Type: string (or Expression with resultType string). 
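AzureSqlSource now supports parallel reads through partition_option and partition_settings. A sketch, assuming SqlPartitionSettings carries the usual partition column and bound properties (that model is only referenced in this hunk):

from azure.synapse.artifacts.models import AzureSqlSource, SqlPartitionSettings

# partition_option is an open object; "DynamicRange" is one of the values
# listed in the docstring above. The SqlPartitionSettings field names below
# are assumptions.
source = AzureSqlSource(
    sql_reader_query="SELECT * FROM dbo.Orders",
    partition_option="DynamicRange",
    partition_settings=SqlPartitionSettings(
        partition_column_name="OrderId",
        partition_lower_bound="1",
        partition_upper_bound="1000000",
    ),
)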
:type azure_table_source_query: object @@ -6687,6 +7880,7 @@ class AzureTableSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'}, 'azure_table_source_ignore_table_not_found': {'key': 'azureTableSourceIgnoreTableNotFound', 'type': 'object'}, } @@ -6699,11 +7893,12 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, azure_table_source_query: Optional[object] = None, azure_table_source_ignore_table_not_found: Optional[object] = None, **kwargs ): - super(AzureTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(AzureTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AzureTableSource' # type: str self.azure_table_source_query = azure_table_source_query self.azure_table_source_ignore_table_not_found = azure_table_source_ignore_table_not_found @@ -7104,6 +8299,81 @@ def __init__( self.compression = compression +class FormatReadSettings(msrest.serialization.Model): + """Format read settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: BinaryReadSettings, DelimitedTextReadSettings, JsonReadSettings, XmlReadSettings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The read setting type.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'BinaryReadSettings': 'BinaryReadSettings', 'DelimitedTextReadSettings': 'DelimitedTextReadSettings', 'JsonReadSettings': 'JsonReadSettings', 'XmlReadSettings': 'XmlReadSettings'} + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + **kwargs + ): + super(FormatReadSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = 'FormatReadSettings' # type: str + + +class BinaryReadSettings(FormatReadSettings): + """Binary read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The read setting type.Constant filled by server. + :type type: str + :param compression_properties: Compression settings. 
+ :type compression_properties: ~azure.synapse.artifacts.models.CompressionReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'compression_properties': {'key': 'compressionProperties', 'type': 'CompressionReadSettings'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + compression_properties: Optional["CompressionReadSettings"] = None, + **kwargs + ): + super(BinaryReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'BinaryReadSettings' # type: str + self.compression_properties = compression_properties + + class BinarySink(CopySink): """A copy activity Binary sink. @@ -7186,6 +8456,8 @@ class BinarySource(CopySource): :type max_concurrent_connections: object :param store_settings: Binary store settings. :type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings + :param format_settings: Binary format settings. + :type format_settings: ~azure.synapse.artifacts.models.BinaryReadSettings """ _validation = { @@ -7199,6 +8471,7 @@ class BinarySource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'BinaryReadSettings'}, } def __init__( @@ -7209,11 +8482,13 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, store_settings: Optional["StoreReadSettings"] = None, + format_settings: Optional["BinaryReadSettings"] = None, **kwargs ): super(BinarySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'BinarySource' # type: str self.store_settings = store_settings + self.format_settings = format_settings class Trigger(msrest.serialization.Model): @@ -7277,7 +8552,7 @@ class MultiplePipelineTrigger(Trigger): """Base class for all triggers that support one to many model for trigger to pipeline. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: BlobEventsTrigger, BlobTrigger, ScheduleTrigger. + sub-classes are: BlobEventsTrigger, BlobTrigger, CustomEventsTrigger, ScheduleTrigger. Variables are only populated by the server, and will be ignored when sending a request. @@ -7314,7 +8589,7 @@ class MultiplePipelineTrigger(Trigger): } _subtype_map = { - 'type': {'BlobEventsTrigger': 'BlobEventsTrigger', 'BlobTrigger': 'BlobTrigger', 'ScheduleTrigger': 'ScheduleTrigger'} + 'type': {'BlobEventsTrigger': 'BlobEventsTrigger', 'BlobTrigger': 'BlobTrigger', 'CustomEventsTrigger': 'CustomEventsTrigger', 'ScheduleTrigger': 'ScheduleTrigger'} } def __init__( @@ -7731,6 +9006,9 @@ class CassandraSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). 
+ :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: Database query. Should be a SQL-92 query expression or Cassandra Query Language (CQL) command. Type: string (or Expression with resultType string). :type query: object @@ -7755,6 +9033,7 @@ class CassandraSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'consistency_level': {'key': 'consistencyLevel', 'type': 'str'}, } @@ -7767,11 +9046,12 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, consistency_level: Optional[Union[str, "CassandraSourceReadConsistencyLevels"]] = None, **kwargs ): - super(CassandraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(CassandraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'CassandraSource' # type: str self.query = query self.consistency_level = consistency_level @@ -8084,12 +9364,9 @@ class CommonDataServiceForAppsLinkedService(LinkedService): :param service_principal_id: The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). :type service_principal_id: object - :param service_principal_credential_type: The service principal credential type to use in - Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). Possible values include: - "ServicePrincipalKey", "ServicePrincipalCert". - :type service_principal_credential_type: str or - ~azure.synapse.artifacts.models.DynamicsServicePrincipalCredentialType + :param service_principal_credential_type: A string from ServicePrincipalCredentialEnum or an + expression. + :type service_principal_credential_type: object :param service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. 
If @@ -8124,7 +9401,7 @@ class CommonDataServiceForAppsLinkedService(LinkedService): 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'object'}, 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -8146,7 +9423,7 @@ def __init__( username: Optional[object] = None, password: Optional["SecretBase"] = None, service_principal_id: Optional[object] = None, - service_principal_credential_type: Optional[Union[str, "DynamicsServicePrincipalCredentialType"]] = None, + service_principal_credential_type: Optional[object] = None, service_principal_credential: Optional["SecretBase"] = None, encrypted_credential: Optional[object] = None, **kwargs @@ -8265,6 +9542,9 @@ class CommonDataServiceForAppsSource(CopySource): :param query: FetchXML is a proprietary query language that is used in Microsoft Common Data Service for Apps (online & on-premises). Type: string (or Expression with resultType string). :type query: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -8278,6 +9558,7 @@ class CommonDataServiceForAppsSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( @@ -8288,11 +9569,52 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): super(CommonDataServiceForAppsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'CommonDataServiceForAppsSource' # type: str self.query = query + self.additional_columns = additional_columns + + +class CompressionReadSettings(msrest.serialization.Model): + """Compression read settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: TarGZipReadSettings, TarReadSettings, ZipDeflateReadSettings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The Compression setting type.Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'TarGZipReadSettings': 'TarGZipReadSettings', 'TarReadSettings': 'TarReadSettings', 'ZipDeflateReadSettings': 'ZipDeflateReadSettings'} + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + **kwargs + ): + super(CompressionReadSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = 'CompressionReadSettings' # type: str class ConcurLinkedService(LinkedService): @@ -8313,6 +9635,9 @@ class ConcurLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] + :param connection_properties: Properties used to connect to Concur. It is mutually exclusive + with any other properties in the linked service. Type: object. + :type connection_properties: object :param client_id: Required. Application client_id supplied by Concur App Management. :type client_id: object :param username: Required. The user name that you use to access Concur Service. @@ -8349,6 +9674,7 @@ class ConcurLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, @@ -8368,6 +9694,7 @@ def __init__( description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, + connection_properties: Optional[object] = None, password: Optional["SecretBase"] = None, use_encrypted_endpoints: Optional[object] = None, use_host_verification: Optional[object] = None, @@ -8377,6 +9704,7 @@ def __init__( ): super(ConcurLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Concur' # type: str + self.connection_properties = connection_properties self.client_id = client_id self.username = username self.password = password @@ -8476,6 +9804,9 @@ class ConcurSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
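FormatReadSettings and CompressionReadSettings define the polymorphic read-settings hierarchy behind the new format_settings property on BinarySource. A sketch, assuming ZipDeflateReadSettings (listed as a known subclass in the subtype map above) can be constructed with defaults:

from azure.synapse.artifacts.models import (
    BinaryReadSettings,
    BinarySource,
    ZipDeflateReadSettings,
)

# BinaryReadSettings wraps the compression settings; BinarySource attaches
# them through the new format_settings keyword.
source = BinarySource(
    format_settings=BinaryReadSettings(
        compression_properties=ZipDeflateReadSettings(),
    ),
)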
:type query: object @@ -8492,6 +9823,7 @@ class ConcurSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -8503,10 +9835,11 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(ConcurSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(ConcurSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'ConcurSource' # type: str self.query = query @@ -8562,10 +9895,20 @@ class CopyActivity(ExecutionActivity): EnableSkipIncompatibleRow is true. :type redirect_incompatible_row_settings: ~azure.synapse.artifacts.models.RedirectIncompatibleRowSettings + :param log_storage_settings: (Deprecated. Please use LogSettings) Log storage settings customer + need to provide when enabling session log. + :type log_storage_settings: ~azure.synapse.artifacts.models.LogStorageSettings + :param log_settings: Log settings customer needs provide when enabling log. + :type log_settings: ~azure.synapse.artifacts.models.LogSettings :param preserve_rules: Preserve Rules. :type preserve_rules: list[object] :param preserve: Preserve rules. :type preserve: list[object] + :param validate_data_consistency: Whether to enable Data Consistency validation. Type: boolean + (or Expression with resultType boolean). + :type validate_data_consistency: object + :param skip_error_file: Specify the fault tolerance for data consistency. 
+ :type skip_error_file: ~azure.synapse.artifacts.models.SkipErrorFile """ _validation = { @@ -8595,8 +9938,12 @@ class CopyActivity(ExecutionActivity): 'data_integration_units': {'key': 'typeProperties.dataIntegrationUnits', 'type': 'object'}, 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, + 'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'}, + 'log_settings': {'key': 'typeProperties.logSettings', 'type': 'LogSettings'}, 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, + 'validate_data_consistency': {'key': 'typeProperties.validateDataConsistency', 'type': 'object'}, + 'skip_error_file': {'key': 'typeProperties.skipErrorFile', 'type': 'SkipErrorFile'}, } def __init__( @@ -8620,8 +9967,12 @@ def __init__( data_integration_units: Optional[object] = None, enable_skip_incompatible_row: Optional[object] = None, redirect_incompatible_row_settings: Optional["RedirectIncompatibleRowSettings"] = None, + log_storage_settings: Optional["LogStorageSettings"] = None, + log_settings: Optional["LogSettings"] = None, preserve_rules: Optional[List[object]] = None, preserve: Optional[List[object]] = None, + validate_data_consistency: Optional[object] = None, + skip_error_file: Optional["SkipErrorFile"] = None, **kwargs ): super(CopyActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) @@ -8637,8 +9988,40 @@ def __init__( self.data_integration_units = data_integration_units self.enable_skip_incompatible_row = enable_skip_incompatible_row self.redirect_incompatible_row_settings = redirect_incompatible_row_settings + self.log_storage_settings = log_storage_settings + self.log_settings = log_settings self.preserve_rules = preserve_rules self.preserve = preserve + self.validate_data_consistency = validate_data_consistency + self.skip_error_file = skip_error_file + + +class CopyActivityLogSettings(msrest.serialization.Model): + """Settings for copy activity log. + + :param log_level: Gets or sets the log level, support: Info, Warning. Type: string (or + Expression with resultType string). + :type log_level: object + :param enable_reliable_logging: Specifies whether to enable reliable logging. Type: boolean (or + Expression with resultType boolean). + :type enable_reliable_logging: object + """ + + _attribute_map = { + 'log_level': {'key': 'logLevel', 'type': 'object'}, + 'enable_reliable_logging': {'key': 'enableReliableLogging', 'type': 'object'}, + } + + def __init__( + self, + *, + log_level: Optional[object] = None, + enable_reliable_logging: Optional[object] = None, + **kwargs + ): + super(CopyActivityLogSettings, self).__init__(**kwargs) + self.log_level = log_level + self.enable_reliable_logging = enable_reliable_logging class CopyTranslator(msrest.serialization.Model): @@ -8985,6 +10368,9 @@ class CosmosDbMongoDbApiSource(CopySource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
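CopyActivityLogSettings is defined in full above, so its construction is straightforward; the LogSettings wrapper referenced by CopyActivity.log_settings sits outside this hunk. A sketch:

from azure.synapse.artifacts.models import CopyActivityLogSettings

# Both properties are typed as object, so literals or expression objects are
# accepted.
log_settings = CopyActivityLogSettings(
    log_level="Warning",
    enable_reliable_logging=True,
)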
:type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -9001,6 +10387,7 @@ class CosmosDbMongoDbApiSource(CopySource): 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( @@ -9014,6 +10401,7 @@ def __init__( cursor_methods: Optional["MongoDbCursorMethodsProperties"] = None, batch_size: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): super(CosmosDbMongoDbApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) @@ -9022,6 +10410,7 @@ def __init__( self.cursor_methods = cursor_methods self.batch_size = batch_size self.query_timeout = query_timeout + self.additional_columns = additional_columns class CosmosDbSqlApiCollectionDataset(Dataset): @@ -9183,6 +10572,12 @@ class CosmosDbSqlApiSource(CopySource): :param preferred_regions: Preferred regions. Type: array of strings (or Expression with resultType array of strings). :type preferred_regions: object + :param detect_datetime: Whether detect primitive values as datetime values. Type: boolean (or + Expression with resultType boolean). + :type detect_datetime: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -9198,6 +10593,8 @@ class CosmosDbSqlApiSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, 'page_size': {'key': 'pageSize', 'type': 'object'}, 'preferred_regions': {'key': 'preferredRegions', 'type': 'object'}, + 'detect_datetime': {'key': 'detectDatetime', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( @@ -9210,6 +10607,8 @@ def __init__( query: Optional[object] = None, page_size: Optional[object] = None, preferred_regions: Optional[object] = None, + detect_datetime: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): super(CosmosDbSqlApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) @@ -9217,6 +10616,8 @@ def __init__( self.query = query self.page_size = page_size self.preferred_regions = preferred_regions + self.detect_datetime = detect_datetime + self.additional_columns = additional_columns class CouchbaseLinkedService(LinkedService): @@ -9306,6 +10707,9 @@ class CouchbaseSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. 
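CosmosDbSqlApiSource gains detect_datetime alongside additional_columns. A sketch using only keywords shown in the constructor above, with illustrative values:

from azure.synapse.artifacts.models import CosmosDbSqlApiSource

# detect_datetime and page_size accept expressions as well as literals
# because they are typed as object.
source = CosmosDbSqlApiSource(
    query="SELECT * FROM c WHERE c.type = 'order'",
    page_size=1000,
    detect_datetime=True,
)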
Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -9322,6 +10726,7 @@ class CouchbaseSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -9333,10 +10738,11 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(CouchbaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(CouchbaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'CouchbaseSource' # type: str self.query = query @@ -9542,6 +10948,9 @@ class CustomActivity(ExecutionActivity): :param retention_time_in_days: The retention time for the files submitted for custom activity. Type: double (or Expression with resultType double). :type retention_time_in_days: object + :param auto_user_specification: Elevation level and scope for the user, default is nonadmin + task. Type: string (or Expression with resultType double). + :type auto_user_specification: object """ _validation = { @@ -9565,6 +10974,7 @@ class CustomActivity(ExecutionActivity): 'reference_objects': {'key': 'typeProperties.referenceObjects', 'type': 'CustomActivityReferenceObject'}, 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': '{object}'}, 'retention_time_in_days': {'key': 'typeProperties.retentionTimeInDays', 'type': 'object'}, + 'auto_user_specification': {'key': 'typeProperties.autoUserSpecification', 'type': 'object'}, } def __init__( @@ -9583,6 +10993,7 @@ def __init__( reference_objects: Optional["CustomActivityReferenceObject"] = None, extended_properties: Optional[Dict[str, object]] = None, retention_time_in_days: Optional[object] = None, + auto_user_specification: Optional[object] = None, **kwargs ): super(CustomActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) @@ -9593,6 +11004,7 @@ def __init__( self.reference_objects = reference_objects self.extended_properties = extended_properties self.retention_time_in_days = retention_time_in_days + self.auto_user_specification = auto_user_specification class CustomActivityReferenceObject(msrest.serialization.Model): @@ -9773,6 +11185,80 @@ def __init__( self.key = key +class CustomEventsTrigger(MultiplePipelineTrigger): + """Trigger that runs every time a custom event is received. 
+ + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Trigger type.Constant filled by server. + :type type: str + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. + :type annotations: list[object] + :param pipelines: Pipelines that need to be started. + :type pipelines: list[~azure.synapse.artifacts.models.TriggerPipelineReference] + :param subject_begins_with: The event subject must begin with the pattern provided for trigger + to fire. At least one of these must be provided: subjectBeginsWith, subjectEndsWith. + :type subject_begins_with: str + :param subject_ends_with: The event subject must end with the pattern provided for trigger to + fire. At least one of these must be provided: subjectBeginsWith, subjectEndsWith. + :type subject_ends_with: str + :param events: Required. The list of event types that cause this trigger to fire. + :type events: list[object] + :param scope: Required. The ARM resource ID of the Azure Event Grid Topic. + :type scope: str + """ + + _validation = { + 'type': {'required': True}, + 'runtime_state': {'readonly': True}, + 'events': {'required': True}, + 'scope': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + 'subject_begins_with': {'key': 'typeProperties.subjectBeginsWith', 'type': 'str'}, + 'subject_ends_with': {'key': 'typeProperties.subjectEndsWith', 'type': 'str'}, + 'events': {'key': 'typeProperties.events', 'type': '[object]'}, + 'scope': {'key': 'typeProperties.scope', 'type': 'str'}, + } + + def __init__( + self, + *, + events: List[object], + scope: str, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + annotations: Optional[List[object]] = None, + pipelines: Optional[List["TriggerPipelineReference"]] = None, + subject_begins_with: Optional[str] = None, + subject_ends_with: Optional[str] = None, + **kwargs + ): + super(CustomEventsTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) + self.type = 'CustomEventsTrigger' # type: str + self.subject_begins_with = subject_begins_with + self.subject_ends_with = subject_ends_with + self.events = events + self.scope = scope + + class CustomSetupBase(msrest.serialization.Model): """The base definition of the custom setup. @@ -10691,6 +12177,10 @@ class DataFlowSink(Transformation): :type description: str :param dataset: Dataset reference. :type dataset: ~azure.synapse.artifacts.models.DatasetReference + :param linked_service: Linked service reference. 
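CustomEventsTrigger requires the list of event types and the ARM resource ID of the Event Grid topic, and per the docstring at least one subject filter must also be supplied. A sketch with placeholder identifiers:

from azure.synapse.artifacts.models import CustomEventsTrigger

# events and scope are the required parameters; subject_begins_with is one of
# the two optional subject filters.
trigger = CustomEventsTrigger(
    events=["OrderCreated"],
    scope=(
        "/subscriptions/<subscription-id>/resourceGroups/<resource-group>"
        "/providers/Microsoft.EventGrid/topics/<topic-name>"
    ),
    subject_begins_with="/orders/",
)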
+ :type linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference + :param schema_linked_service: Schema linked service reference. + :type schema_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference """ _validation = { @@ -10701,6 +12191,8 @@ class DataFlowSink(Transformation): 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, + 'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'}, + 'schema_linked_service': {'key': 'schemaLinkedService', 'type': 'LinkedServiceReference'}, } def __init__( @@ -10709,10 +12201,14 @@ def __init__( name: str, description: Optional[str] = None, dataset: Optional["DatasetReference"] = None, + linked_service: Optional["LinkedServiceReference"] = None, + schema_linked_service: Optional["LinkedServiceReference"] = None, **kwargs ): super(DataFlowSink, self).__init__(name=name, description=description, **kwargs) self.dataset = dataset + self.linked_service = linked_service + self.schema_linked_service = schema_linked_service class DataFlowSource(Transformation): @@ -10726,6 +12222,10 @@ class DataFlowSource(Transformation): :type description: str :param dataset: Dataset reference. :type dataset: ~azure.synapse.artifacts.models.DatasetReference + :param linked_service: Linked service reference. + :type linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference + :param schema_linked_service: Schema linked service reference. + :type schema_linked_service: ~azure.synapse.artifacts.models.LinkedServiceReference """ _validation = { @@ -10736,6 +12236,8 @@ class DataFlowSource(Transformation): 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, + 'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'}, + 'schema_linked_service': {'key': 'schemaLinkedService', 'type': 'LinkedServiceReference'}, } def __init__( @@ -10744,10 +12246,14 @@ def __init__( name: str, description: Optional[str] = None, dataset: Optional["DatasetReference"] = None, + linked_service: Optional["LinkedServiceReference"] = None, + schema_linked_service: Optional["LinkedServiceReference"] = None, **kwargs ): super(DataFlowSource, self).__init__(name=name, description=description, **kwargs) self.dataset = dataset + self.linked_service = linked_service + self.schema_linked_service = schema_linked_service class DataFlowSourceSetting(msrest.serialization.Model): @@ -10937,7 +12443,7 @@ class DatasetCompression(msrest.serialization.Model): """The compression method used on a dataset. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DatasetBZip2Compression, DatasetDeflateCompression, DatasetGZipCompression, DatasetZipDeflateCompression. + sub-classes are: DatasetBZip2Compression, DatasetDeflateCompression, DatasetGZipCompression, DatasetTarCompression, DatasetTarGZipCompression, DatasetZipDeflateCompression. All required parameters must be populated in order to send to Azure. 
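DataFlowSource and DataFlowSink can now point at a linked service (and a separate schema linked service) in addition to a dataset. A sketch, assuming LinkedServiceReference takes type and reference_name keywords in this models module:

from azure.synapse.artifacts.models import DataFlowSource, LinkedServiceReference

# The reference name is a placeholder; dataset stays optional in the
# constructor shown above.
source = DataFlowSource(
    name="sourceOrders",
    linked_service=LinkedServiceReference(
        type="LinkedServiceReference",
        reference_name="AdlsGen2LinkedService",
    ),
)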
@@ -10958,7 +12464,7 @@ class DatasetCompression(msrest.serialization.Model): } _subtype_map = { - 'type': {'BZip2': 'DatasetBZip2Compression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'ZipDeflate': 'DatasetZipDeflateCompression'} + 'type': {'BZip2': 'DatasetBZip2Compression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'Tar': 'DatasetTarCompression', 'TarGZip': 'DatasetTarGZipCompression', 'ZipDeflate': 'DatasetZipDeflateCompression'} } def __init__( @@ -11070,8 +12576,8 @@ class DatasetDeflateCompression(DatasetCompression): :type additional_properties: dict[str, object] :param type: Required. Type of dataset compression.Constant filled by server. :type type: str - :param level: The Deflate compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~azure.synapse.artifacts.models.DatasetCompressionLevel + :param level: The Deflate compression level. + :type level: object """ _validation = { @@ -11081,14 +12587,14 @@ class DatasetDeflateCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, - level: Optional[Union[str, "DatasetCompressionLevel"]] = None, + level: Optional[object] = None, **kwargs ): super(DatasetDeflateCompression, self).__init__(additional_properties=additional_properties, **kwargs) @@ -11127,8 +12633,8 @@ class DatasetGZipCompression(DatasetCompression): :type additional_properties: dict[str, object] :param type: Required. Type of dataset compression.Constant filled by server. :type type: str - :param level: The GZip compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~azure.synapse.artifacts.models.DatasetCompressionLevel + :param level: The GZip compression level. + :type level: object """ _validation = { @@ -11138,14 +12644,14 @@ class DatasetGZipCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, - level: Optional[Union[str, "DatasetCompressionLevel"]] = None, + level: Optional[object] = None, **kwargs ): super(DatasetGZipCompression, self).__init__(additional_properties=additional_properties, **kwargs) @@ -11302,6 +12808,73 @@ def __init__( self.type = type +class DatasetTarCompression(DatasetCompression): + """The Tar archive method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset compression.Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + **kwargs + ): + super(DatasetTarCompression, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'Tar' # type: str + + +class DatasetTarGZipCompression(DatasetCompression): + """The TarGZip compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset compression.Constant filled by server. + :type type: str + :param level: The TarGZip compression level. + :type level: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + level: Optional[object] = None, + **kwargs + ): + super(DatasetTarGZipCompression, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'TarGZip' # type: str + self.level = level + + class DatasetZipDeflateCompression(DatasetCompression): """The ZipDeflate compression method used on a dataset. @@ -11312,8 +12885,8 @@ class DatasetZipDeflateCompression(DatasetCompression): :type additional_properties: dict[str, object] :param type: Required. Type of dataset compression.Constant filled by server. :type type: str - :param level: The ZipDeflate compression level. Possible values include: "Optimal", "Fastest". - :type level: str or ~azure.synapse.artifacts.models.DatasetCompressionLevel + :param level: The ZipDeflate compression level. + :type level: object """ _validation = { @@ -11323,14 +12896,14 @@ class DatasetZipDeflateCompression(DatasetCompression): _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, - level: Optional[Union[str, "DatasetCompressionLevel"]] = None, + level: Optional[object] = None, **kwargs ): super(DatasetZipDeflateCompression, self).__init__(additional_properties=additional_properties, **kwargs) @@ -11356,29 +12929,34 @@ class Db2LinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param server: Required. Server name for connection. Type: string (or Expression with - resultType string). + :param connection_string: The connection string. It is mutually exclusive with server, + database, authenticationType, userName, packageCollection and certificateCommonName property. + Type: string, SecureString or AzureKeyVaultSecretReference. + :type connection_string: object + :param server: Required. Server name for connection. It is mutually exclusive with + connectionString property. Type: string (or Expression with resultType string). 
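Tar and TarGZip join the dataset compression methods, and the level property on the Deflate, GZip, TarGZip and ZipDeflate variants is now an open object rather than an enum. A sketch:

from azure.synapse.artifacts.models import (
    DatasetGZipCompression,
    DatasetTarGZipCompression,
)

# "Optimal" matches one of the values the previous enum allowed; an
# expression object could be passed instead now that level is typed as object.
targzip_compression = DatasetTarGZipCompression(level="Optimal")
gzip_compression = DatasetGZipCompression(level="Optimal")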
:type server: object - :param database: Required. Database name for connection. Type: string (or Expression with - resultType string). + :param database: Required. Database name for connection. It is mutually exclusive with + connectionString property. Type: string (or Expression with resultType string). :type database: object - :param authentication_type: AuthenticationType to be used for connection. Possible values - include: "Basic". + :param authentication_type: AuthenticationType to be used for connection. It is mutually + exclusive with connectionString property. Possible values include: "Basic". :type authentication_type: str or ~azure.synapse.artifacts.models.Db2AuthenticationType - :param username: Username for authentication. Type: string (or Expression with resultType - string). + :param username: Username for authentication. It is mutually exclusive with connectionString + property. Type: string (or Expression with resultType string). :type username: object :param password: Password for authentication. :type password: ~azure.synapse.artifacts.models.SecretBase - :param package_collection: Under where packages are created when querying database. Type: - string (or Expression with resultType string). + :param package_collection: Under where packages are created when querying database. It is + mutually exclusive with connectionString property. Type: string (or Expression with resultType + string). :type package_collection: object - :param certificate_common_name: Certificate Common Name when TLS is enabled. Type: string (or - Expression with resultType string). + :param certificate_common_name: Certificate Common Name when TLS is enabled. It is mutually + exclusive with connectionString property. Type: string (or Expression with resultType string). :type certificate_common_name: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). + encrypted using the integration runtime credential manager. It is mutually exclusive with + connectionString property. Type: string (or Expression with resultType string). 
:type encrypted_credential: object """ @@ -11395,6 +12973,7 @@ class Db2LinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, 'server': {'key': 'typeProperties.server', 'type': 'object'}, 'database': {'key': 'typeProperties.database', 'type': 'object'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, @@ -11415,6 +12994,7 @@ def __init__( description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, + connection_string: Optional[object] = None, authentication_type: Optional[Union[str, "Db2AuthenticationType"]] = None, username: Optional[object] = None, password: Optional["SecretBase"] = None, @@ -11425,6 +13005,7 @@ def __init__( ): super(Db2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Db2' # type: str + self.connection_string = connection_string self.server = server self.database = database self.authentication_type = authentication_type @@ -11457,6 +13038,9 @@ class Db2Source(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object """ @@ -11472,6 +13056,7 @@ class Db2Source(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -11483,10 +13068,11 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(Db2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(Db2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'Db2Source' # type: str self.query = query @@ -11607,6 +13193,8 @@ class DeleteActivity(ExecutionActivity): :type log_storage_settings: ~azure.synapse.artifacts.models.LogStorageSettings :param dataset: Required. Delete activity dataset reference. 
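Db2LinkedService now also exposes connectionString while keeping server and database required. A minimal sketch, assuming the keyword names shown in the attribute map above (host and database names are illustrative):

    from azure.synapse.artifacts.models import Db2LinkedService

    # server/database remain required; the new connection_string property is documented
    # as mutually exclusive with them, so it is left unset here
    db2_ls = Db2LinkedService(server="db2.contoso.com", database="SAMPLE")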
:type dataset: ~azure.synapse.artifacts.models.DatasetReference + :param store_settings: Delete activity store settings. + :type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings """ _validation = { @@ -11630,6 +13218,7 @@ class DeleteActivity(ExecutionActivity): 'enable_logging': {'key': 'typeProperties.enableLogging', 'type': 'object'}, 'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'}, 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, + 'store_settings': {'key': 'typeProperties.storeSettings', 'type': 'StoreReadSettings'}, } def __init__( @@ -11647,6 +13236,7 @@ def __init__( max_concurrent_connections: Optional[int] = None, enable_logging: Optional[object] = None, log_storage_settings: Optional["LogStorageSettings"] = None, + store_settings: Optional["StoreReadSettings"] = None, **kwargs ): super(DeleteActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) @@ -11656,6 +13246,7 @@ def __init__( self.enable_logging = enable_logging self.log_storage_settings = log_storage_settings self.dataset = dataset + self.store_settings = store_settings class DeleteDataFlowDebugSessionRequest(msrest.serialization.Model): @@ -11725,11 +13316,10 @@ class DelimitedTextDataset(Dataset): resultType string). :type encoding_name: object :param compression_codec: Possible values include: "bzip2", "gzip", "deflate", "zipDeflate", - "snappy", "lz4". - :type compression_codec: str or ~azure.synapse.artifacts.models.DelimitedTextCompressionCodec - :param compression_level: The data compression method used for DelimitedText. Possible values - include: "Optimal", "Fastest". - :type compression_level: str or ~azure.synapse.artifacts.models.DatasetCompressionLevel + "snappy", "lz4", "tar", "tarGZip". + :type compression_codec: str or ~azure.synapse.artifacts.models.CompressionCodec + :param compression_level: The data compression method used for DelimitedText. + :type compression_level: object :param quote_char: The quote character. Type: string (or Expression with resultType string). :type quote_char: object :param escape_char: The escape character. Type: string (or Expression with resultType string). 
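DeleteActivity gains an optional storeSettings slot next to its dataset reference. A minimal sketch, assuming the DatasetReference and AzureBlobFSReadSettings constructors from elsewhere in this models module (names and the wildcard are illustrative):

    from azure.synapse.artifacts.models import (
        AzureBlobFSReadSettings,
        DatasetReference,
        DeleteActivity,
    )

    delete = DeleteActivity(
        name="CleanStagingFiles",
        dataset=DatasetReference(type="DatasetReference", reference_name="StagingFolder"),
        store_settings=AzureBlobFSReadSettings(recursive=True, wildcard_file_name="*.tmp"),
    )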
@@ -11762,7 +13352,7 @@ class DelimitedTextDataset(Dataset): 'row_delimiter': {'key': 'typeProperties.rowDelimiter', 'type': 'object'}, 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'}, - 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'str'}, + 'compression_level': {'key': 'typeProperties.compressionLevel', 'type': 'object'}, 'quote_char': {'key': 'typeProperties.quoteChar', 'type': 'object'}, 'escape_char': {'key': 'typeProperties.escapeChar', 'type': 'object'}, 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, @@ -11784,8 +13374,8 @@ def __init__( column_delimiter: Optional[object] = None, row_delimiter: Optional[object] = None, encoding_name: Optional[object] = None, - compression_codec: Optional[Union[str, "DelimitedTextCompressionCodec"]] = None, - compression_level: Optional[Union[str, "DatasetCompressionLevel"]] = None, + compression_codec: Optional[Union[str, "CompressionCodec"]] = None, + compression_level: Optional[object] = None, quote_char: Optional[object] = None, escape_char: Optional[object] = None, first_row_as_header: Optional[object] = None, @@ -11806,45 +13396,6 @@ def __init__( self.null_value = null_value -class FormatReadSettings(msrest.serialization.Model): - """Format read settings. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DelimitedTextReadSettings. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The read setting type.Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'DelimitedTextReadSettings': 'DelimitedTextReadSettings'} - } - - def __init__( - self, - *, - additional_properties: Optional[Dict[str, object]] = None, - **kwargs - ): - super(FormatReadSettings, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.type = 'FormatReadSettings' # type: str - - class DelimitedTextReadSettings(FormatReadSettings): """Delimited text read settings. @@ -11858,6 +13409,8 @@ class DelimitedTextReadSettings(FormatReadSettings): :param skip_line_count: Indicates the number of non-empty rows to skip when reading data from input files. Type: integer (or Expression with resultType integer). :type skip_line_count: object + :param compression_properties: Compression settings. 
+ :type compression_properties: ~azure.synapse.artifacts.models.CompressionReadSettings """ _validation = { @@ -11868,6 +13421,7 @@ class DelimitedTextReadSettings(FormatReadSettings): 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, 'skip_line_count': {'key': 'skipLineCount', 'type': 'object'}, + 'compression_properties': {'key': 'compressionProperties', 'type': 'CompressionReadSettings'}, } def __init__( @@ -11875,11 +13429,13 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, skip_line_count: Optional[object] = None, + compression_properties: Optional["CompressionReadSettings"] = None, **kwargs ): super(DelimitedTextReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) self.type = 'DelimitedTextReadSettings' # type: str self.skip_line_count = skip_line_count + self.compression_properties = compression_properties class DelimitedTextSink(CopySink): @@ -11971,6 +13527,9 @@ class DelimitedTextSource(CopySource): :type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings :param format_settings: DelimitedText format settings. :type format_settings: ~azure.synapse.artifacts.models.DelimitedTextReadSettings + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -11985,6 +13544,7 @@ class DelimitedTextSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSettings'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( @@ -11996,12 +13556,14 @@ def __init__( max_concurrent_connections: Optional[object] = None, store_settings: Optional["StoreReadSettings"] = None, format_settings: Optional["DelimitedTextReadSettings"] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): super(DelimitedTextSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'DelimitedTextSource' # type: str self.store_settings = store_settings self.format_settings = format_settings + self.additional_columns = additional_columns class DelimitedTextWriteSettings(FormatWriteSettings): @@ -12020,6 +13582,13 @@ class DelimitedTextWriteSettings(FormatWriteSettings): :param file_extension: Required. The file extension used to create the files. Type: string (or Expression with resultType string). :type file_extension: object + :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + specified count. Type: integer (or Expression with resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). 
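DelimitedTextReadSettings picks up a compressionProperties slot alongside skipLineCount. A minimal sketch using only the signature shown above; the new slot would take a CompressionReadSettings instance when the input files are compressed:

    from azure.synapse.artifacts.models import DelimitedTextReadSettings

    # skip one header row; compression_properties is left unset for plain-text input
    read_settings = DelimitedTextReadSettings(skip_line_count=1)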
+ :type file_name_prefix: object """ _validation = { @@ -12032,6 +13601,8 @@ class DelimitedTextWriteSettings(FormatWriteSettings): 'type': {'key': 'type', 'type': 'str'}, 'quote_all_text': {'key': 'quoteAllText', 'type': 'object'}, 'file_extension': {'key': 'fileExtension', 'type': 'object'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, } def __init__( @@ -12040,12 +13611,16 @@ def __init__( file_extension: object, additional_properties: Optional[Dict[str, object]] = None, quote_all_text: Optional[object] = None, + max_rows_per_file: Optional[object] = None, + file_name_prefix: Optional[object] = None, **kwargs ): super(DelimitedTextWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) self.type = 'DelimitedTextWriteSettings' # type: str self.quote_all_text = quote_all_text self.file_extension = file_extension + self.max_rows_per_file = max_rows_per_file + self.file_name_prefix = file_name_prefix class DependencyReference(msrest.serialization.Model): @@ -12287,6 +13862,9 @@ class DocumentDbCollectionSource(CopySource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -12302,6 +13880,7 @@ class DocumentDbCollectionSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( @@ -12314,6 +13893,7 @@ def __init__( query: Optional[object] = None, nesting_separator: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): super(DocumentDbCollectionSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) @@ -12321,6 +13901,7 @@ def __init__( self.query = query self.nesting_separator = nesting_separator self.query_timeout = query_timeout + self.additional_columns = additional_columns class DrillLinkedService(LinkedService): @@ -12410,6 +13991,9 @@ class DrillSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
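DelimitedTextWriteSettings adds maxRowsPerFile and fileNamePrefix for splitting output. A minimal sketch using the signature shown above (the extension, row cap and prefix are illustrative):

    from azure.synapse.artifacts.models import DelimitedTextWriteSettings

    # cap each generated file at 10000 rows and prefix the generated file names
    write_settings = DelimitedTextWriteSettings(
        file_extension=".csv",
        max_rows_per_file=10000,
        file_name_prefix="part",
    )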
:type query: object @@ -12426,6 +14010,7 @@ class DrillSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -12437,10 +14022,11 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(DrillSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(DrillSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'DrillSource' # type: str self.query = query @@ -12787,9 +14373,17 @@ class DynamicsAXSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object + :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout + to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string + (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type http_request_timeout: object """ _validation = { @@ -12803,7 +14397,9 @@ class DynamicsAXSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } def __init__( @@ -12814,12 +14410,15 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, + http_request_timeout: Optional[object] = None, **kwargs ): - super(DynamicsAXSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(DynamicsAXSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'DynamicsAXSource' # type: str self.query = query + self.http_request_timeout = http_request_timeout class DynamicsCrmEntityDataset(Dataset): @@ -12943,12 +14542,9 @@ class DynamicsCrmLinkedService(LinkedService): :param service_principal_id: The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). :type service_principal_id: object - :param service_principal_credential_type: The service principal credential type to use in - Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' - for certificate. Type: string (or Expression with resultType string). Possible values include: - "ServicePrincipalKey", "ServicePrincipalCert". - :type service_principal_credential_type: str or - ~azure.synapse.artifacts.models.DynamicsServicePrincipalCredentialType + :param service_principal_credential_type: A string from ServicePrincipalCredentialEnum or an + expression. + :type service_principal_credential_type: object :param service_principal_credential: The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. 
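DynamicsAXSource, like the other tabular sources in this change, accepts additionalColumns as well as an httpRequestTimeout. A minimal sketch, assuming AdditionalColumns carries a name/value pair as the docstrings above describe (the column name, value and timeout are illustrative):

    from azure.synapse.artifacts.models import AdditionalColumns, DynamicsAXSource

    source = DynamicsAXSource(
        http_request_timeout="00:10:00",
        additional_columns=[AdditionalColumns(name="source_system", value="DynamicsAX")],
    )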
If @@ -12983,7 +14579,7 @@ class DynamicsCrmLinkedService(LinkedService): 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'str'}, + 'service_principal_credential_type': {'key': 'typeProperties.servicePrincipalCredentialType', 'type': 'object'}, 'service_principal_credential': {'key': 'typeProperties.servicePrincipalCredential', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -13005,7 +14601,7 @@ def __init__( username: Optional[object] = None, password: Optional["SecretBase"] = None, service_principal_id: Optional[object] = None, - service_principal_credential_type: Optional[Union[str, "DynamicsServicePrincipalCredentialType"]] = None, + service_principal_credential_type: Optional[object] = None, service_principal_credential: Optional["SecretBase"] = None, encrypted_credential: Optional[object] = None, **kwargs @@ -13124,6 +14720,9 @@ class DynamicsCrmSource(CopySource): :param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics CRM (online & on-premises). Type: string (or Expression with resultType string). :type query: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -13137,6 +14736,7 @@ class DynamicsCrmSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( @@ -13147,11 +14747,13 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): super(DynamicsCrmSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'DynamicsCrmSource' # type: str self.query = query + self.additional_columns = additional_columns class DynamicsEntityDataset(Dataset): @@ -13247,18 +14849,18 @@ class DynamicsLinkedService(LinkedService): :type deployment_type: str or ~azure.synapse.artifacts.models.DynamicsDeploymentType :param host_name: The host name of the on-premises Dynamics server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). - :type host_name: str + :type host_name: object :param port: The port of on-premises Dynamics server. The property is required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. - :type port: str + :type port: object :param service_uri: The URL to the Microsoft Dynamics server. The property is required for on- line and not allowed for on-prem. Type: string (or Expression with resultType string). 
- :type service_uri: str + :type service_uri: object :param organization_name: The organization name of the Dynamics instance. The property is required for on-prem and required for online when there are more than one Dynamics instances associated with the user. Type: string (or Expression with resultType string). - :type organization_name: str + :type organization_name: object :param authentication_type: Required. The authentication type to connect to Dynamics server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or Expression with @@ -13304,10 +14906,10 @@ class DynamicsLinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'}, - 'host_name': {'key': 'typeProperties.hostName', 'type': 'str'}, - 'port': {'key': 'typeProperties.port', 'type': 'str'}, - 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'str'}, - 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'str'}, + 'host_name': {'key': 'typeProperties.hostName', 'type': 'object'}, + 'port': {'key': 'typeProperties.port', 'type': 'object'}, + 'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'}, + 'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, @@ -13327,10 +14929,10 @@ def __init__( description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - host_name: Optional[str] = None, - port: Optional[str] = None, - service_uri: Optional[str] = None, - organization_name: Optional[str] = None, + host_name: Optional[object] = None, + port: Optional[object] = None, + service_uri: Optional[object] = None, + organization_name: Optional[object] = None, username: Optional[object] = None, password: Optional["SecretBase"] = None, service_principal_id: Optional[object] = None, @@ -13453,6 +15055,9 @@ class DynamicsSource(CopySource): :param query: FetchXML is a proprietary query language that is used in Microsoft Dynamics (online & on-premises). Type: string (or Expression with resultType string). :type query: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). 
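DynamicsLinkedService's hostName, port, serviceUri and organizationName are widened from str to object, so expression payloads can be supplied as well as literals. A minimal sketch, assuming deploymentType and authenticationType stay required as documented and that an expression is passed as the usual type/value dictionary (the URL expression is illustrative):

    from azure.synapse.artifacts.models import DynamicsLinkedService

    dynamics_ls = DynamicsLinkedService(
        deployment_type="Online",
        authentication_type="Office365",
        service_uri={"type": "Expression", "value": "@linkedService().crmUrl"},
    )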
+ :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -13466,6 +15071,7 @@ class DynamicsSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( @@ -13476,11 +15082,13 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): super(DynamicsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'DynamicsSource' # type: str self.query = query + self.additional_columns = additional_columns class EloquaLinkedService(LinkedService): @@ -13664,6 +15272,9 @@ class EloquaSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -13680,6 +15291,7 @@ class EloquaSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -13691,10 +15303,11 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(EloquaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(EloquaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'EloquaSource' # type: str self.query = query @@ -13894,6 +15507,160 @@ def __init__( self.expression = expression +class ExcelDataset(Dataset): + """Excel dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. 
+ :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param location: The location of the excel storage. + :type location: ~azure.synapse.artifacts.models.DatasetLocation + :param sheet_name: The sheet of excel file. Type: string (or Expression with resultType + string). + :type sheet_name: object + :param range: The partial data of one sheet. Type: string (or Expression with resultType + string). + :type range: object + :param first_row_as_header: When used as input, treat the first row of data as headers. When + used as output,write the headers into the output as the first row of data. The default value is + false. Type: boolean (or Expression with resultType boolean). + :type first_row_as_header: object + :param compression: The data compression method used for the json dataset. + :type compression: ~azure.synapse.artifacts.models.DatasetCompression + :param null_value: The null value string. Type: string (or Expression with resultType string). 
+ :type null_value: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'sheet_name': {'key': 'typeProperties.sheetName', 'type': 'object'}, + 'range': {'key': 'typeProperties.range', 'type': 'object'}, + 'first_row_as_header': {'key': 'typeProperties.firstRowAsHeader', 'type': 'object'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + 'null_value': {'key': 'typeProperties.nullValue', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + location: Optional["DatasetLocation"] = None, + sheet_name: Optional[object] = None, + range: Optional[object] = None, + first_row_as_header: Optional[object] = None, + compression: Optional["DatasetCompression"] = None, + null_value: Optional[object] = None, + **kwargs + ): + super(ExcelDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'Excel' # type: str + self.location = location + self.sheet_name = sheet_name + self.range = range + self.first_row_as_header = first_row_as_header + self.compression = compression + self.null_value = null_value + + +class ExcelSource(CopySource): + """A copy activity excel source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: Excel store settings. + :type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). 
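ExcelDataset is new in this change. A minimal sketch, assuming the LinkedServiceReference and AzureBlobFSLocation constructors from elsewhere in this models module (the linked service name, file system and file name are illustrative):

    from azure.synapse.artifacts.models import (
        AzureBlobFSLocation,
        ExcelDataset,
        LinkedServiceReference,
    )

    excel_ds = ExcelDataset(
        linked_service_name=LinkedServiceReference(
            type="LinkedServiceReference", reference_name="MyAdlsGen2"
        ),
        location=AzureBlobFSLocation(file_system="reports", file_name="summary.xlsx"),
        sheet_name="Sheet1",
        first_row_as_header=True,
    )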
+ :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + store_settings: Optional["StoreReadSettings"] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, + **kwargs + ): + super(ExcelSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'ExcelSource' # type: str + self.store_settings = store_settings + self.additional_columns = additional_columns + + class ExecuteDataFlowActivity(ExecutionActivity): """Execute data flow activity. @@ -13916,20 +15683,29 @@ class ExecuteDataFlowActivity(ExecutionActivity): :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param data_flow: Required. Data flow reference. - :type data_flow: ~azure.synapse.artifacts.models.DataFlowReference + :param dataflow: Data flow reference. + :type dataflow: ~azure.synapse.artifacts.models.DataFlowReference :param staging: Staging info for execute data flow activity. :type staging: ~azure.synapse.artifacts.models.DataFlowStagingInfo :param integration_runtime: The integration runtime reference. :type integration_runtime: ~azure.synapse.artifacts.models.IntegrationRuntimeReference :param compute: Compute properties for data flow activity. :type compute: ~azure.synapse.artifacts.models.ExecuteDataFlowActivityTypePropertiesCompute + :param trace_level: Trace level setting used for data flow monitoring output. Supported values + are: 'coarse', 'fine', and 'none'. Type: string (or Expression with resultType string). + :type trace_level: object + :param continue_on_error: Continue on error setting used for data flow execution. Enables + processing to continue if a sink fails. Type: boolean (or Expression with resultType boolean). + :type continue_on_error: object + :param run_concurrently: Concurrent run setting used for data flow execution. Allows sinks with + the same save order to be processed concurrently. Type: boolean (or Expression with resultType + boolean). 
+ :type run_concurrently: object """ _validation = { 'name': {'required': True}, 'type': {'required': True}, - 'data_flow': {'required': True}, } _attribute_map = { @@ -13941,34 +15717,43 @@ class ExecuteDataFlowActivity(ExecutionActivity): 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'data_flow': {'key': 'typeProperties.dataFlow', 'type': 'DataFlowReference'}, + 'dataflow': {'key': 'typeProperties.dataflow', 'type': 'DataFlowReference'}, 'staging': {'key': 'typeProperties.staging', 'type': 'DataFlowStagingInfo'}, 'integration_runtime': {'key': 'typeProperties.integrationRuntime', 'type': 'IntegrationRuntimeReference'}, 'compute': {'key': 'typeProperties.compute', 'type': 'ExecuteDataFlowActivityTypePropertiesCompute'}, + 'trace_level': {'key': 'typeProperties.traceLevel', 'type': 'object'}, + 'continue_on_error': {'key': 'typeProperties.continueOnError', 'type': 'object'}, + 'run_concurrently': {'key': 'typeProperties.runConcurrently', 'type': 'object'}, } def __init__( self, *, name: str, - data_flow: "DataFlowReference", additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, linked_service_name: Optional["LinkedServiceReference"] = None, policy: Optional["ActivityPolicy"] = None, + dataflow: Optional["DataFlowReference"] = None, staging: Optional["DataFlowStagingInfo"] = None, integration_runtime: Optional["IntegrationRuntimeReference"] = None, compute: Optional["ExecuteDataFlowActivityTypePropertiesCompute"] = None, + trace_level: Optional[object] = None, + continue_on_error: Optional[object] = None, + run_concurrently: Optional[object] = None, **kwargs ): super(ExecuteDataFlowActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.type = 'ExecuteDataFlow' # type: str - self.data_flow = data_flow + self.dataflow = dataflow self.staging = staging self.integration_runtime = integration_runtime self.compute = compute + self.trace_level = trace_level + self.continue_on_error = continue_on_error + self.run_concurrently = run_concurrently class ExecuteDataFlowActivityTypePropertiesCompute(msrest.serialization.Model): @@ -14413,14 +16198,27 @@ class FileServerReadSettings(StoreReadSettings): :param wildcard_file_name: FileServer wildcardFileName. Type: string (or Expression with resultType string). :type wildcard_file_name: object + :param file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition discovery starts from. Type: + string (or Expression with resultType string). + :type partition_root_path: object + :param delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). 
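ExecuteDataFlowActivity's required data_flow keyword becomes an optional dataflow keyword, and traceLevel, continueOnError and runConcurrently are added. A minimal sketch, assuming the DataFlowReference constructor from elsewhere in this models module (names and flag values are illustrative):

    from azure.synapse.artifacts.models import DataFlowReference, ExecuteDataFlowActivity

    run_flow = ExecuteDataFlowActivity(
        name="RunMappingDataFlow",
        dataflow=DataFlowReference(type="DataFlowReference", reference_name="MyDataFlow"),
        trace_level="fine",
        continue_on_error=False,
        run_concurrently=True,
    )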
+ :type delete_files_after_completion: object :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). :type modified_datetime_start: object :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression with resultType string). :type modified_datetime_end: object + :param file_filter: Specify a filter to be used to select a subset of files in the folderPath + rather than all files. Type: string (or Expression with resultType string). + :type file_filter: object """ _validation = { @@ -14434,9 +16232,13 @@ class FileServerReadSettings(StoreReadSettings): 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, + 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + 'file_filter': {'key': 'fileFilter', 'type': 'object'}, } def __init__( @@ -14447,9 +16249,13 @@ def __init__( recursive: Optional[object] = None, wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, + file_list_path: Optional[object] = None, enable_partition_discovery: Optional[bool] = None, + partition_root_path: Optional[object] = None, + delete_files_after_completion: Optional[object] = None, modified_datetime_start: Optional[object] = None, modified_datetime_end: Optional[object] = None, + file_filter: Optional[object] = None, **kwargs ): super(FileServerReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) @@ -14457,9 +16263,13 @@ def __init__( self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name + self.file_list_path = file_list_path self.enable_partition_discovery = enable_partition_discovery + self.partition_root_path = partition_root_path + self.delete_files_after_completion = delete_files_after_completion self.modified_datetime_start = modified_datetime_start self.modified_datetime_end = modified_datetime_end + self.file_filter = file_filter class FileServerWriteSettings(StoreWriteSettings): @@ -14502,6 +16312,109 @@ def __init__( self.type = 'FileServerWriteSettings' # type: str +class FileShareDataset(Dataset): + """An on-premises file system dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. 
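FileServerReadSettings gains fileListPath, partitionRootPath, deleteFilesAfterCompletion and fileFilter. A minimal sketch using only the signature shown above (paths and the filter are illustrative):

    from azure.synapse.artifacts.models import FileServerReadSettings

    fs_read = FileServerReadSettings(
        recursive=True,
        partition_root_path="year=2021/month=03",
        delete_files_after_completion=False,
        file_filter="*.csv",
    )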
+ :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param folder_path: The path of the on-premises file system. Type: string (or Expression with + resultType string). + :type folder_path: object + :param file_name: The name of the on-premises file system. Type: string (or Expression with + resultType string). + :type file_name: object + :param modified_datetime_start: The start of file's modified datetime. Type: string (or + Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: string (or Expression + with resultType string). + :type modified_datetime_end: object + :param format: The format of the files. + :type format: ~azure.synapse.artifacts.models.DatasetStorageFormat + :param file_filter: Specify a filter to be used to select a subset of files in the folderPath + rather than all files. Type: string (or Expression with resultType string). + :type file_filter: object + :param compression: The data compression method used for the file system. + :type compression: ~azure.synapse.artifacts.models.DatasetCompression + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'object'}, + 'file_name': {'key': 'typeProperties.fileName', 'type': 'object'}, + 'modified_datetime_start': {'key': 'typeProperties.modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'typeProperties.modifiedDatetimeEnd', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'file_filter': {'key': 'typeProperties.fileFilter', 'type': 'object'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + folder_path: Optional[object] = None, + file_name: Optional[object] = None, + modified_datetime_start: Optional[object] = None, + modified_datetime_end: Optional[object] = None, + format: Optional["DatasetStorageFormat"] = None, + 
file_filter: Optional[object] = None, + compression: Optional["DatasetCompression"] = None, + **kwargs + ): + super(FileShareDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'FileShare' # type: str + self.folder_path = folder_path + self.file_name = file_name + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + self.format = format + self.file_filter = file_filter + self.compression = compression + + class FileSystemSink(CopySink): """A copy activity file system sink. @@ -14585,6 +16498,9 @@ class FileSystemSource(CopySource): :param recursive: If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -14598,6 +16514,7 @@ class FileSystemSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'recursive': {'key': 'recursive', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( @@ -14608,11 +16525,13 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, recursive: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): super(FileSystemSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'FileSystemSource' # type: str self.recursive = recursive + self.additional_columns = additional_columns class FilterActivity(ControlActivity): @@ -14769,6 +16688,18 @@ class FtpReadSettings(StoreReadSettings): :param wildcard_file_name: Ftp wildcardFileName. Type: string (or Expression with resultType string). :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition discovery. + :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition discovery starts from. Type: + string (or Expression with resultType string). + :type partition_root_path: object + :param delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :type delete_files_after_completion: object + :param file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :type file_list_path: object :param use_binary_transfer: Specify whether to use binary transfer mode for FTP stores. 
:type use_binary_transfer: bool """ @@ -14784,6 +16715,10 @@ class FtpReadSettings(StoreReadSettings): 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, + 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'use_binary_transfer': {'key': 'useBinaryTransfer', 'type': 'bool'}, } @@ -14795,6 +16730,10 @@ def __init__( recursive: Optional[object] = None, wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, + enable_partition_discovery: Optional[bool] = None, + partition_root_path: Optional[object] = None, + delete_files_after_completion: Optional[object] = None, + file_list_path: Optional[object] = None, use_binary_transfer: Optional[bool] = None, **kwargs ): @@ -14803,6 +16742,10 @@ def __init__( self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.partition_root_path = partition_root_path + self.delete_files_after_completion = delete_files_after_completion + self.file_list_path = file_list_path self.use_binary_transfer = use_binary_transfer @@ -14970,6 +16913,10 @@ class GetMetadataActivity(ExecutionActivity): :type dataset: ~azure.synapse.artifacts.models.DatasetReference :param field_list: Fields of metadata to get from dataset. :type field_list: list[object] + :param store_settings: GetMetadata activity store settings. + :type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings + :param format_settings: GetMetadata activity format settings. + :type format_settings: ~azure.synapse.artifacts.models.FormatReadSettings """ _validation = { @@ -14989,6 +16936,8 @@ class GetMetadataActivity(ExecutionActivity): 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, 'dataset': {'key': 'typeProperties.dataset', 'type': 'DatasetReference'}, 'field_list': {'key': 'typeProperties.fieldList', 'type': '[object]'}, + 'store_settings': {'key': 'typeProperties.storeSettings', 'type': 'StoreReadSettings'}, + 'format_settings': {'key': 'typeProperties.formatSettings', 'type': 'FormatReadSettings'}, } def __init__( @@ -15003,12 +16952,16 @@ def __init__( linked_service_name: Optional["LinkedServiceReference"] = None, policy: Optional["ActivityPolicy"] = None, field_list: Optional[List[object]] = None, + store_settings: Optional["StoreReadSettings"] = None, + format_settings: Optional["FormatReadSettings"] = None, **kwargs ): super(GetMetadataActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.type = 'GetMetadata' # type: str self.dataset = dataset self.field_list = field_list + self.store_settings = store_settings + self.format_settings = format_settings class GetSsisObjectMetadataRequest(msrest.serialization.Model): @@ -15302,6 +17255,9 @@ class GoogleAdWordsSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
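GetMetadataActivity now takes optional storeSettings and formatSettings. A minimal sketch, assuming the DatasetReference, AzureBlobFSReadSettings and BinaryReadSettings constructors from elsewhere in this models module (the dataset name and field list are illustrative):

    from azure.synapse.artifacts.models import (
        AzureBlobFSReadSettings,
        BinaryReadSettings,
        DatasetReference,
        GetMetadataActivity,
    )

    get_meta = GetMetadataActivity(
        name="GetFolderMetadata",
        dataset=DatasetReference(type="DatasetReference", reference_name="StagingFolder"),
        field_list=["itemName", "lastModified"],
        store_settings=AzureBlobFSReadSettings(recursive=True),
        format_settings=BinaryReadSettings(),
    )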
:type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -15318,6 +17274,7 @@ class GoogleAdWordsSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -15329,10 +17286,11 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(GoogleAdWordsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(GoogleAdWordsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'GoogleAdWordsSource' # type: str self.query = query @@ -15564,6 +17522,9 @@ class GoogleBigQuerySource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
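# Sketch (not part of this diff) of the additionalColumns pattern this change threads
# through every tabular source: extra static or expression-based columns appended to
# the rows read from the source. The name/value fields on AdditionalColumns are an
# assumption based on the model imported in __init__.py; values are placeholders.
from azure.synapse.artifacts.models import AdditionalColumns, GoogleAdWordsSource

adwords_source = GoogleAdWordsSource(
    query="SELECT CampaignId, Impressions FROM CAMPAIGN_PERFORMANCE_REPORT",
    query_timeout="02:00:00",
    additional_columns=[
        AdditionalColumns(name="ingestion_run", value="@pipeline().RunId"),  # assumed fields
        AdditionalColumns(name="source_system", value="google_adwords"),
    ],
)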
:type query: object @@ -15580,6 +17541,7 @@ class GoogleBigQuerySource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -15591,10 +17553,11 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(GoogleBigQuerySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(GoogleBigQuerySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'GoogleBigQuerySource' # type: str self.query = query @@ -15751,8 +17714,18 @@ class GoogleCloudStorageReadSettings(StoreReadSettings): :param prefix: The prefix filter for the Google Cloud Storage object name. Type: string (or Expression with resultType string). :type prefix: object + :param file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition discovery starts from. Type: + string (or Expression with resultType string). + :type partition_root_path: object + :param delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :type delete_files_after_completion: object :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). 
:type modified_datetime_start: object @@ -15773,7 +17746,10 @@ class GoogleCloudStorageReadSettings(StoreReadSettings): 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, 'prefix': {'key': 'prefix', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, + 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } @@ -15787,7 +17763,10 @@ def __init__( wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, prefix: Optional[object] = None, + file_list_path: Optional[object] = None, enable_partition_discovery: Optional[bool] = None, + partition_root_path: Optional[object] = None, + delete_files_after_completion: Optional[object] = None, modified_datetime_start: Optional[object] = None, modified_datetime_end: Optional[object] = None, **kwargs @@ -15798,7 +17777,10 @@ def __init__( self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name self.prefix = prefix + self.file_list_path = file_list_path self.enable_partition_discovery = enable_partition_discovery + self.partition_root_path = partition_root_path + self.delete_files_after_completion = delete_files_after_completion self.modified_datetime_start = modified_datetime_start self.modified_datetime_end = modified_datetime_end @@ -15890,6 +17872,9 @@ class GreenplumSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
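# Sketch (not part of this diff) of the file-list / partition-discovery options added
# to GoogleCloudStorageReadSettings above; the same trio is added to the other
# *ReadSettings classes in this change. Paths are placeholders.
from azure.synapse.artifacts.models import GoogleCloudStorageReadSettings

gcs_settings = GoogleCloudStorageReadSettings(
    recursive=True,
    wildcard_file_name="*.parquet",
    enable_partition_discovery=True,
    partition_root_path="exports/2021",     # folder where partition columns begin
    delete_files_after_completion=False,    # keep the source objects after the copy
    # file_list_path="exports/manifest.txt",  # alternative to wildcard matching
)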
:type query: object @@ -15906,6 +17891,7 @@ class GreenplumSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -15917,10 +17903,11 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(GreenplumSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(GreenplumSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'GreenplumSource' # type: str self.query = query @@ -16209,6 +18196,9 @@ class HBaseSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -16225,6 +18215,7 @@ class HBaseSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -16236,10 +18227,11 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(HBaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(HBaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'HBaseSource' # type: str self.query = query @@ -16385,8 +18377,15 @@ class HdfsReadSettings(StoreReadSettings): :param wildcard_file_name: HDFS wildcardFileName. Type: string (or Expression with resultType string). 
:type wildcard_file_name: object + :param file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition discovery starts from. Type: + string (or Expression with resultType string). + :type partition_root_path: object :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). :type modified_datetime_start: object @@ -16395,6 +18394,9 @@ class HdfsReadSettings(StoreReadSettings): :type modified_datetime_end: object :param distcp_settings: Specifies Distcp-related settings. :type distcp_settings: ~azure.synapse.artifacts.models.DistcpSettings + :param delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :type delete_files_after_completion: object """ _validation = { @@ -16408,10 +18410,13 @@ class HdfsReadSettings(StoreReadSettings): 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, + 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, } def __init__( @@ -16422,10 +18427,13 @@ def __init__( recursive: Optional[object] = None, wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, + file_list_path: Optional[object] = None, enable_partition_discovery: Optional[bool] = None, + partition_root_path: Optional[object] = None, modified_datetime_start: Optional[object] = None, modified_datetime_end: Optional[object] = None, distcp_settings: Optional["DistcpSettings"] = None, + delete_files_after_completion: Optional[object] = None, **kwargs ): super(HdfsReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) @@ -16433,10 +18441,13 @@ def __init__( self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name + self.file_list_path = file_list_path self.enable_partition_discovery = enable_partition_discovery + self.partition_root_path = partition_root_path self.modified_datetime_start = modified_datetime_start self.modified_datetime_end = modified_datetime_end self.distcp_settings = distcp_settings + self.delete_files_after_completion = delete_files_after_completion class HdfsSource(CopySource): @@ -17604,6 +19615,9 @@ class HiveSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -17620,6 +19634,7 @@ class HiveSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -17631,14 +19646,115 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(HiveSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(HiveSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'HiveSource' # type: str self.query = query +class HttpDataset(Dataset): + """A file in an HTTP web server. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param relative_url: The relative URL based on the URL in the HttpLinkedService refers to an + HTTP file Type: string (or Expression with resultType string). + :type relative_url: object + :param request_method: The HTTP method for the HTTP request. Type: string (or Expression with + resultType string). + :type request_method: object + :param request_body: The body for the HTTP request. Type: string (or Expression with resultType + string). 
+ :type request_body: object + :param additional_headers: The headers for the HTTP Request. e.g. request-header- + name-1:request-header-value-1 + ... + request-header-name-n:request-header-value-n Type: string (or Expression with resultType + string). + :type additional_headers: object + :param format: The format of files. + :type format: ~azure.synapse.artifacts.models.DatasetStorageFormat + :param compression: The data compression method used on files. + :type compression: ~azure.synapse.artifacts.models.DatasetCompression + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'relative_url': {'key': 'typeProperties.relativeUrl', 'type': 'object'}, + 'request_method': {'key': 'typeProperties.requestMethod', 'type': 'object'}, + 'request_body': {'key': 'typeProperties.requestBody', 'type': 'object'}, + 'additional_headers': {'key': 'typeProperties.additionalHeaders', 'type': 'object'}, + 'format': {'key': 'typeProperties.format', 'type': 'DatasetStorageFormat'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + relative_url: Optional[object] = None, + request_method: Optional[object] = None, + request_body: Optional[object] = None, + additional_headers: Optional[object] = None, + format: Optional["DatasetStorageFormat"] = None, + compression: Optional["DatasetCompression"] = None, + **kwargs + ): + super(HttpDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'HttpFile' # type: str + self.relative_url = relative_url + self.request_method = request_method + self.request_body = request_body + self.additional_headers = additional_headers + self.format = format + self.compression = compression + + class HttpLinkedService(LinkedService): """Linked service for an HTTP source. @@ -17765,6 +19881,11 @@ class HttpReadSettings(StoreReadSettings): :param request_timeout: Specifies the timeout for a HTTP client to get HTTP response from HTTP server. :type request_timeout: object + :param enable_partition_discovery: Indicates whether to enable partition discovery. + :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition discovery starts from. Type: + string (or Expression with resultType string). 
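# Sketch (not part of this diff) of defining the HttpDataset model added above.
# The linked service name and relative URL are placeholders.
from azure.synapse.artifacts.models import HttpDataset, LinkedServiceReference

http_dataset = HttpDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference", reference_name="MyHttpServer"
    ),
    relative_url="data/daily/@{formatDateTime(utcnow(), 'yyyy-MM-dd')}.csv",
    request_method="GET",
    additional_headers="Accept: text/csv",
)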
+ :type partition_root_path: object """ _validation = { @@ -17779,6 +19900,8 @@ class HttpReadSettings(StoreReadSettings): 'request_body': {'key': 'requestBody', 'type': 'object'}, 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, 'request_timeout': {'key': 'requestTimeout', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, } def __init__( @@ -17790,6 +19913,8 @@ def __init__( request_body: Optional[object] = None, additional_headers: Optional[object] = None, request_timeout: Optional[object] = None, + enable_partition_discovery: Optional[bool] = None, + partition_root_path: Optional[object] = None, **kwargs ): super(HttpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) @@ -17798,6 +19923,8 @@ def __init__( self.request_body = request_body self.additional_headers = additional_headers self.request_timeout = request_timeout + self.enable_partition_discovery = enable_partition_discovery + self.partition_root_path = partition_root_path class HttpServerLocation(DatasetLocation): @@ -18087,6 +20214,9 @@ class HubspotSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -18103,6 +20233,7 @@ class HubspotSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -18114,10 +20245,11 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(HubspotSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(HubspotSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'HubspotSource' # type: str self.query = query @@ -18409,6 +20541,9 @@ class ImpalaSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). 
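# The copy-time counterpart (sketch, not part of this diff): HttpReadSettings now
# exposes the same partition-discovery switches as the file-based stores.
from azure.synapse.artifacts.models import HttpReadSettings

http_read_settings = HttpReadSettings(
    request_method="GET",
    request_timeout="00:05:00",
    enable_partition_discovery=True,    # new in this change
    partition_root_path="data/daily",   # new in this change
)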
+ :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -18425,6 +20560,7 @@ class ImpalaSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -18436,10 +20572,11 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(ImpalaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(ImpalaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'ImpalaSource' # type: str self.query = query @@ -18614,6 +20751,9 @@ class InformixSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object """ @@ -18629,6 +20769,7 @@ class InformixSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -18640,10 +20781,11 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(InformixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(InformixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'InformixSource' # type: str self.query = query @@ -19378,6 +21520,9 @@ class JiraSource(TabularSource): :param query_timeout: Query timeout. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -19394,6 +21539,7 @@ class JiraSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -19405,10 +21551,11 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(JiraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(JiraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'JiraSource' # type: str self.query = query @@ -19571,6 +21718,42 @@ def __init__( self.json_path_definition = json_path_definition +class JsonReadSettings(FormatReadSettings): + """Json read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The read setting type.Constant filled by server. + :type type: str + :param compression_properties: Compression settings. + :type compression_properties: ~azure.synapse.artifacts.models.CompressionReadSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'compression_properties': {'key': 'compressionProperties', 'type': 'CompressionReadSettings'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + compression_properties: Optional["CompressionReadSettings"] = None, + **kwargs + ): + super(JsonReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'JsonReadSettings' # type: str + self.compression_properties = compression_properties + + class JsonSink(CopySink): """A copy activity Json sink. @@ -19658,6 +21841,11 @@ class JsonSource(CopySource): :type max_concurrent_connections: object :param store_settings: Json store settings. :type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings + :param format_settings: Json format settings. 
+ :type format_settings: ~azure.synapse.artifacts.models.JsonReadSettings + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -19671,6 +21859,8 @@ class JsonSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'JsonReadSettings'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( @@ -19681,11 +21871,15 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, store_settings: Optional["StoreReadSettings"] = None, + format_settings: Optional["JsonReadSettings"] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): super(JsonSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'JsonSource' # type: str self.store_settings = store_settings + self.format_settings = format_settings + self.additional_columns = additional_columns class JsonWriteSettings(FormatWriteSettings): @@ -20266,8 +22460,80 @@ def __init__( self.properties = properties +class LogLocationSettings(msrest.serialization.Model): + """Log location settings. + + All required parameters must be populated in order to send to Azure. + + :param linked_service_name: Required. Log storage linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param path: The path to storage for storing detailed logs of activity execution. Type: string + (or Expression with resultType string). + :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'path': {'key': 'path', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + path: Optional[object] = None, + **kwargs + ): + super(LogLocationSettings, self).__init__(**kwargs) + self.linked_service_name = linked_service_name + self.path = path + + +class LogSettings(msrest.serialization.Model): + """Log settings. + + All required parameters must be populated in order to send to Azure. + + :param enable_copy_activity_log: Specifies whether to enable copy activity log. Type: boolean + (or Expression with resultType boolean). + :type enable_copy_activity_log: object + :param copy_activity_log_settings: Specifies settings for copy activity log. + :type copy_activity_log_settings: ~azure.synapse.artifacts.models.CopyActivityLogSettings + :param log_location_settings: Required. Log location settings customer needs to provide when + enabling log. 
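# Sketch (not part of this diff) of a JSON copy source combining the new
# JsonReadSettings format settings with additionalColumns. The blob read settings and
# column values are placeholders; compression_properties would take a
# CompressionReadSettings subclass when the files are compressed.
from azure.synapse.artifacts.models import (
    AdditionalColumns,
    AzureBlobFSReadSettings,
    JsonReadSettings,
    JsonSource,
)

json_source = JsonSource(
    store_settings=AzureBlobFSReadSettings(recursive=True, wildcard_file_name="*.json"),
    format_settings=JsonReadSettings(),
    additional_columns=[AdditionalColumns(name="load_time", value="@utcnow()")],  # assumed fields
)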
+ :type log_location_settings: ~azure.synapse.artifacts.models.LogLocationSettings + """ + + _validation = { + 'log_location_settings': {'required': True}, + } + + _attribute_map = { + 'enable_copy_activity_log': {'key': 'enableCopyActivityLog', 'type': 'object'}, + 'copy_activity_log_settings': {'key': 'copyActivityLogSettings', 'type': 'CopyActivityLogSettings'}, + 'log_location_settings': {'key': 'logLocationSettings', 'type': 'LogLocationSettings'}, + } + + def __init__( + self, + *, + log_location_settings: "LogLocationSettings", + enable_copy_activity_log: Optional[object] = None, + copy_activity_log_settings: Optional["CopyActivityLogSettings"] = None, + **kwargs + ): + super(LogSettings, self).__init__(**kwargs) + self.enable_copy_activity_log = enable_copy_activity_log + self.copy_activity_log_settings = copy_activity_log_settings + self.log_location_settings = log_location_settings + + class LogStorageSettings(msrest.serialization.Model): - """Log storage settings. + """(Deprecated. Please use LogSettings) Log storage settings. All required parameters must be populated in order to send to Azure. @@ -20279,6 +22545,12 @@ class LogStorageSettings(msrest.serialization.Model): :param path: The path to storage for storing detailed logs of activity execution. Type: string (or Expression with resultType string). :type path: object + :param log_level: Gets or sets the log level, support: Info, Warning. Type: string (or + Expression with resultType string). + :type log_level: object + :param enable_reliable_logging: Specifies whether to enable reliable logging. Type: boolean (or + Expression with resultType boolean). + :type enable_reliable_logging: object """ _validation = { @@ -20289,6 +22561,8 @@ class LogStorageSettings(msrest.serialization.Model): 'additional_properties': {'key': '', 'type': '{object}'}, 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'path': {'key': 'path', 'type': 'object'}, + 'log_level': {'key': 'logLevel', 'type': 'object'}, + 'enable_reliable_logging': {'key': 'enableReliableLogging', 'type': 'object'}, } def __init__( @@ -20297,12 +22571,16 @@ def __init__( linked_service_name: "LinkedServiceReference", additional_properties: Optional[Dict[str, object]] = None, path: Optional[object] = None, + log_level: Optional[object] = None, + enable_reliable_logging: Optional[object] = None, **kwargs ): super(LogStorageSettings, self).__init__(**kwargs) self.additional_properties = additional_properties self.linked_service_name = linked_service_name self.path = path + self.log_level = log_level + self.enable_reliable_logging = enable_reliable_logging class LookupActivity(ExecutionActivity): @@ -20553,6 +22831,9 @@ class MagentoSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
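# Sketch (not part of this diff) of the LogSettings / LogLocationSettings pair that
# supersedes the now-deprecated LogStorageSettings. The linked service name and path
# are placeholders.
from azure.synapse.artifacts.models import (
    LinkedServiceReference,
    LogLocationSettings,
    LogSettings,
)

log_settings = LogSettings(
    enable_copy_activity_log=True,
    log_location_settings=LogLocationSettings(
        linked_service_name=LinkedServiceReference(
            type="LinkedServiceReference", reference_name="LoggingBlobStorage"
        ),
        path="copy-activity-logs",
    ),
)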
:type query: object @@ -20569,6 +22850,7 @@ class MagentoSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -20580,10 +22862,11 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(MagentoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(MagentoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'MagentoSource' # type: str self.query = query @@ -20644,6 +22927,8 @@ class ManagedIntegrationRuntime(IntegrationRuntime): Possible values include: "Initial", "Stopped", "Started", "Starting", "Stopping", "NeedRegistration", "Online", "Limited", "Offline", "AccessDenied". :vartype state: str or ~azure.synapse.artifacts.models.IntegrationRuntimeState + :param managed_virtual_network: Managed Virtual Network reference. + :type managed_virtual_network: ~azure.synapse.artifacts.models.ManagedVirtualNetworkReference :param compute_properties: The compute resource for managed integration runtime. :type compute_properties: ~azure.synapse.artifacts.models.IntegrationRuntimeComputeProperties :param ssis_properties: SSIS properties for managed integration runtime. @@ -20660,6 +22945,7 @@ class ManagedIntegrationRuntime(IntegrationRuntime): 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'state': {'key': 'state', 'type': 'str'}, + 'managed_virtual_network': {'key': 'managedVirtualNetwork', 'type': 'ManagedVirtualNetworkReference'}, 'compute_properties': {'key': 'typeProperties.computeProperties', 'type': 'IntegrationRuntimeComputeProperties'}, 'ssis_properties': {'key': 'typeProperties.ssisProperties', 'type': 'IntegrationRuntimeSsisProperties'}, } @@ -20669,6 +22955,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, + managed_virtual_network: Optional["ManagedVirtualNetworkReference"] = None, compute_properties: Optional["IntegrationRuntimeComputeProperties"] = None, ssis_properties: Optional["IntegrationRuntimeSsisProperties"] = None, **kwargs @@ -20676,10 +22963,47 @@ def __init__( super(ManagedIntegrationRuntime, self).__init__(additional_properties=additional_properties, description=description, **kwargs) self.type = 'Managed' # type: str self.state = None + self.managed_virtual_network = managed_virtual_network self.compute_properties = compute_properties self.ssis_properties = ssis_properties +class ManagedVirtualNetworkReference(msrest.serialization.Model): + """Managed Virtual Network reference type. + + Variables are only populated by the server, and will be ignored when sending a request. 
+ + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Managed Virtual Network reference type. Default value: + "ManagedVirtualNetworkReference". + :vartype type: str + :param reference_name: Required. Reference ManagedVirtualNetwork name. + :type reference_name: str + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + type = "ManagedVirtualNetworkReference" + + def __init__( + self, + *, + reference_name: str, + **kwargs + ): + super(ManagedVirtualNetworkReference, self).__init__(**kwargs) + self.reference_name = reference_name + + class ManagedVirtualNetworkSettings(msrest.serialization.Model): """Managed Virtual Network Settings. @@ -20857,6 +23181,9 @@ class MariaDBSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -20873,6 +23200,7 @@ class MariaDBSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -20884,10 +23212,11 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(MariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(MariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'MariaDBSource' # type: str self.query = query @@ -21140,6 +23469,9 @@ class MarketoSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
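# Sketch (not part of this diff) of binding a managed integration runtime to a managed
# virtual network through the new reference type added above; "default" is the usual
# Synapse managed VNet name but is only a placeholder here.
from azure.synapse.artifacts.models import (
    ManagedIntegrationRuntime,
    ManagedVirtualNetworkReference,
)

managed_ir = ManagedIntegrationRuntime(
    description="Managed IR running inside the workspace managed VNet",
    managed_virtual_network=ManagedVirtualNetworkReference(reference_name="default"),
)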
:type query: object @@ -21156,6 +23488,7 @@ class MarketoSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -21167,10 +23500,11 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(MarketoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(MarketoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'MarketoSource' # type: str self.query = query @@ -21344,6 +23678,9 @@ class MicrosoftAccessSource(CopySource): :type max_concurrent_connections: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -21357,6 +23694,7 @@ class MicrosoftAccessSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( @@ -21367,11 +23705,13 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): super(MicrosoftAccessSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'MicrosoftAccessSource' # type: str self.query = query + self.additional_columns = additional_columns class MicrosoftAccessTableDataset(Dataset): @@ -21401,14 +23741,84 @@ class MicrosoftAccessTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The Microsoft Access table name. Type: string (or Expression with resultType - string). - :type table_name: object + :param table_name: The Microsoft Access table name. Type: string (or Expression with resultType + string). 
+ :type table_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + **kwargs + ): + super(MicrosoftAccessTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'MicrosoftAccessTable' # type: str + self.table_name = table_name + + +class MongoDbAtlasCollectionDataset(Dataset): + """The MongoDB Atlas database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param collection: Required. The collection name of the MongoDB Atlas database. Type: string + (or Expression with resultType string). 
+ :type collection: object """ _validation = { 'type': {'required': True}, 'linked_service_name': {'required': True}, + 'collection': {'required': True}, } _attribute_map = { @@ -21421,13 +23831,14 @@ class MicrosoftAccessTableDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, } def __init__( self, *, linked_service_name: "LinkedServiceReference", + collection: object, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, structure: Optional[object] = None, @@ -21435,12 +23846,151 @@ def __init__( parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, **kwargs ): - super(MicrosoftAccessTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'MicrosoftAccessTable' # type: str - self.table_name = table_name + super(MongoDbAtlasCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'MongoDbAtlasCollection' # type: str + self.collection = collection + + +class MongoDbAtlasLinkedService(LinkedService): + """Linked service for MongoDB Atlas data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: Required. The MongoDB Atlas connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param database: Required. The name of the MongoDB Atlas database that you want to access. + Type: string (or Expression with resultType string). 
+    :type database: object
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'connection_string': {'required': True},
+        'database': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
+        'description': {'key': 'description', 'type': 'str'},
+        'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'},
+        'database': {'key': 'typeProperties.database', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        *,
+        connection_string: object,
+        database: object,
+        additional_properties: Optional[Dict[str, object]] = None,
+        connect_via: Optional["IntegrationRuntimeReference"] = None,
+        description: Optional[str] = None,
+        parameters: Optional[Dict[str, "ParameterSpecification"]] = None,
+        annotations: Optional[List[object]] = None,
+        **kwargs
+    ):
+        super(MongoDbAtlasLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
+        self.type = 'MongoDbAtlas'  # type: str
+        self.connection_string = connection_string
+        self.database = database
+
+
+class MongoDbAtlasSource(CopySource):
+    """A copy activity source for a MongoDB Atlas database.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Copy source type.Constant filled by server.
+    :type type: str
+    :param source_retry_count: Source retry count. Type: integer (or Expression with resultType
+     integer).
+    :type source_retry_count: object
+    :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType
+     string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type source_retry_wait: object
+    :param max_concurrent_connections: The maximum concurrent connection count for the source data
+     store. Type: integer (or Expression with resultType integer).
+    :type max_concurrent_connections: object
+    :param filter: Specifies selection filter using query operators. To return all documents in a
+     collection, omit this parameter or pass an empty document ({}). Type: string (or Expression
+     with resultType string).
+    :type filter: object
+    :param cursor_methods: Cursor methods for MongoDB query.
+    :type cursor_methods: ~azure.synapse.artifacts.models.MongoDbCursorMethodsProperties
+    :param batch_size: Specifies the number of documents to return in each batch of the response
+     from the MongoDB Atlas instance. In most cases, modifying the batch size will not affect the
+     user or the application. This property's main purpose is to avoid hitting the limitation of
+     response size. Type: integer (or Expression with resultType integer).
+    :type batch_size: object
+    :param query_timeout: Query timeout. Type: string (or Expression with resultType string),
+     pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+    :type query_timeout: object
+    :param additional_columns: Specifies the additional columns to be added to source data. Type:
+     array of objects (or Expression with resultType array of objects).
+ :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'filter': {'key': 'filter', 'type': 'object'}, + 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + filter: Optional[object] = None, + cursor_methods: Optional["MongoDbCursorMethodsProperties"] = None, + batch_size: Optional[object] = None, + query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, + **kwargs + ): + super(MongoDbAtlasSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'MongoDbAtlasSource' # type: str + self.filter = filter + self.cursor_methods = cursor_methods + self.batch_size = batch_size + self.query_timeout = query_timeout + self.additional_columns = additional_columns class MongoDbCollectionDataset(Dataset): @@ -21693,6 +24243,9 @@ class MongoDbSource(CopySource): :param query: Database query. Should be a SQL-92 query expression. Type: string (or Expression with resultType string). :type query: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -21706,6 +24259,7 @@ class MongoDbSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( @@ -21716,11 +24270,13 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): super(MongoDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'MongoDbSource' # type: str self.query = query + self.additional_columns = additional_columns class MongoDbV2CollectionDataset(Dataset): @@ -21887,6 +24443,9 @@ class MongoDbV2Source(CopySource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
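# --- Illustrative usage sketch (not part of the generated patch) -------------
# A minimal example of wiring the new MongoDB Atlas models together for the
# source side of a copy activity. The keyword arguments mirror the signatures
# added above; the LinkedServiceReference shape (a reference_name plus the
# constant "LinkedServiceReference" type) is assumed from the SDK's other
# reference models and is not shown in this hunk.
from azure.synapse.artifacts.models import (
    LinkedServiceReference,
    MongoDbAtlasCollectionDataset,
    MongoDbAtlasLinkedService,
    MongoDbAtlasSource,
)

# Linked service: connection_string and database are the two required type properties.
atlas_ls = MongoDbAtlasLinkedService(
    connection_string="mongodb+srv://<user>:<password>@cluster0.example.mongodb.net",
    database="sales",
)

# Dataset: 'collection' is a required type property.
atlas_ds = MongoDbAtlasCollectionDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference", reference_name="MongoDbAtlasLS"
    ),
    collection="orders",
)

# Copy source: filter, cursor_methods, batch_size and query_timeout are all optional.
atlas_source = MongoDbAtlasSource(
    filter='{"status": "shipped"}',
    batch_size=1000,
    query_timeout="00:10:00",
)
# ------------------------------------------------------------------------------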
:type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -21903,6 +24462,7 @@ class MongoDbV2Source(CopySource): 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( @@ -21916,6 +24476,7 @@ def __init__( cursor_methods: Optional["MongoDbCursorMethodsProperties"] = None, batch_size: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): super(MongoDbV2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) @@ -21924,6 +24485,7 @@ def __init__( self.cursor_methods = cursor_methods self.batch_size = batch_size self.query_timeout = query_timeout + self.additional_columns = additional_columns class MySqlLinkedService(LinkedService): @@ -22013,6 +24575,9 @@ class MySqlSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object """ @@ -22028,6 +24593,7 @@ class MySqlSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -22039,10 +24605,11 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(MySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(MySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'MySqlSource' # type: str self.query = query @@ -22238,6 +24805,9 @@ class NetezzaSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
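# --- Illustrative usage sketch (not part of the generated patch) -------------
# The new 'additional_columns' parameter is threaded through TabularSource and
# its subclasses (MySqlSource, NetezzaSource, and the other sources touched in
# this patch). A minimal sketch with MySqlSource; the AdditionalColumns model
# is assumed to expose 'name'/'value' members, as in the Data Factory models.
from azure.synapse.artifacts.models import AdditionalColumns, MySqlSource

mysql_source = MySqlSource(
    query="SELECT id, amount FROM orders",
    query_timeout="00:05:00",
    additional_columns=[
        # Static value appended to every row read from the source.
        AdditionalColumns(name="ingest_region", value="westus2"),
        # An expression object can be supplied instead of a literal (assumed shape).
        AdditionalColumns(
            name="ingest_time",
            value={"value": "@utcnow()", "type": "Expression"},
        ),
    ],
)
# ------------------------------------------------------------------------------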
:type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -22259,6 +24829,7 @@ class NetezzaSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'NetezzaPartitionSettings'}, @@ -22272,12 +24843,13 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, partition_option: Optional[Union[str, "NetezzaPartitionOption"]] = None, partition_settings: Optional["NetezzaPartitionSettings"] = None, **kwargs ): - super(NetezzaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(NetezzaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'NetezzaSource' # type: str self.query = query self.partition_option = partition_option @@ -22822,6 +25394,10 @@ class ODataLinkedService(LinkedService): :param service_principal_id: Specify the application id of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). :type service_principal_id: object + :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data + factory regions’ cloud type. Type: string (or Expression with resultType string). + :type azure_cloud_type: object :param aad_resource_id: Specify the resource you are requesting authorization to use Directory. Type: string (or Expression with resultType string). 
:type aad_resource_id: object @@ -22864,6 +25440,7 @@ class ODataLinkedService(LinkedService): 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, 'aad_service_principal_credential_type': {'key': 'typeProperties.aadServicePrincipalCredentialType', 'type': 'str'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, @@ -22886,6 +25463,7 @@ def __init__( password: Optional["SecretBase"] = None, tenant: Optional[object] = None, service_principal_id: Optional[object] = None, + azure_cloud_type: Optional[object] = None, aad_resource_id: Optional[object] = None, aad_service_principal_credential_type: Optional[Union[str, "ODataAadServicePrincipalCredentialType"]] = None, service_principal_key: Optional["SecretBase"] = None, @@ -22902,6 +25480,7 @@ def __init__( self.password = password self.tenant = tenant self.service_principal_id = service_principal_id + self.azure_cloud_type = azure_cloud_type self.aad_resource_id = aad_resource_id self.aad_service_principal_credential_type = aad_service_principal_credential_type self.service_principal_key = service_principal_key @@ -23000,6 +25579,14 @@ class ODataSource(CopySource): :param query: OData query. For example, "$top=1". Type: string (or Expression with resultType string). :type query: object + :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout + to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string + (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type http_request_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -23013,6 +25600,8 @@ class ODataSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( @@ -23023,11 +25612,15 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query: Optional[object] = None, + http_request_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): super(ODataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'ODataSource' # type: str self.query = query + self.http_request_timeout = http_request_timeout + self.additional_columns = additional_columns class OdbcLinkedService(LinkedService): @@ -23199,6 +25792,9 @@ class OdbcSource(TabularSource): :param query_timeout: Query timeout. 
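# --- Illustrative usage sketch (not part of the generated patch) -------------
# ODataSource gains 'http_request_timeout' and 'additional_columns'; the
# companion ODataLinkedService gains an optional 'azure_cloud_type' type
# property (AzurePublic, AzureChina, AzureUsGovernment or AzureGermany).
# Every ODataSource argument below is optional.
from azure.synapse.artifacts.models import ODataSource

odata_source = ODataSource(
    query="$top=100",
    # Timeout for receiving the HTTP response, not for reading its body.
    http_request_timeout="00:05:00",
)
# ------------------------------------------------------------------------------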
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object """ @@ -23214,6 +25810,7 @@ class OdbcSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -23225,10 +25822,11 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(OdbcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(OdbcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'OdbcSource' # type: str self.query = query @@ -23877,6 +26475,9 @@ class OracleServiceCloudSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -23893,6 +26494,7 @@ class OracleServiceCloudSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -23904,10 +26506,11 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(OracleServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(OracleServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'OracleServiceCloudSource' # type: str self.query = query @@ -24004,6 +26607,9 @@ class OracleSource(CopySource): :type partition_option: str or ~azure.synapse.artifacts.models.OraclePartitionOption :param partition_settings: The settings that will be leveraged for Oracle source partitioning. :type partition_settings: ~azure.synapse.artifacts.models.OraclePartitionSettings + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -24020,6 +26626,7 @@ class OracleSource(CopySource): 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( @@ -24033,6 +26640,7 @@ def __init__( query_timeout: Optional[object] = None, partition_option: Optional[Union[str, "OraclePartitionOption"]] = None, partition_settings: Optional["OraclePartitionSettings"] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): super(OracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) @@ -24041,6 +26649,7 @@ def __init__( self.query_timeout = query_timeout self.partition_option = partition_option self.partition_settings = partition_settings + self.additional_columns = additional_columns class OracleTableDataset(Dataset): @@ -24153,7 +26762,7 @@ class OrcDataset(Dataset): :type folder: ~azure.synapse.artifacts.models.DatasetFolder :param location: The location of the ORC data storage. :type location: ~azure.synapse.artifacts.models.DatasetLocation - :param orc_compression_codec: Possible values include: "none", "zlib", "snappy". + :param orc_compression_codec: Possible values include: "none", "zlib", "snappy", "lzo". 
:type orc_compression_codec: str or ~azure.synapse.artifacts.models.OrcCompressionCodec """ @@ -24263,6 +26872,8 @@ class OrcSink(CopySink): :type max_concurrent_connections: object :param store_settings: ORC store settings. :type store_settings: ~azure.synapse.artifacts.models.StoreWriteSettings + :param format_settings: ORC format settings. + :type format_settings: ~azure.synapse.artifacts.models.OrcWriteSettings """ _validation = { @@ -24278,6 +26889,7 @@ class OrcSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'OrcWriteSettings'}, } def __init__( @@ -24290,11 +26902,13 @@ def __init__( sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, store_settings: Optional["StoreWriteSettings"] = None, + format_settings: Optional["OrcWriteSettings"] = None, **kwargs ): super(OrcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'OrcSink' # type: str self.store_settings = store_settings + self.format_settings = format_settings class OrcSource(CopySource): @@ -24318,6 +26932,9 @@ class OrcSource(CopySource): :type max_concurrent_connections: object :param store_settings: ORC store settings. :type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -24331,6 +26948,7 @@ class OrcSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( @@ -24341,11 +26959,57 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, store_settings: Optional["StoreReadSettings"] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): super(OrcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'OrcSource' # type: str self.store_settings = store_settings + self.additional_columns = additional_columns + + +class OrcWriteSettings(FormatWriteSettings): + """Orc write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. + :type type: str + :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + specified count. Type: integer (or Expression with resultType integer). 
+ :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). + :type file_name_prefix: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + max_rows_per_file: Optional[object] = None, + file_name_prefix: Optional[object] = None, + **kwargs + ): + super(OrcWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'OrcWriteSettings' # type: str + self.max_rows_per_file = max_rows_per_file + self.file_name_prefix = file_name_prefix class ParameterSpecification(msrest.serialization.Model): @@ -24410,8 +27074,8 @@ class ParquetDataset(Dataset): :type folder: ~azure.synapse.artifacts.models.DatasetFolder :param location: The location of the parquet storage. :type location: ~azure.synapse.artifacts.models.DatasetLocation - :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo". - :type compression_codec: str or ~azure.synapse.artifacts.models.ParquetCompressionCodec + :param compression_codec: A string from ParquetCompressionCodecEnum or an expression. + :type compression_codec: object """ _validation = { @@ -24430,7 +27094,7 @@ class ParquetDataset(Dataset): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, - 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'str'}, + 'compression_codec': {'key': 'typeProperties.compressionCodec', 'type': 'object'}, } def __init__( @@ -24445,7 +27109,7 @@ def __init__( annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, location: Optional["DatasetLocation"] = None, - compression_codec: Optional[Union[str, "ParquetCompressionCodec"]] = None, + compression_codec: Optional[object] = None, **kwargs ): super(ParquetDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) @@ -24520,6 +27184,8 @@ class ParquetSink(CopySink): :type max_concurrent_connections: object :param store_settings: Parquet store settings. :type store_settings: ~azure.synapse.artifacts.models.StoreWriteSettings + :param format_settings: Parquet format settings. 
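# --- Illustrative usage sketch (not part of the generated patch) -------------
# OrcSink now carries an optional 'format_settings' of type OrcWriteSettings,
# which controls how output files are split and named when copying from a
# non-file-based store. (OrcDataset's orc_compression_codec also accepts the
# new "lzo" value.)
from azure.synapse.artifacts.models import OrcSink, OrcWriteSettings

orc_sink = OrcSink(
    format_settings=OrcWriteSettings(
        max_rows_per_file=1000000,      # cap each written file at 1M rows
        file_name_prefix="orders_orc",  # prefix used for the generated file names
    ),
)
# ------------------------------------------------------------------------------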
+ :type format_settings: ~azure.synapse.artifacts.models.ParquetWriteSettings """ _validation = { @@ -24535,6 +27201,7 @@ class ParquetSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'ParquetWriteSettings'}, } def __init__( @@ -24547,11 +27214,13 @@ def __init__( sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, store_settings: Optional["StoreWriteSettings"] = None, + format_settings: Optional["ParquetWriteSettings"] = None, **kwargs ): super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'ParquetSink' # type: str self.store_settings = store_settings + self.format_settings = format_settings class ParquetSource(CopySource): @@ -24575,6 +27244,9 @@ class ParquetSource(CopySource): :type max_concurrent_connections: object :param store_settings: Parquet store settings. :type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -24588,6 +27260,7 @@ class ParquetSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( @@ -24598,11 +27271,57 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, store_settings: Optional["StoreReadSettings"] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): super(ParquetSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'ParquetSource' # type: str self.store_settings = store_settings + self.additional_columns = additional_columns + + +class ParquetWriteSettings(FormatWriteSettings): + """Parquet write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. + :type type: str + :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + specified count. Type: integer (or Expression with resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). 
+ :type file_name_prefix: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + max_rows_per_file: Optional[object] = None, + file_name_prefix: Optional[object] = None, + **kwargs + ): + super(ParquetWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'ParquetWriteSettings' # type: str + self.max_rows_per_file = max_rows_per_file + self.file_name_prefix = file_name_prefix class PaypalLinkedService(LinkedService): @@ -24785,6 +27504,9 @@ class PaypalSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -24801,6 +27523,7 @@ class PaypalSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -24812,10 +27535,11 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(PaypalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(PaypalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'PaypalSource' # type: str self.query = query @@ -25046,6 +27770,9 @@ class PhoenixSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
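# --- Illustrative usage sketch (not part of the generated patch) -------------
# ParquetDataset.compression_codec is now typed as 'object', so it can carry
# either a literal codec name or an expression, and ParquetSink gains
# 'format_settings' (ParquetWriteSettings). The LinkedServiceReference shape is
# assumed, as in the earlier MongoDB Atlas sketch.
from azure.synapse.artifacts.models import (
    LinkedServiceReference,
    ParquetDataset,
    ParquetSink,
    ParquetWriteSettings,
)

parquet_ds = ParquetDataset(
    linked_service_name=LinkedServiceReference(
        type="LinkedServiceReference", reference_name="AdlsGen2LS"
    ),
    # A literal such as "snappy" still works; an expression object is now valid too.
    compression_codec={"value": "@pipeline().parameters.codec", "type": "Expression"},
)

parquet_sink = ParquetSink(
    format_settings=ParquetWriteSettings(
        max_rows_per_file=500000,
        file_name_prefix="orders_parquet",
    ),
)
# ------------------------------------------------------------------------------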
:type query: object @@ -25062,6 +27789,7 @@ class PhoenixSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -25073,10 +27801,11 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(PhoenixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(PhoenixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'PhoenixSource' # type: str self.query = query @@ -25549,6 +28278,9 @@ class PostgreSqlSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object """ @@ -25564,6 +28296,7 @@ class PostgreSqlSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -25575,10 +28308,11 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(PostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(PostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'PostgreSqlSource' # type: str self.query = query @@ -25899,6 +28633,9 @@ class PrestoSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. 
Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -25915,6 +28652,7 @@ class PrestoSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -25926,10 +28664,11 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(PrestoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(PrestoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'PrestoSource' # type: str self.query = query @@ -26146,6 +28885,9 @@ class QuickBooksLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] + :param connection_properties: Properties used to connect to QuickBooks. It is mutually + exclusive with any other properties in the linked service. Type: object. + :type connection_properties: object :param endpoint: Required. The endpoint of the QuickBooks server. (i.e. quickbooks.api.intuit.com). :type endpoint: object @@ -26185,6 +28927,7 @@ class QuickBooksLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, 'company_id': {'key': 'typeProperties.companyId', 'type': 'object'}, 'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'object'}, @@ -26209,12 +28952,14 @@ def __init__( description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, + connection_properties: Optional[object] = None, use_encrypted_endpoints: Optional[object] = None, encrypted_credential: Optional[object] = None, **kwargs ): super(QuickBooksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'QuickBooks' # type: str + self.connection_properties = connection_properties self.endpoint = endpoint self.company_id = company_id self.consumer_key = consumer_key @@ -26315,6 +29060,9 @@ class QuickBooksSource(TabularSource): :param query_timeout: Query timeout. 
Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -26331,6 +29079,7 @@ class QuickBooksSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -26342,10 +29091,11 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(QuickBooksSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(QuickBooksSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'QuickBooksSource' # type: str self.query = query @@ -26361,7 +29111,7 @@ class RecurrenceSchedule(msrest.serialization.Model): :param hours: The hours. :type hours: list[int] :param week_days: The days of the week. - :type week_days: list[str or ~azure.synapse.artifacts.models.DayOfWeek] + :type week_days: list[str or ~azure.synapse.artifacts.models.DaysOfWeek] :param month_days: The month days. :type month_days: list[int] :param monthly_occurrences: The monthly occurrences. @@ -26383,7 +29133,7 @@ def __init__( additional_properties: Optional[Dict[str, object]] = None, minutes: Optional[List[int]] = None, hours: Optional[List[int]] = None, - week_days: Optional[List[Union[str, "DayOfWeek"]]] = None, + week_days: Optional[List[Union[str, "DaysOfWeek"]]] = None, month_days: Optional[List[int]] = None, monthly_occurrences: Optional[List["RecurrenceScheduleOccurrence"]] = None, **kwargs @@ -26529,6 +29279,9 @@ class RelationalSource(CopySource): :type max_concurrent_connections: object :param query: Database query. Type: string (or Expression with resultType string). :type query: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). 
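# --- Illustrative usage sketch (not part of the generated patch) -------------
# RecurrenceSchedule.week_days is now annotated against the DaysOfWeek enum
# (previously DayOfWeek); plain strings remain accepted because the parameter
# is typed as a union of str and the enum. All arguments are optional.
from azure.synapse.artifacts.models import RecurrenceSchedule

schedule = RecurrenceSchedule(
    minutes=[0, 30],
    hours=[6, 18],
    week_days=["Monday", "Wednesday", "Friday"],
)
# ------------------------------------------------------------------------------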
+ :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -26542,6 +29295,7 @@ class RelationalSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( @@ -26552,11 +29306,13 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): super(RelationalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'RelationalSource' # type: str self.query = query + self.additional_columns = additional_columns class RelationalTableDataset(Dataset): @@ -26729,7 +29485,7 @@ class RerunTumblingWindowTrigger(Trigger): :vartype runtime_state: str or ~azure.synapse.artifacts.models.TriggerRuntimeState :param annotations: List of tags that can be used for describing the trigger. :type annotations: list[object] - :param parent_trigger: The parent trigger reference. + :param parent_trigger: Required. The parent trigger reference. :type parent_trigger: object :param requested_start_time: Required. The start time for the time period for which restatement is initiated. Only UTC time is currently supported. @@ -26737,17 +29493,18 @@ class RerunTumblingWindowTrigger(Trigger): :param requested_end_time: Required. The end time for the time period for which restatement is initiated. Only UTC time is currently supported. :type requested_end_time: ~datetime.datetime - :param max_concurrency: Required. The max number of parallel time windows (ready for execution) - for which a rerun is triggered. - :type max_concurrency: int + :param rerun_concurrency: Required. The max number of parallel time windows (ready for + execution) for which a rerun is triggered. 
+ :type rerun_concurrency: int """ _validation = { 'type': {'required': True}, 'runtime_state': {'readonly': True}, + 'parent_trigger': {'required': True}, 'requested_start_time': {'required': True}, 'requested_end_time': {'required': True}, - 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + 'rerun_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, } _attribute_map = { @@ -26759,19 +29516,19 @@ class RerunTumblingWindowTrigger(Trigger): 'parent_trigger': {'key': 'typeProperties.parentTrigger', 'type': 'object'}, 'requested_start_time': {'key': 'typeProperties.requestedStartTime', 'type': 'iso-8601'}, 'requested_end_time': {'key': 'typeProperties.requestedEndTime', 'type': 'iso-8601'}, - 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + 'rerun_concurrency': {'key': 'typeProperties.rerunConcurrency', 'type': 'int'}, } def __init__( self, *, + parent_trigger: object, requested_start_time: datetime.datetime, requested_end_time: datetime.datetime, - max_concurrency: int, + rerun_concurrency: int, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, annotations: Optional[List[object]] = None, - parent_trigger: Optional[object] = None, **kwargs ): super(RerunTumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) @@ -26779,7 +29536,7 @@ def __init__( self.parent_trigger = parent_trigger self.requested_start_time = requested_start_time self.requested_end_time = requested_end_time - self.max_concurrency = max_concurrency + self.rerun_concurrency = rerun_concurrency class RerunTumblingWindowTriggerActionParameters(msrest.serialization.Model): @@ -27007,6 +29764,9 @@ class ResponsysSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
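# --- Illustrative usage sketch (not part of the generated patch) -------------
# RerunTumblingWindowTrigger now requires 'parent_trigger' and replaces
# 'max_concurrency' with 'rerun_concurrency' (still limited to 1-50).
# parent_trigger is typed as a plain object; the reference dict shape below is
# an assumption based on the service's trigger-reference payloads.
import datetime

from azure.synapse.artifacts.models import RerunTumblingWindowTrigger

rerun_trigger = RerunTumblingWindowTrigger(
    parent_trigger={"referenceName": "DailyTumblingWindow", "type": "TriggerReference"},
    requested_start_time=datetime.datetime(2021, 3, 1, tzinfo=datetime.timezone.utc),
    requested_end_time=datetime.datetime(2021, 3, 8, tzinfo=datetime.timezone.utc),
    rerun_concurrency=4,
)
# ------------------------------------------------------------------------------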
:type query: object @@ -27023,6 +29783,7 @@ class ResponsysSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -27034,10 +29795,11 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(ResponsysSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(ResponsysSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'ResponsysSource' # type: str self.query = query @@ -27176,6 +29938,10 @@ class RestServiceLinkedService(LinkedService): :param tenant: The tenant information (domain name or tenant ID) used in AadServicePrincipal authentication type under which your application resides. :type tenant: object + :param azure_cloud_type: Indicates the azure cloud type of the service principle auth. Allowed + values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data + factory regions’ cloud type. Type: string (or Expression with resultType string). + :type azure_cloud_type: object :param aad_resource_id: The resource you are requesting authorization to use. :type aad_resource_id: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are @@ -27205,6 +29971,7 @@ class RestServiceLinkedService(LinkedService): 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'aad_resource_id': {'key': 'typeProperties.aadResourceId', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -27225,6 +29992,7 @@ def __init__( service_principal_id: Optional[object] = None, service_principal_key: Optional["SecretBase"] = None, tenant: Optional[object] = None, + azure_cloud_type: Optional[object] = None, aad_resource_id: Optional[object] = None, encrypted_credential: Optional[object] = None, **kwargs @@ -27239,10 +30007,98 @@ def __init__( self.service_principal_id = service_principal_id self.service_principal_key = service_principal_key self.tenant = tenant + self.azure_cloud_type = azure_cloud_type self.aad_resource_id = aad_resource_id self.encrypted_credential = encrypted_credential +class RestSink(CopySink): + """A copy activity Rest service Sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. 
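# --- Illustrative usage sketch (not part of the generated patch) -------------
# RestServiceLinkedService gains an optional 'azure_cloud_type', used with
# AadServicePrincipal authentication. The required 'url' and
# 'authentication_type' properties and the SecureString secret wrapper are not
# shown in this hunk and are assumed from the rest of the model set.
from azure.synapse.artifacts.models import RestServiceLinkedService, SecureString

rest_ls = RestServiceLinkedService(
    url="https://api.contoso.com/odata",
    authentication_type="AadServicePrincipal",
    service_principal_id="00000000-0000-0000-0000-000000000000",
    service_principal_key=SecureString(value="<service-principal-secret>"),
    tenant="contoso.onmicrosoft.com",
    azure_cloud_type="AzurePublic",
)
# ------------------------------------------------------------------------------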
+ :type additional_properties: dict[str, object]
+ :param type: Required. Copy sink type.Constant filled by server.
+ :type type: str
+ :param write_batch_size: Write batch size. Type: integer (or Expression with resultType
+ integer), minimum: 0.
+ :type write_batch_size: object
+ :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType
+ string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type write_batch_timeout: object
+ :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType
+ integer).
+ :type sink_retry_count: object
+ :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string),
+ pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type sink_retry_wait: object
+ :param max_concurrent_connections: The maximum concurrent connection count for the sink data
+ store. Type: integer (or Expression with resultType integer).
+ :type max_concurrent_connections: object
+ :param request_method: The HTTP method used to call the RESTful API. The default is POST. Type:
+ string (or Expression with resultType string).
+ :type request_method: object
+ :param additional_headers: The additional HTTP headers in the request to the RESTful API. Type:
+ string (or Expression with resultType string).
+ :type additional_headers: object
+ :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout
+ to get a response, not the timeout to read response data. Default value: 00:01:40. Type: string
+ (or Expression with resultType string), pattern:
+ ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ :type http_request_timeout: object
+ :param request_interval: The time to wait before sending the next request, in milliseconds.
+ :type request_interval: object
+ :param http_compression_type: The HTTP compression type used to send data in compressed format
+ with optimal compression level. The default is None; the only supported option is Gzip.
+ :type http_compression_type: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'request_method': {'key': 'requestMethod', 'type': 'object'}, + 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, + 'request_interval': {'key': 'requestInterval', 'type': 'object'}, + 'http_compression_type': {'key': 'httpCompressionType', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + request_method: Optional[object] = None, + additional_headers: Optional[object] = None, + http_request_timeout: Optional[object] = None, + request_interval: Optional[object] = None, + http_compression_type: Optional[object] = None, + **kwargs + ): + super(RestSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'RestSink' # type: str + self.request_method = request_method + self.additional_headers = additional_headers + self.http_request_timeout = http_request_timeout + self.request_interval = request_interval + self.http_compression_type = http_compression_type + + class RestSource(CopySource): """A copy activity Rest service source. @@ -27281,6 +30137,9 @@ class RestSource(CopySource): :type http_request_timeout: object :param request_interval: The time to await before sending next page request. :type request_interval: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). 
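# --- Illustrative usage sketch (not part of the generated patch) -------------
# RestSink is new in this patch; every constructor argument below is optional
# and typed as 'object', so literals or expressions can be supplied. The "gzip"
# literal for http_compression_type is an assumption (the docstring only states
# that Gzip is the single supported option).
from azure.synapse.artifacts.models import RestSink

rest_sink = RestSink(
    request_method="POST",
    additional_headers="Content-Type: application/json",
    http_request_timeout="00:01:40",   # response timeout, not read timeout
    request_interval=100,              # milliseconds between requests
    http_compression_type="gzip",
)
# ------------------------------------------------------------------------------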
+ :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -27299,6 +30158,7 @@ class RestSource(CopySource): 'pagination_rules': {'key': 'paginationRules', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, 'request_interval': {'key': 'requestInterval', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( @@ -27314,6 +30174,7 @@ def __init__( pagination_rules: Optional[object] = None, http_request_timeout: Optional[object] = None, request_interval: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): super(RestSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) @@ -27324,6 +30185,7 @@ def __init__( self.pagination_rules = pagination_rules self.http_request_timeout = http_request_timeout self.request_interval = request_interval + self.additional_columns = additional_columns class RetryPolicy(msrest.serialization.Model): @@ -27519,8 +30381,11 @@ class SalesforceLinkedService(LinkedService): :type username: object :param password: The password for Basic authentication of the Salesforce instance. :type password: ~azure.synapse.artifacts.models.SecretBase - :param security_token: The security token is required to remotely access Salesforce instance. + :param security_token: The security token is optional to remotely access Salesforce instance. :type security_token: ~azure.synapse.artifacts.models.SecretBase + :param api_version: The Salesforce API version used in ADF. Type: string (or Expression with + resultType string). + :type api_version: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -27542,6 +30407,7 @@ class SalesforceLinkedService(LinkedService): 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, + 'api_version': {'key': 'typeProperties.apiVersion', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -27557,6 +30423,7 @@ def __init__( username: Optional[object] = None, password: Optional["SecretBase"] = None, security_token: Optional["SecretBase"] = None, + api_version: Optional[object] = None, encrypted_credential: Optional[object] = None, **kwargs ): @@ -27566,6 +30433,7 @@ def __init__( self.username = username self.password = password self.security_token = security_token + self.api_version = api_version self.encrypted_credential = encrypted_credential @@ -27587,6 +30455,9 @@ class SalesforceMarketingCloudLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] + :param connection_properties: Properties used to connect to Salesforce Marketing Cloud. It is + mutually exclusive with any other properties in the linked service. Type: object. + :type connection_properties: object :param client_id: Required. 
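# --- Illustrative usage sketch (not part of the generated patch) -------------
# SalesforceLinkedService (and its Service Cloud counterpart) gains an optional
# 'api_version' type property, and 'security_token' is now documented as
# optional rather than required. SecureString is assumed as the secret wrapper,
# as elsewhere in the model set.
from azure.synapse.artifacts.models import SalesforceLinkedService, SecureString

sf_ls = SalesforceLinkedService(
    username="integration.user@contoso.com",
    password=SecureString(value="<password>"),
    api_version="47.0",
    # security_token is omitted here; it is no longer described as required.
)
# ------------------------------------------------------------------------------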
The client ID associated with the Salesforce Marketing Cloud application. Type: string (or Expression with resultType string). :type client_id: object @@ -27622,6 +30493,7 @@ class SalesforceMarketingCloudLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, @@ -27639,6 +30511,7 @@ def __init__( description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, + connection_properties: Optional[object] = None, client_secret: Optional["SecretBase"] = None, use_encrypted_endpoints: Optional[object] = None, use_host_verification: Optional[object] = None, @@ -27648,6 +30521,7 @@ def __init__( ): super(SalesforceMarketingCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'SalesforceMarketingCloud' # type: str + self.connection_properties = connection_properties self.client_id = client_id self.client_secret = client_secret self.use_encrypted_endpoints = use_encrypted_endpoints @@ -27746,6 +30620,9 @@ class SalesforceMarketingCloudSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -27762,6 +30639,7 @@ class SalesforceMarketingCloudSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -27773,10 +30651,11 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(SalesforceMarketingCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(SalesforceMarketingCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SalesforceMarketingCloudSource' # type: str self.query = query @@ -27878,8 +30757,11 @@ class SalesforceServiceCloudLinkedService(LinkedService): :type username: object :param password: The password for Basic authentication of the Salesforce instance. :type password: ~azure.synapse.artifacts.models.SecretBase - :param security_token: The security token is required to remotely access Salesforce instance. + :param security_token: The security token is optional to remotely access Salesforce instance. :type security_token: ~azure.synapse.artifacts.models.SecretBase + :param api_version: The Salesforce API version used in ADF. Type: string (or Expression with + resultType string). + :type api_version: object :param extended_properties: Extended properties appended to the connection string. Type: string (or Expression with resultType string). :type extended_properties: object @@ -27904,6 +30786,7 @@ class SalesforceServiceCloudLinkedService(LinkedService): 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, + 'api_version': {'key': 'typeProperties.apiVersion', 'type': 'object'}, 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -27920,6 +30803,7 @@ def __init__( username: Optional[object] = None, password: Optional["SecretBase"] = None, security_token: Optional["SecretBase"] = None, + api_version: Optional[object] = None, extended_properties: Optional[object] = None, encrypted_credential: Optional[object] = None, **kwargs @@ -27930,6 +30814,7 @@ def __init__( self.username = username self.password = password self.security_token = security_token + self.api_version = api_version self.extended_properties = extended_properties self.encrypted_credential = encrypted_credential @@ -28105,6 +30990,9 @@ class SalesforceServiceCloudSource(CopySource): :param read_behavior: The read behavior for the operation. Default is Query. Possible values include: "Query", "QueryAll". 
:type read_behavior: str or ~azure.synapse.artifacts.models.SalesforceSourceReadBehavior + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -28119,6 +31007,7 @@ class SalesforceServiceCloudSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( @@ -28130,12 +31019,14 @@ def __init__( max_concurrent_connections: Optional[object] = None, query: Optional[object] = None, read_behavior: Optional[Union[str, "SalesforceSourceReadBehavior"]] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): super(SalesforceServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'SalesforceServiceCloudSource' # type: str self.query = query self.read_behavior = read_behavior + self.additional_columns = additional_columns class SalesforceSink(CopySink): @@ -28238,6 +31129,9 @@ class SalesforceSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object :param read_behavior: The read behavior for the operation. Default is Query. 
Possible values @@ -28256,6 +31150,7 @@ class SalesforceSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, } @@ -28268,11 +31163,12 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, read_behavior: Optional[Union[str, "SalesforceSourceReadBehavior"]] = None, **kwargs ): - super(SalesforceSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(SalesforceSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SalesforceSource' # type: str self.query = query self.read_behavior = read_behavior @@ -28449,6 +31345,9 @@ class SapBwSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: MDX query. Type: string (or Expression with resultType string). :type query: object """ @@ -28464,6 +31363,7 @@ class SapBwSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -28475,10 +31375,11 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(SapBwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(SapBwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SapBwSource' # type: str self.query = query @@ -28655,6 +31556,11 @@ class SapCloudForCustomerSink(CopySink): values include: "Insert", "Update". 
:type write_behavior: str or ~azure.synapse.artifacts.models.SapCloudForCustomerSinkWriteBehavior + :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout + to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string + (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type http_request_timeout: object """ _validation = { @@ -28670,6 +31576,7 @@ class SapCloudForCustomerSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'write_behavior': {'key': 'writeBehavior', 'type': 'str'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } def __init__( @@ -28682,11 +31589,13 @@ def __init__( sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, write_behavior: Optional[Union[str, "SapCloudForCustomerSinkWriteBehavior"]] = None, + http_request_timeout: Optional[object] = None, **kwargs ): super(SapCloudForCustomerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'SapCloudForCustomerSink' # type: str self.write_behavior = write_behavior + self.http_request_timeout = http_request_timeout class SapCloudForCustomerSource(TabularSource): @@ -28711,9 +31620,17 @@ class SapCloudForCustomerSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or Expression with resultType string). :type query: object + :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout + to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string + (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type http_request_timeout: object """ _validation = { @@ -28727,7 +31644,9 @@ class SapCloudForCustomerSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } def __init__( @@ -28738,12 +31657,15 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, + http_request_timeout: Optional[object] = None, **kwargs ): - super(SapCloudForCustomerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(SapCloudForCustomerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SapCloudForCustomerSource' # type: str self.query = query + self.http_request_timeout = http_request_timeout class SapEccLinkedService(LinkedService): @@ -28911,9 +31833,17 @@ class SapEccSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: SAP ECC OData query. For example, "$top=1". Type: string (or Expression with resultType string). :type query: object + :param http_request_timeout: The timeout (TimeSpan) to get an HTTP response. It is the timeout + to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string + (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type http_request_timeout: object """ _validation = { @@ -28927,7 +31857,9 @@ class SapEccSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, } def __init__( @@ -28938,12 +31870,15 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, + http_request_timeout: Optional[object] = None, **kwargs ): - super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SapEccSource' # type: str self.query = query + self.http_request_timeout = http_request_timeout class SapHanaLinkedService(LinkedService): @@ -29074,6 +32009,9 @@ class SapHanaSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: SAP HANA Sql query. Type: string (or Expression with resultType string). :type query: object :param packet_size: The packet size of data read from SAP HANA. 
Type: integer(or Expression @@ -29098,6 +32036,7 @@ class SapHanaSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'packet_size': {'key': 'packetSize', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, @@ -29112,13 +32051,14 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, packet_size: Optional[object] = None, partition_option: Optional[Union[str, "SapHanaPartitionOption"]] = None, partition_settings: Optional["SapHanaPartitionSettings"] = None, **kwargs ): - super(SapHanaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(SapHanaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SapHanaSource' # type: str self.query = query self.packet_size = packet_size @@ -29232,12 +32172,24 @@ class SapOpenHubLinkedService(LinkedService): :param language: Language of the BW system where the open hub destination is located. The default value is EN. Type: string (or Expression with resultType string). :type language: object + :param system_id: SystemID of the SAP system where the table is located. Type: string (or + Expression with resultType string). + :type system_id: object :param user_name: Username to access the SAP BW server where the open hub destination is located. Type: string (or Expression with resultType string). :type user_name: object :param password: Password to access the SAP BW server where the open hub destination is located. :type password: ~azure.synapse.artifacts.models.SecretBase + :param message_server: The hostname of the SAP Message Server. Type: string (or Expression with + resultType string). + :type message_server: object + :param message_server_service: The service name or port number of the Message Server. Type: + string (or Expression with resultType string). + :type message_server_service: object + :param logon_group: The Logon Group for the SAP System. Type: string (or Expression with + resultType string). + :type logon_group: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
@@ -29262,8 +32214,12 @@ class SapOpenHubLinkedService(LinkedService): 'system_number': {'key': 'typeProperties.systemNumber', 'type': 'object'}, 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, 'language': {'key': 'typeProperties.language', 'type': 'object'}, + 'system_id': {'key': 'typeProperties.systemId', 'type': 'object'}, 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'message_server': {'key': 'typeProperties.messageServer', 'type': 'object'}, + 'message_server_service': {'key': 'typeProperties.messageServerService', 'type': 'object'}, + 'logon_group': {'key': 'typeProperties.logonGroup', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -29279,8 +32235,12 @@ def __init__( parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, language: Optional[object] = None, + system_id: Optional[object] = None, user_name: Optional[object] = None, password: Optional["SecretBase"] = None, + message_server: Optional[object] = None, + message_server_service: Optional[object] = None, + logon_group: Optional[object] = None, encrypted_credential: Optional[object] = None, **kwargs ): @@ -29290,8 +32250,12 @@ def __init__( self.system_number = system_number self.client_id = client_id self.language = language + self.system_id = system_id self.user_name = user_name self.password = password + self.message_server = message_server + self.message_server_service = message_server_service + self.logon_group = logon_group self.encrypted_credential = encrypted_credential @@ -29317,6 +32281,9 @@ class SapOpenHubSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param exclude_last_request: Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with resultType boolean). :type exclude_last_request: object @@ -29324,6 +32291,13 @@ class SapOpenHubSource(TabularSource): requestId larger than the value of this property will be retrieved. The default value is 0. Type: integer (or Expression with resultType integer ). :type base_request_id: object + :param custom_rfc_read_table_function_module: Specifies the custom RFC function module that + will be used to read data from SAP Table. Type: string (or Expression with resultType string). + :type custom_rfc_read_table_function_module: object + :param sap_data_column_delimiter: The single character that will be used as delimiter passed to + SAP RFC as well as splitting the output data retrieved. Type: string (or Expression with + resultType string). 
+ :type sap_data_column_delimiter: object """ _validation = { @@ -29337,8 +32311,11 @@ class SapOpenHubSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'exclude_last_request': {'key': 'excludeLastRequest', 'type': 'object'}, 'base_request_id': {'key': 'baseRequestId', 'type': 'object'}, + 'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'}, + 'sap_data_column_delimiter': {'key': 'sapDataColumnDelimiter', 'type': 'object'}, } def __init__( @@ -29349,14 +32326,19 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, exclude_last_request: Optional[object] = None, base_request_id: Optional[object] = None, + custom_rfc_read_table_function_module: Optional[object] = None, + sap_data_column_delimiter: Optional[object] = None, **kwargs ): - super(SapOpenHubSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(SapOpenHubSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SapOpenHubSource' # type: str self.exclude_last_request = exclude_last_request self.base_request_id = base_request_id + self.custom_rfc_read_table_function_module = custom_rfc_read_table_function_module + self.sap_data_column_delimiter = sap_data_column_delimiter class SapOpenHubTableDataset(Dataset): @@ -29721,6 +32703,9 @@ class SapTableSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param row_count: The number of rows to be retrieved. Type: integer(or Expression with resultType integer). :type row_count: object @@ -29739,6 +32724,10 @@ class SapTableSource(TabularSource): :param custom_rfc_read_table_function_module: Specifies the custom RFC function module that will be used to read data from SAP Table. Type: string (or Expression with resultType string). :type custom_rfc_read_table_function_module: object + :param sap_data_column_delimiter: The single character that will be used as delimiter passed to + SAP RFC as well as splitting the output data retrieved. Type: string (or Expression with + resultType string). + :type sap_data_column_delimiter: object :param partition_option: The partition mechanism that will be used for SAP table read in parallel. Possible values include: "None", "PartitionOnInt", "PartitionOnCalendarYear", "PartitionOnCalendarMonth", "PartitionOnCalendarDate", "PartitionOnTime". 
@@ -29759,12 +32748,14 @@ class SapTableSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'row_count': {'key': 'rowCount', 'type': 'object'}, 'row_skips': {'key': 'rowSkips', 'type': 'object'}, 'rfc_table_fields': {'key': 'rfcTableFields', 'type': 'object'}, 'rfc_table_options': {'key': 'rfcTableOptions', 'type': 'object'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'}, + 'sap_data_column_delimiter': {'key': 'sapDataColumnDelimiter', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'SapTablePartitionSettings'}, } @@ -29777,17 +32768,19 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, row_count: Optional[object] = None, row_skips: Optional[object] = None, rfc_table_fields: Optional[object] = None, rfc_table_options: Optional[object] = None, batch_size: Optional[object] = None, custom_rfc_read_table_function_module: Optional[object] = None, + sap_data_column_delimiter: Optional[object] = None, partition_option: Optional[Union[str, "SapTablePartitionOption"]] = None, partition_settings: Optional["SapTablePartitionSettings"] = None, **kwargs ): - super(SapTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(SapTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SapTableSource' # type: str self.row_count = row_count self.row_skips = row_skips @@ -29795,6 +32788,7 @@ def __init__( self.rfc_table_options = rfc_table_options self.batch_size = batch_size self.custom_rfc_read_table_function_module = custom_rfc_read_table_function_module + self.sap_data_column_delimiter = sap_data_column_delimiter self.partition_option = partition_option self.partition_settings = partition_settings @@ -30002,7 +32996,7 @@ class SelfDependencyTumblingWindowTriggerReference(DependencyReference): _validation = { 'type': {'required': True}, - 'offset': {'required': True, 'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + 'offset': {'required': True, 'max_length': 15, 'min_length': 8, 'pattern': r'-((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, 'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, } @@ -30265,6 +33259,9 @@ class ServiceNowSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. 
Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -30281,6 +33278,7 @@ class ServiceNowSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -30292,10 +33290,11 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(ServiceNowSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(ServiceNowSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'ServiceNowSource' # type: str self.query = query @@ -30421,6 +33420,18 @@ class SftpReadSettings(StoreReadSettings): :param wildcard_file_name: Sftp wildcardFileName. Type: string (or Expression with resultType string). :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition discovery. + :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition discovery starts from. Type: + string (or Expression with resultType string). + :type partition_root_path: object + :param file_list_path: Point to a text file that lists each file (relative path to the path + configured in the dataset) that you want to copy. Type: string (or Expression with resultType + string). + :type file_list_path: object + :param delete_files_after_completion: Indicates whether the source files need to be deleted + after copy completion. Default is false. Type: boolean (or Expression with resultType boolean). + :type delete_files_after_completion: object :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). 
:type modified_datetime_start: object @@ -30440,6 +33451,10 @@ class SftpReadSettings(StoreReadSettings): 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, + 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } @@ -30452,6 +33467,10 @@ def __init__( recursive: Optional[object] = None, wildcard_folder_path: Optional[object] = None, wildcard_file_name: Optional[object] = None, + enable_partition_discovery: Optional[bool] = None, + partition_root_path: Optional[object] = None, + file_list_path: Optional[object] = None, + delete_files_after_completion: Optional[object] = None, modified_datetime_start: Optional[object] = None, modified_datetime_end: Optional[object] = None, **kwargs @@ -30461,6 +33480,10 @@ def __init__( self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.partition_root_path = partition_root_path + self.file_list_path = file_list_path + self.delete_files_after_completion = delete_files_after_completion self.modified_datetime_start = modified_datetime_start self.modified_datetime_end = modified_datetime_end @@ -30601,6 +33624,10 @@ class SftpWriteSettings(StoreWriteSettings): :param operation_timeout: Specifies the timeout for writing each chunk to SFTP server. Default value: 01:00:00 (one hour). Type: string (or Expression with resultType string). :type operation_timeout: object + :param use_temp_file_rename: Upload to temporary file(s) and rename. Disable this option if + your SFTP server doesn't support rename operation. Type: boolean (or Expression with resultType + boolean). + :type use_temp_file_rename: object """ _validation = { @@ -30613,6 +33640,7 @@ class SftpWriteSettings(StoreWriteSettings): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'operation_timeout': {'key': 'operationTimeout', 'type': 'object'}, + 'use_temp_file_rename': {'key': 'useTempFileRename', 'type': 'object'}, } def __init__( @@ -30622,11 +33650,226 @@ def __init__( max_concurrent_connections: Optional[object] = None, copy_behavior: Optional[object] = None, operation_timeout: Optional[object] = None, + use_temp_file_rename: Optional[object] = None, **kwargs ): super(SftpWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) self.type = 'SftpWriteSettings' # type: str self.operation_timeout = operation_timeout + self.use_temp_file_rename = use_temp_file_rename + + +class SharePointOnlineListLinkedService(LinkedService): + """SharePoint Online List linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param site_url: Required. The URL of the SharePoint Online site. For example, + https://contoso.sharepoint.com/sites/siteName. Type: string (or Expression with resultType + string). + :type site_url: object + :param tenant_id: Required. The tenant ID under which your application resides. You can find it + from Azure portal Active Directory overview page. Type: string (or Expression with resultType + string). + :type tenant_id: object + :param service_principal_id: Required. The application (client) ID of your application + registered in Azure Active Directory. Make sure to grant SharePoint site permission to this + application. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. The client secret of your application registered in + Azure Active Directory. Type: string (or Expression with resultType string). + :type service_principal_key: ~azure.synapse.artifacts.models.SecretBase + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'site_url': {'required': True}, + 'tenant_id': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'site_url': {'key': 'typeProperties.siteUrl', 'type': 'object'}, + 'tenant_id': {'key': 'typeProperties.tenantId', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + site_url: object, + tenant_id: object, + service_principal_id: object, + service_principal_key: "SecretBase", + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(SharePointOnlineListLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'SharePointOnlineList' # type: str + self.site_url = site_url + self.tenant_id = tenant_id + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + self.encrypted_credential = encrypted_credential + + +class SharePointOnlineListResourceDataset(Dataset): + """The sharepoint online list resource dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param list_name: The name of the SharePoint Online list. 
Type: string (or Expression with + resultType string). + :type list_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'list_name': {'key': 'typeProperties.listName', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + list_name: Optional[object] = None, + **kwargs + ): + super(SharePointOnlineListResourceDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'SharePointOnlineListResource' # type: str + self.list_name = list_name + + +class SharePointOnlineListSource(CopySource): + """A copy activity source for sharePoint online list source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: The OData query to filter the data in SharePoint Online list. For example, + "$top=1". Type: string (or Expression with resultType string). + :type query: object + :param http_request_timeout: The wait time to get a response from SharePoint Online. Default + value is 5 minutes (00:05:00). Type: string (or Expression with resultType string), pattern: + ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type http_request_timeout: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query: Optional[object] = None, + http_request_timeout: Optional[object] = None, + **kwargs + ): + super(SharePointOnlineListSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'SharePointOnlineListSource' # type: str + self.query = query + self.http_request_timeout = http_request_timeout class ShopifyLinkedService(LinkedService): @@ -30741,8 +33984,202 @@ class ShopifyObjectDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~azure.synapse.artifacts.models.DatasetFolder - :param table_name: The table name. Type: string (or Expression with resultType string). - :type table_name: object + :param table_name: The table name. Type: string (or Expression with resultType string). + :type table_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + **kwargs + ): + super(ShopifyObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'ShopifyObject' # type: str + self.table_name = table_name + + +class ShopifySource(TabularSource): + """A copy activity Shopify Service source. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] + :param query: A query to retrieve data from source. Type: string (or Expression with resultType + string). + :type query: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + 'query': {'key': 'query', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, + query: Optional[object] = None, + **kwargs + ): + super(ShopifySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + self.type = 'ShopifySource' # type: str + self.query = query + + +class SkipErrorFile(msrest.serialization.Model): + """Skip error file. + + :param file_missing: Skip if file is deleted by other client during copy. Default is true. + Type: boolean (or Expression with resultType boolean). + :type file_missing: object + :param data_inconsistency: Skip if source/sink file changed by other concurrent write. Default + is false. Type: boolean (or Expression with resultType boolean). 
+ :type data_inconsistency: object + """ + + _attribute_map = { + 'file_missing': {'key': 'fileMissing', 'type': 'object'}, + 'data_inconsistency': {'key': 'dataInconsistency', 'type': 'object'}, + } + + def __init__( + self, + *, + file_missing: Optional[object] = None, + data_inconsistency: Optional[object] = None, + **kwargs + ): + super(SkipErrorFile, self).__init__(**kwargs) + self.file_missing = file_missing + self.data_inconsistency = data_inconsistency + + +class Sku(msrest.serialization.Model): + """SQL pool SKU. + + :param tier: The service tier. + :type tier: str + :param name: The SKU name. + :type name: str + :param capacity: If the SKU supports scale out/in then the capacity integer should be included. + If scale out/in is not possible for the resource this may be omitted. + :type capacity: int + """ + + _attribute_map = { + 'tier': {'key': 'tier', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'capacity': {'key': 'capacity', 'type': 'int'}, + } + + def __init__( + self, + *, + tier: Optional[str] = None, + name: Optional[str] = None, + capacity: Optional[int] = None, + **kwargs + ): + super(Sku, self).__init__(**kwargs) + self.tier = tier + self.name = name + self.capacity = capacity + + +class SnowflakeDataset(Dataset): + """The snowflake dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param schema_type_properties_schema: The schema name of the Snowflake database. Type: string + (or Expression with resultType string). + :type schema_type_properties_schema: object + :param table: The table name of the Snowflake database. Type: string (or Expression with + resultType string). 
+ :type table: object """ _validation = { @@ -30760,7 +34197,8 @@ class ShopifyObjectDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'schema_type_properties_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, } def __init__( @@ -30774,16 +34212,245 @@ def __init__( parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, + schema_type_properties_schema: Optional[object] = None, + table: Optional[object] = None, **kwargs ): - super(ShopifyObjectDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'ShopifyObject' # type: str - self.table_name = table_name + super(SnowflakeDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'SnowflakeTable' # type: str + self.schema_type_properties_schema = schema_type_properties_schema + self.table = table -class ShopifySource(TabularSource): - """A copy activity Shopify Service source. +class SnowflakeExportCopyCommand(ExportSettings): + """Snowflake export command settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The export setting type.Constant filled by server. + :type type: str + :param additional_copy_options: Additional copy options directly passed to snowflake Copy + Command. Type: key value pairs (value should be string type) (or Expression with resultType + object). Example: "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": + "'HH24:MI:SS.FF'" }. + :type additional_copy_options: dict[str, object] + :param additional_format_options: Additional format options directly passed to snowflake Copy + Command. Type: key value pairs (value should be string type) (or Expression with resultType + object). Example: "additionalFormatOptions": { "OVERWRITE": "TRUE", "MAX_FILE_SIZE": "'FALSE'" + }. 
+ :type additional_format_options: dict[str, object] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'additional_copy_options': {'key': 'additionalCopyOptions', 'type': '{object}'}, + 'additional_format_options': {'key': 'additionalFormatOptions', 'type': '{object}'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + additional_copy_options: Optional[Dict[str, object]] = None, + additional_format_options: Optional[Dict[str, object]] = None, + **kwargs + ): + super(SnowflakeExportCopyCommand, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'SnowflakeExportCopyCommand' # type: str + self.additional_copy_options = additional_copy_options + self.additional_format_options = additional_format_options + + +class SnowflakeImportCopyCommand(ImportSettings): + """Snowflake import command settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The import setting type.Constant filled by server. + :type type: str + :param additional_copy_options: Additional copy options directly passed to snowflake Copy + Command. Type: key value pairs (value should be string type) (or Expression with resultType + object). Example: "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": + "'HH24:MI:SS.FF'" }. + :type additional_copy_options: dict[str, object] + :param additional_format_options: Additional format options directly passed to snowflake Copy + Command. Type: key value pairs (value should be string type) (or Expression with resultType + object). Example: "additionalFormatOptions": { "FORCE": "TRUE", "LOAD_UNCERTAIN_FILES": + "'FALSE'" }. + :type additional_format_options: dict[str, object] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'additional_copy_options': {'key': 'additionalCopyOptions', 'type': '{object}'}, + 'additional_format_options': {'key': 'additionalFormatOptions', 'type': '{object}'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + additional_copy_options: Optional[Dict[str, object]] = None, + additional_format_options: Optional[Dict[str, object]] = None, + **kwargs + ): + super(SnowflakeImportCopyCommand, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'SnowflakeImportCopyCommand' # type: str + self.additional_copy_options = additional_copy_options + self.additional_format_options = additional_format_options + + +class SnowflakeLinkedService(LinkedService): + """Snowflake linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~azure.synapse.artifacts.models.IntegrationRuntimeReference + :param description: Linked service description. 
+ :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: Required. The connection string of snowflake. Type: string, + SecureString. + :type connection_string: object + :param password: The Azure key vault secret reference of password in connection string. + :type password: ~azure.synapse.artifacts.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + connection_string: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + password: Optional["AzureKeyVaultSecretReference"] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(SnowflakeLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'Snowflake' # type: str + self.connection_string = connection_string + self.password = password + self.encrypted_credential = encrypted_credential + + +class SnowflakeSink(CopySink): + """A copy activity snowflake sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :type pre_copy_script: object + :param import_settings: Snowflake import settings. + :type import_settings: ~azure.synapse.artifacts.models.SnowflakeImportCopyCommand + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'import_settings': {'key': 'importSettings', 'type': 'SnowflakeImportCopyCommand'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + pre_copy_script: Optional[object] = None, + import_settings: Optional["SnowflakeImportCopyCommand"] = None, + **kwargs + ): + super(SnowflakeSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'SnowflakeSink' # type: str + self.pre_copy_script = pre_copy_script + self.import_settings = import_settings + + +class SnowflakeSource(CopySource): + """A copy activity snowflake source. All required parameters must be populated in order to send to Azure. @@ -30801,12 +34468,10 @@ class ShopifySource(TabularSource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). :type max_concurrent_connections: object - :param query_timeout: Query timeout. Type: string (or Expression with resultType string), - pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). - :type query_timeout: object - :param query: A query to retrieve data from source. Type: string (or Expression with resultType - string). + :param query: Snowflake Sql query. Type: string (or Expression with resultType string). :type query: object + :param export_settings: Snowflake export settings. 
+ :type export_settings: ~azure.synapse.artifacts.models.SnowflakeExportCopyCommand """ _validation = { @@ -30819,8 +34484,8 @@ class ShopifySource(TabularSource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'query': {'key': 'query', 'type': 'object'}, + 'export_settings': {'key': 'exportSettings', 'type': 'SnowflakeExportCopyCommand'}, } def __init__( @@ -30830,45 +34495,14 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, - query_timeout: Optional[object] = None, query: Optional[object] = None, + export_settings: Optional["SnowflakeExportCopyCommand"] = None, **kwargs ): - super(ShopifySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) - self.type = 'ShopifySource' # type: str + super(SnowflakeSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'SnowflakeSource' # type: str self.query = query - - -class Sku(msrest.serialization.Model): - """SQL pool SKU. - - :param tier: The service tier. - :type tier: str - :param name: The SKU name. - :type name: str - :param capacity: If the SKU supports scale out/in then the capacity integer should be included. - If scale out/in is not possible for the resource this may be omitted. - :type capacity: int - """ - - _attribute_map = { - 'tier': {'key': 'tier', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'capacity': {'key': 'capacity', 'type': 'int'}, - } - - def __init__( - self, - *, - tier: Optional[str] = None, - name: Optional[str] = None, - capacity: Optional[int] = None, - **kwargs - ): - super(Sku, self).__init__(**kwargs) - self.tier = tier - self.name = name - self.capacity = capacity + self.export_settings = export_settings class SparkBatchJob(msrest.serialization.Model): @@ -31718,6 +35352,9 @@ class SparkSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -31734,6 +35371,7 @@ class SparkSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -31745,10 +35383,11 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(SparkSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(SparkSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SparkSource' # type: str self.query = query @@ -31906,6 +35545,9 @@ class SqlDWSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param sql_reader_query: SQL Data Warehouse reader query. Type: string (or Expression with resultType string). :type sql_reader_query: object @@ -31917,6 +35559,11 @@ class SqlDWSource(TabularSource): Example: "{Parameter1: {value: "1", type: "int"}}". Type: object (or Expression with resultType object), itemType: StoredProcedureParameter. :type stored_procedure_parameters: object + :param partition_option: The partition mechanism that will be used for Sql read in parallel. + Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". + :type partition_option: object + :param partition_settings: The settings that will be leveraged for Sql source partitioning. 
+ :type partition_settings: ~azure.synapse.artifacts.models.SqlPartitionSettings """ _validation = { @@ -31930,9 +35577,12 @@ class SqlDWSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, } def __init__( @@ -31943,16 +35593,21 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, sql_reader_query: Optional[object] = None, sql_reader_stored_procedure_name: Optional[object] = None, stored_procedure_parameters: Optional[object] = None, + partition_option: Optional[object] = None, + partition_settings: Optional["SqlPartitionSettings"] = None, **kwargs ): - super(SqlDWSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(SqlDWSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SqlDWSource' # type: str self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters + self.partition_option = partition_option + self.partition_settings = partition_settings class SqlMISink(CopySink): @@ -32069,6 +35724,9 @@ class SqlMISource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). :type sql_reader_query: object :param sql_reader_stored_procedure_name: Name of the stored procedure for a Azure SQL Managed @@ -32081,6 +35739,11 @@ class SqlMISource(TabularSource): ~azure.synapse.artifacts.models.StoredProcedureParameter] :param produce_additional_types: Which additional types to produce. :type produce_additional_types: object + :param partition_option: The partition mechanism that will be used for Sql read in parallel. + Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". + :type partition_option: object + :param partition_settings: The settings that will be leveraged for Sql source partitioning. 
+ :type partition_settings: ~azure.synapse.artifacts.models.SqlPartitionSettings """ _validation = { @@ -32094,10 +35757,13 @@ class SqlMISource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, } def __init__( @@ -32108,18 +35774,62 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, sql_reader_query: Optional[object] = None, sql_reader_stored_procedure_name: Optional[object] = None, stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, produce_additional_types: Optional[object] = None, + partition_option: Optional[object] = None, + partition_settings: Optional["SqlPartitionSettings"] = None, **kwargs ): - super(SqlMISource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(SqlMISource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SqlMISource' # type: str self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters self.produce_additional_types = produce_additional_types + self.partition_option = partition_option + self.partition_settings = partition_settings + + +class SqlPartitionSettings(msrest.serialization.Model): + """The settings that will be leveraged for Sql source partitioning. + + :param partition_column_name: The name of the column in integer or datetime type that will be + used for proceeding partitioning. If not specified, the primary key of the table is auto- + detected and used as the partition column. Type: string (or Expression with resultType string). + :type partition_column_name: object + :param partition_upper_bound: The maximum value of the partition column for partition range + splitting. This value is used to decide the partition stride, not for filtering the rows in + table. All rows in the table or query result will be partitioned and copied. Type: string (or + Expression with resultType string). + :type partition_upper_bound: object + :param partition_lower_bound: The minimum value of the partition column for partition range + splitting. This value is used to decide the partition stride, not for filtering the rows in + table. 
All rows in the table or query result will be partitioned and copied. Type: string (or + Expression with resultType string). + :type partition_lower_bound: object + """ + + _attribute_map = { + 'partition_column_name': {'key': 'partitionColumnName', 'type': 'object'}, + 'partition_upper_bound': {'key': 'partitionUpperBound', 'type': 'object'}, + 'partition_lower_bound': {'key': 'partitionLowerBound', 'type': 'object'}, + } + + def __init__( + self, + *, + partition_column_name: Optional[object] = None, + partition_upper_bound: Optional[object] = None, + partition_lower_bound: Optional[object] = None, + **kwargs + ): + super(SqlPartitionSettings, self).__init__(**kwargs) + self.partition_column_name = partition_column_name + self.partition_upper_bound = partition_upper_bound + self.partition_lower_bound = partition_lower_bound class SqlPool(TrackedResource): @@ -32729,6 +36439,9 @@ class SqlServerSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). :type sql_reader_query: object :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database @@ -32741,6 +36454,11 @@ class SqlServerSource(TabularSource): ~azure.synapse.artifacts.models.StoredProcedureParameter] :param produce_additional_types: Which additional types to produce. :type produce_additional_types: object + :param partition_option: The partition mechanism that will be used for Sql read in parallel. + Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". + :type partition_option: object + :param partition_settings: The settings that will be leveraged for Sql source partitioning. 
+ :type partition_settings: ~azure.synapse.artifacts.models.SqlPartitionSettings """ _validation = { @@ -32754,10 +36472,13 @@ class SqlServerSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, } def __init__( @@ -32768,18 +36489,23 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, sql_reader_query: Optional[object] = None, sql_reader_stored_procedure_name: Optional[object] = None, stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, produce_additional_types: Optional[object] = None, + partition_option: Optional[object] = None, + partition_settings: Optional["SqlPartitionSettings"] = None, **kwargs ): - super(SqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(SqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SqlServerSource' # type: str self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters self.produce_additional_types = produce_additional_types + self.partition_option = partition_option + self.partition_settings = partition_settings class SqlServerStoredProcedureActivity(ExecutionActivity): @@ -33047,6 +36773,9 @@ class SqlSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). :type sql_reader_query: object :param sql_reader_stored_procedure_name: Name of the stored procedure for a SQL Database @@ -33057,6 +36786,15 @@ class SqlSource(TabularSource): Example: "{Parameter1: {value: "1", type: "int"}}". 
:type stored_procedure_parameters: dict[str, ~azure.synapse.artifacts.models.StoredProcedureParameter] + :param isolation_level: Specifies the transaction locking behavior for the SQL source. Allowed + values: ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value + is ReadCommitted. Type: string (or Expression with resultType string). + :type isolation_level: object + :param partition_option: The partition mechanism that will be used for Sql read in parallel. + Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". + :type partition_option: object + :param partition_settings: The settings that will be leveraged for Sql source partitioning. + :type partition_settings: ~azure.synapse.artifacts.models.SqlPartitionSettings """ _validation = { @@ -33070,9 +36808,13 @@ class SqlSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'isolation_level': {'key': 'isolationLevel', 'type': 'object'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, } def __init__( @@ -33083,16 +36825,23 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, sql_reader_query: Optional[object] = None, sql_reader_stored_procedure_name: Optional[object] = None, stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, + isolation_level: Optional[object] = None, + partition_option: Optional[object] = None, + partition_settings: Optional["SqlPartitionSettings"] = None, **kwargs ): - super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SqlSource' # type: str self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters + self.isolation_level = isolation_level + self.partition_option = partition_option + self.partition_settings = partition_settings class SquareLinkedService(LinkedService): @@ -33113,6 +36862,9 @@ class SquareLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] + :param connection_properties: Properties used to connect to Square. 
It is mutually exclusive + with any other properties in the linked service. Type: object. + :type connection_properties: object :param host: Required. The URL of the Square instance. (i.e. mystore.mysquare.com). :type host: object :param client_id: Required. The client ID associated with your Square application. @@ -33152,6 +36904,7 @@ class SquareLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, 'host': {'key': 'typeProperties.host', 'type': 'object'}, 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, 'client_secret': {'key': 'typeProperties.clientSecret', 'type': 'SecretBase'}, @@ -33173,6 +36926,7 @@ def __init__( description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, + connection_properties: Optional[object] = None, client_secret: Optional["SecretBase"] = None, use_encrypted_endpoints: Optional[object] = None, use_host_verification: Optional[object] = None, @@ -33182,6 +36936,7 @@ def __init__( ): super(SquareLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Square' # type: str + self.connection_properties = connection_properties self.host = host self.client_id = client_id self.client_secret = client_secret @@ -33282,6 +37037,9 @@ class SquareSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -33298,6 +37056,7 @@ class SquareSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -33309,10 +37068,11 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(SquareSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(SquareSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SquareSource' # type: str self.query = query @@ -33557,7 +37317,7 @@ class SSISPackageLocation(msrest.serialization.Model): string). :type package_path: object :param type: The type of SSIS package location. Possible values include: "SSISDB", "File", - "InlinePackage". + "InlinePackage", "PackageStore". :type type: str or ~azure.synapse.artifacts.models.SsisPackageLocationType :param package_password: Password of the package. :type package_password: ~azure.synapse.artifacts.models.SecretBase @@ -33566,6 +37326,8 @@ class SSISPackageLocation(msrest.serialization.Model): :param configuration_path: The configuration file of the package execution. Type: string (or Expression with resultType string). :type configuration_path: object + :param configuration_access_credential: The configuration file access credential. + :type configuration_access_credential: ~azure.synapse.artifacts.models.SSISAccessCredential :param package_name: The package name. :type package_name: str :param package_content: The embedded package content. 
Type: string (or Expression with @@ -33583,6 +37345,7 @@ class SSISPackageLocation(msrest.serialization.Model): 'package_password': {'key': 'typeProperties.packagePassword', 'type': 'SecretBase'}, 'access_credential': {'key': 'typeProperties.accessCredential', 'type': 'SSISAccessCredential'}, 'configuration_path': {'key': 'typeProperties.configurationPath', 'type': 'object'}, + 'configuration_access_credential': {'key': 'typeProperties.configurationAccessCredential', 'type': 'SSISAccessCredential'}, 'package_name': {'key': 'typeProperties.packageName', 'type': 'str'}, 'package_content': {'key': 'typeProperties.packageContent', 'type': 'object'}, 'package_last_modified_date': {'key': 'typeProperties.packageLastModifiedDate', 'type': 'str'}, @@ -33597,6 +37360,7 @@ def __init__( package_password: Optional["SecretBase"] = None, access_credential: Optional["SSISAccessCredential"] = None, configuration_path: Optional[object] = None, + configuration_access_credential: Optional["SSISAccessCredential"] = None, package_name: Optional[str] = None, package_content: Optional[object] = None, package_last_modified_date: Optional[str] = None, @@ -33609,6 +37373,7 @@ def __init__( self.package_password = package_password self.access_credential = access_credential self.configuration_path = configuration_path + self.configuration_access_credential = configuration_access_credential self.package_name = package_name self.package_content = package_content self.package_last_modified_date = package_last_modified_date @@ -34000,6 +37765,9 @@ class SybaseSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). 
:type query: object """ @@ -34015,6 +37783,7 @@ class SybaseSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -34026,10 +37795,11 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(SybaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(SybaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SybaseSource' # type: str self.query = query @@ -34330,6 +38100,11 @@ class TabularTranslator(CopyTranslator): [{"source":{"path":"$.CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"path":"$.CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. Type: object (or Expression with resultType object). :type mappings: object + :param type_conversion: Whether to enable the advanced type conversion feature in the Copy + activity. Type: boolean (or Expression with resultType boolean). + :type type_conversion: object + :param type_conversion_settings: Type conversion settings. + :type type_conversion_settings: ~azure.synapse.artifacts.models.TypeConversionSettings """ _validation = { @@ -34344,6 +38119,8 @@ class TabularTranslator(CopyTranslator): 'collection_reference': {'key': 'collectionReference', 'type': 'object'}, 'map_complex_values_to_string': {'key': 'mapComplexValuesToString', 'type': 'object'}, 'mappings': {'key': 'mappings', 'type': 'object'}, + 'type_conversion': {'key': 'typeConversion', 'type': 'object'}, + 'type_conversion_settings': {'key': 'typeConversionSettings', 'type': 'TypeConversionSettings'}, } def __init__( @@ -34355,6 +38132,8 @@ def __init__( collection_reference: Optional[object] = None, map_complex_values_to_string: Optional[object] = None, mappings: Optional[object] = None, + type_conversion: Optional[object] = None, + type_conversion_settings: Optional["TypeConversionSettings"] = None, **kwargs ): super(TabularTranslator, self).__init__(additional_properties=additional_properties, **kwargs) @@ -34364,6 +38143,82 @@ def __init__( self.collection_reference = collection_reference self.map_complex_values_to_string = map_complex_values_to_string self.mappings = mappings + self.type_conversion = type_conversion + self.type_conversion_settings = type_conversion_settings + + +class TarGZipReadSettings(CompressionReadSettings): + """The TarGZip compression read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. 
The Compression setting type.Constant filled by server. + :type type: str + :param preserve_compression_file_name_as_folder: Preserve the compression file name as folder + path. Type: boolean (or Expression with resultType boolean). + :type preserve_compression_file_name_as_folder: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'preserve_compression_file_name_as_folder': {'key': 'preserveCompressionFileNameAsFolder', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + preserve_compression_file_name_as_folder: Optional[object] = None, + **kwargs + ): + super(TarGZipReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'TarGZipReadSettings' # type: str + self.preserve_compression_file_name_as_folder = preserve_compression_file_name_as_folder + + +class TarReadSettings(CompressionReadSettings): + """The Tar compression read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The Compression setting type.Constant filled by server. + :type type: str + :param preserve_compression_file_name_as_folder: Preserve the compression file name as folder + path. Type: boolean (or Expression with resultType boolean). + :type preserve_compression_file_name_as_folder: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'preserve_compression_file_name_as_folder': {'key': 'preserveCompressionFileNameAsFolder', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + preserve_compression_file_name_as_folder: Optional[object] = None, + **kwargs + ): + super(TarReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'TarReadSettings' # type: str + self.preserve_compression_file_name_as_folder = preserve_compression_file_name_as_folder class TeradataLinkedService(LinkedService): @@ -34506,6 +38361,9 @@ class TeradataSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: Teradata query. Type: string (or Expression with resultType string). 
:type query: object :param partition_option: The partition mechanism that will be used for teradata read in @@ -34527,6 +38385,7 @@ class TeradataSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'}, @@ -34540,12 +38399,13 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, partition_option: Optional[Union[str, "TeradataPartitionOption"]] = None, partition_settings: Optional["TeradataPartitionSettings"] = None, **kwargs ): - super(TeradataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(TeradataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'TeradataSource' # type: str self.query = query self.partition_option = partition_option @@ -35084,7 +38944,7 @@ class TumblingWindowTrigger(Trigger): trigger window that is ready. :type pipeline: ~azure.synapse.artifacts.models.TriggerPipelineReference :param frequency: Required. The frequency of the time windows. Possible values include: - "Minute", "Hour". + "Minute", "Hour", "Month". :type frequency: str or ~azure.synapse.artifacts.models.TumblingWindowFrequency :param interval: Required. The interval of the time windows. The minimum interval allowed is 15 Minutes. @@ -35186,7 +39046,7 @@ class TumblingWindowTriggerDependencyReference(TriggerDependencyReference): _validation = { 'type': {'required': True}, 'reference_trigger': {'required': True}, - 'offset': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + 'offset': {'max_length': 15, 'min_length': 8, 'pattern': r'-?((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, 'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, } @@ -35211,6 +39071,58 @@ def __init__( self.size = size +class TypeConversionSettings(msrest.serialization.Model): + """Type conversion settings. + + :param allow_data_truncation: Whether to allow data truncation when converting the data. Type: + boolean (or Expression with resultType boolean). + :type allow_data_truncation: object + :param treat_boolean_as_number: Whether to treat boolean values as numbers. Type: boolean (or + Expression with resultType boolean). + :type treat_boolean_as_number: object + :param date_time_format: The format for DateTime values. Type: string (or Expression with + resultType string). + :type date_time_format: object + :param date_time_offset_format: The format for DateTimeOffset values. Type: string (or + Expression with resultType string). 
+ :type date_time_offset_format: object + :param time_span_format: The format for TimeSpan values. Type: string (or Expression with + resultType string). + :type time_span_format: object + :param culture: The culture used to convert data from/to string. Type: string (or Expression + with resultType string). + :type culture: object + """ + + _attribute_map = { + 'allow_data_truncation': {'key': 'allowDataTruncation', 'type': 'object'}, + 'treat_boolean_as_number': {'key': 'treatBooleanAsNumber', 'type': 'object'}, + 'date_time_format': {'key': 'dateTimeFormat', 'type': 'object'}, + 'date_time_offset_format': {'key': 'dateTimeOffsetFormat', 'type': 'object'}, + 'time_span_format': {'key': 'timeSpanFormat', 'type': 'object'}, + 'culture': {'key': 'culture', 'type': 'object'}, + } + + def __init__( + self, + *, + allow_data_truncation: Optional[object] = None, + treat_boolean_as_number: Optional[object] = None, + date_time_format: Optional[object] = None, + date_time_offset_format: Optional[object] = None, + time_span_format: Optional[object] = None, + culture: Optional[object] = None, + **kwargs + ): + super(TypeConversionSettings, self).__init__(**kwargs) + self.allow_data_truncation = allow_data_truncation + self.treat_boolean_as_number = treat_boolean_as_number + self.date_time_format = date_time_format + self.date_time_offset_format = date_time_offset_format + self.time_span_format = time_span_format + self.culture = culture + + class UntilActivity(ControlActivity): """This activity executes inner activities until the specified boolean expression results to true or timeout is reached, whichever is earlier. @@ -35516,6 +39428,9 @@ class VerticaSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -35532,6 +39447,7 @@ class VerticaSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -35543,10 +39459,11 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(VerticaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(VerticaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'VerticaSource' # type: str self.query = query @@ -35672,7 +39589,7 @@ class WaitActivity(ControlActivity): :param user_properties: Activity user properties. :type user_properties: list[~azure.synapse.artifacts.models.UserProperty] :param wait_time_in_seconds: Required. Duration in seconds. - :type wait_time_in_seconds: int + :type wait_time_in_seconds: object """ _validation = { @@ -35688,14 +39605,14 @@ class WaitActivity(ControlActivity): 'description': {'key': 'description', 'type': 'str'}, 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'wait_time_in_seconds': {'key': 'typeProperties.waitTimeInSeconds', 'type': 'int'}, + 'wait_time_in_seconds': {'key': 'typeProperties.waitTimeInSeconds', 'type': 'object'}, } def __init__( self, *, name: str, - wait_time_in_seconds: int, + wait_time_in_seconds: object, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, @@ -36063,10 +39980,10 @@ class WebHookActivity(ControlActivity): :type body: object :param authentication: Authentication method used for calling the endpoint. :type authentication: ~azure.synapse.artifacts.models.WebActivityAuthentication - :param report_status_on_call_back: When set to true, - statusCode, output and error in callback request body will be - consumed by activity. The activity can be marked as failed by setting statusCode >= 400 in - callback request. Default is false. Type: boolean (or Expression with resultType boolean). + :param report_status_on_call_back: When set to true, statusCode, output and error in callback + request body will be consumed by activity. The activity can be marked as failed by setting + statusCode >= 400 in callback request. Default is false. Type: boolean (or Expression with + resultType boolean). :type report_status_on_call_back: object """ @@ -36193,6 +40110,9 @@ class WebSource(CopySource): :param max_concurrent_connections: The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
:type max_concurrent_connections: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] """ _validation = { @@ -36205,6 +40125,7 @@ class WebSource(CopySource): 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__( @@ -36214,10 +40135,12 @@ def __init__( source_retry_count: Optional[object] = None, source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): super(WebSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'WebSource' # type: str + self.additional_columns = additional_columns class WebTableDataset(Dataset): @@ -36605,6 +40528,9 @@ class XeroLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] + :param connection_properties: Properties used to connect to Xero. It is mutually exclusive with + any other properties in the linked service. Type: object. + :type connection_properties: object :param host: Required. The endpoint of the Xero server. (i.e. api.xero.com). :type host: object :param consumer_key: The consumer key associated with the Xero application. @@ -36641,6 +40567,7 @@ class XeroLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, 'host': {'key': 'typeProperties.host', 'type': 'object'}, 'consumer_key': {'key': 'typeProperties.consumerKey', 'type': 'SecretBase'}, 'private_key': {'key': 'typeProperties.privateKey', 'type': 'SecretBase'}, @@ -36659,6 +40586,7 @@ def __init__( description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, + connection_properties: Optional[object] = None, consumer_key: Optional["SecretBase"] = None, private_key: Optional["SecretBase"] = None, use_encrypted_endpoints: Optional[object] = None, @@ -36669,6 +40597,7 @@ def __init__( ): super(XeroLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Xero' # type: str + self.connection_properties = connection_properties self.host = host self.consumer_key = consumer_key self.private_key = private_key @@ -36768,6 +40697,9 @@ class XeroSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -36784,6 +40716,7 @@ class XeroSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -36795,14 +40728,263 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(XeroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(XeroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'XeroSource' # type: str self.query = query +class XmlDataset(Dataset): + """Xml dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~azure.synapse.artifacts.models.DatasetFolder + :param location: The location of the json data storage. + :type location: ~azure.synapse.artifacts.models.DatasetLocation + :param encoding_name: The code page name of the preferred encoding. If not specified, the + default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column + of the table in the following link to set supported values: + https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with + resultType string). 
+ :type encoding_name: object + :param null_value: The null value string. Type: string (or Expression with resultType string). + :type null_value: object + :param compression: The data compression method used for the json dataset. + :type compression: ~azure.synapse.artifacts.models.DatasetCompression + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'location': {'key': 'typeProperties.location', 'type': 'DatasetLocation'}, + 'encoding_name': {'key': 'typeProperties.encodingName', 'type': 'object'}, + 'null_value': {'key': 'typeProperties.nullValue', 'type': 'object'}, + 'compression': {'key': 'typeProperties.compression', 'type': 'DatasetCompression'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + location: Optional["DatasetLocation"] = None, + encoding_name: Optional[object] = None, + null_value: Optional[object] = None, + compression: Optional["DatasetCompression"] = None, + **kwargs + ): + super(XmlDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'Xml' # type: str + self.location = location + self.encoding_name = encoding_name + self.null_value = null_value + self.compression = compression + + +class XmlReadSettings(FormatReadSettings): + """Xml read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The read setting type.Constant filled by server. + :type type: str + :param compression_properties: Compression settings. + :type compression_properties: ~azure.synapse.artifacts.models.CompressionReadSettings + :param validation_mode: Indicates what validation method is used when reading the xml files. + Allowed values: 'none', 'xsd', or 'dtd'. Type: string (or Expression with resultType string). + :type validation_mode: object + :param detect_data_type: Indicates whether type detection is enabled when reading the xml + files. Type: boolean (or Expression with resultType boolean). + :type detect_data_type: object + :param namespaces: Indicates whether namespace is enabled when reading the xml files. Type: + boolean (or Expression with resultType boolean). 
+ :type namespaces: object + :param namespace_prefixes: Namespace uri to prefix mappings to override the prefixes in column + names when namespace is enabled, if no prefix is defined for a namespace uri, the prefix of xml + element/attribute name in the xml data file will be used. Example: + "{"http://www.example.com/xml":"prefix"}" Type: object (or Expression with resultType object). + :type namespace_prefixes: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'compression_properties': {'key': 'compressionProperties', 'type': 'CompressionReadSettings'}, + 'validation_mode': {'key': 'validationMode', 'type': 'object'}, + 'detect_data_type': {'key': 'detectDataType', 'type': 'object'}, + 'namespaces': {'key': 'namespaces', 'type': 'object'}, + 'namespace_prefixes': {'key': 'namespacePrefixes', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + compression_properties: Optional["CompressionReadSettings"] = None, + validation_mode: Optional[object] = None, + detect_data_type: Optional[object] = None, + namespaces: Optional[object] = None, + namespace_prefixes: Optional[object] = None, + **kwargs + ): + super(XmlReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'XmlReadSettings' # type: str + self.compression_properties = compression_properties + self.validation_mode = validation_mode + self.detect_data_type = detect_data_type + self.namespaces = namespaces + self.namespace_prefixes = namespace_prefixes + + +class XmlSource(CopySource): + """A copy activity Xml source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param store_settings: Xml store settings. + :type store_settings: ~azure.synapse.artifacts.models.StoreReadSettings + :param format_settings: Xml format settings. + :type format_settings: ~azure.synapse.artifacts.models.XmlReadSettings + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). 
+ :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'XmlReadSettings'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + store_settings: Optional["StoreReadSettings"] = None, + format_settings: Optional["XmlReadSettings"] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, + **kwargs + ): + super(XmlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'XmlSource' # type: str + self.store_settings = store_settings + self.format_settings = format_settings + self.additional_columns = additional_columns + + +class ZipDeflateReadSettings(CompressionReadSettings): + """The ZipDeflate compression read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The Compression setting type.Constant filled by server. + :type type: str + :param preserve_zip_file_name_as_folder: Preserve the zip file name as folder path. Type: + boolean (or Expression with resultType boolean). + :type preserve_zip_file_name_as_folder: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'preserve_zip_file_name_as_folder': {'key': 'preserveZipFileNameAsFolder', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + preserve_zip_file_name_as_folder: Optional[object] = None, + **kwargs + ): + super(ZipDeflateReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'ZipDeflateReadSettings' # type: str + self.preserve_zip_file_name_as_folder = preserve_zip_file_name_as_folder + + class ZohoLinkedService(LinkedService): """Zoho server linked service. @@ -36821,6 +41003,9 @@ class ZohoLinkedService(LinkedService): :type parameters: dict[str, ~azure.synapse.artifacts.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] + :param connection_properties: Properties used to connect to Zoho. It is mutually exclusive with + any other properties in the linked service. Type: object. + :type connection_properties: object :param endpoint: Required. The endpoint of the Zoho server. (i.e. crm.zoho.com/crm/private). 
:type endpoint: object :param access_token: The access token for Zoho authentication. @@ -36853,6 +41038,7 @@ class ZohoLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, 'use_encrypted_endpoints': {'key': 'typeProperties.useEncryptedEndpoints', 'type': 'object'}, @@ -36870,6 +41056,7 @@ def __init__( description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, + connection_properties: Optional[object] = None, access_token: Optional["SecretBase"] = None, use_encrypted_endpoints: Optional[object] = None, use_host_verification: Optional[object] = None, @@ -36879,6 +41066,7 @@ def __init__( ): super(ZohoLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Zoho' # type: str + self.connection_properties = connection_properties self.endpoint = endpoint self.access_token = access_token self.use_encrypted_endpoints = use_encrypted_endpoints @@ -36977,6 +41165,9 @@ class ZohoSource(TabularSource): :param query_timeout: Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). + :type additional_columns: list[~azure.synapse.artifacts.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -36993,6 +41184,7 @@ class ZohoSource(TabularSource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -37004,9 +41196,10 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, **kwargs ): - super(ZohoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + super(ZohoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'ZohoSource' # type: str self.query = query From e5a6ee649a78aaf1e260b1ea030ea6e5d5bd6118 Mon Sep 17 00:00:00 2001 From: Xiang Yan Date: Wed, 31 Mar 2021 10:13:39 -0700 Subject: [PATCH 2/3] update --- .../azure/synapse/artifacts/models/__init__.py | 2 -- .../artifacts/models/_artifacts_client_enums.py | 12 ------------ .../azure/synapse/artifacts/models/_models.py | 2 +- .../azure/synapse/artifacts/models/_models_py3.py | 4 ++-- 4 files changed, 3 insertions(+), 17 deletions(-) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/__init__.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/__init__.py index 5f143cc9aa5f..90538c01f733 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/__init__.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/__init__.py @@ -1336,7 +1336,6 @@ DatasetCompressionLevel, DatasetReferenceType, DayOfWeek, - DaysOfWeek, Db2AuthenticationType, DependencyCondition, DynamicsAuthenticationType, @@ -2096,7 +2095,6 @@ 'DatasetCompressionLevel', 'DatasetReferenceType', 'DayOfWeek', - 'DaysOfWeek', 'Db2AuthenticationType', 'DependencyCondition', 'DynamicsAuthenticationType', diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_artifacts_client_enums.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_artifacts_client_enums.py index 8316033e8e61..5259f055fbc7 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_artifacts_client_enums.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_artifacts_client_enums.py @@ -136,18 +136,6 @@ class DatasetReferenceType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): DATASET_REFERENCE = "DatasetReference" class DayOfWeek(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """The days of the week. 
- """ - - SUNDAY = "Sunday" - MONDAY = "Monday" - TUESDAY = "Tuesday" - WEDNESDAY = "Wednesday" - THURSDAY = "Thursday" - FRIDAY = "Friday" - SATURDAY = "Saturday" - -class DaysOfWeek(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): SUNDAY = "Sunday" MONDAY = "Monday" diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models.py index c35b797b9a06..c878b656da81 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models.py @@ -25409,7 +25409,7 @@ class RecurrenceSchedule(msrest.serialization.Model): :param hours: The hours. :type hours: list[int] :param week_days: The days of the week. - :type week_days: list[str or ~azure.synapse.artifacts.models.DaysOfWeek] + :type week_days: list[str or ~azure.synapse.artifacts.models.DayOfWeek] :param month_days: The month days. :type month_days: list[int] :param monthly_occurrences: The monthly occurrences. diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models_py3.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models_py3.py index 8f4bd00d92bd..d7dcccd0a1ef 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models_py3.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models_py3.py @@ -29111,7 +29111,7 @@ class RecurrenceSchedule(msrest.serialization.Model): :param hours: The hours. :type hours: list[int] :param week_days: The days of the week. - :type week_days: list[str or ~azure.synapse.artifacts.models.DaysOfWeek] + :type week_days: list[str or ~azure.synapse.artifacts.models.DayOfWeek] :param month_days: The month days. :type month_days: list[int] :param monthly_occurrences: The monthly occurrences. @@ -29133,7 +29133,7 @@ def __init__( additional_properties: Optional[Dict[str, object]] = None, minutes: Optional[List[int]] = None, hours: Optional[List[int]] = None, - week_days: Optional[List[Union[str, "DaysOfWeek"]]] = None, + week_days: Optional[List[Union[str, "DayOfWeek"]]] = None, month_days: Optional[List[int]] = None, monthly_occurrences: Optional[List["RecurrenceScheduleOccurrence"]] = None, **kwargs From be1621f8a7397d9ac68c55e4fd00b12cf7a02c39 Mon Sep 17 00:00:00 2001 From: Xiang Yan Date: Thu, 1 Apr 2021 15:59:47 -0700 Subject: [PATCH 3/3] update --- .../azure/synapse/artifacts/models/_models.py | 5 +++-- .../azure/synapse/artifacts/models/_models_py3.py | 5 +++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models.py index c878b656da81..f306a86415b9 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models.py @@ -13711,7 +13711,7 @@ class ExecuteDataFlowActivity(ExecutionActivity): :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param dataflow: Data flow reference. + :param dataflow: Required. Data flow reference. :type dataflow: ~azure.synapse.artifacts.models.DataFlowReference :param staging: Staging info for execute data flow activity. 
:type staging: ~azure.synapse.artifacts.models.DataFlowStagingInfo @@ -13734,6 +13734,7 @@ class ExecuteDataFlowActivity(ExecutionActivity): _validation = { 'name': {'required': True}, 'type': {'required': True}, + 'dataflow': {'required': True}, } _attribute_map = { @@ -13760,7 +13761,7 @@ def __init__( ): super(ExecuteDataFlowActivity, self).__init__(**kwargs) self.type = 'ExecuteDataFlow' # type: str - self.dataflow = kwargs.get('dataflow', None) + self.dataflow = kwargs['dataflow'] self.staging = kwargs.get('staging', None) self.integration_runtime = kwargs.get('integration_runtime', None) self.compute = kwargs.get('compute', None) diff --git a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models_py3.py b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models_py3.py index d7dcccd0a1ef..79d64570e0c9 100644 --- a/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models_py3.py +++ b/sdk/synapse/azure-synapse-artifacts/azure/synapse/artifacts/models/_models_py3.py @@ -15683,7 +15683,7 @@ class ExecuteDataFlowActivity(ExecutionActivity): :type linked_service_name: ~azure.synapse.artifacts.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~azure.synapse.artifacts.models.ActivityPolicy - :param dataflow: Data flow reference. + :param dataflow: Required. Data flow reference. :type dataflow: ~azure.synapse.artifacts.models.DataFlowReference :param staging: Staging info for execute data flow activity. :type staging: ~azure.synapse.artifacts.models.DataFlowStagingInfo @@ -15706,6 +15706,7 @@ class ExecuteDataFlowActivity(ExecutionActivity): _validation = { 'name': {'required': True}, 'type': {'required': True}, + 'dataflow': {'required': True}, } _attribute_map = { @@ -15730,13 +15731,13 @@ def __init__( self, *, name: str, + dataflow: "DataFlowReference", additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, linked_service_name: Optional["LinkedServiceReference"] = None, policy: Optional["ActivityPolicy"] = None, - dataflow: Optional["DataFlowReference"] = None, staging: Optional["DataFlowStagingInfo"] = None, integration_runtime: Optional["IntegrationRuntimeReference"] = None, compute: Optional["ExecuteDataFlowActivityTypePropertiesCompute"] = None,