From d8f499aa43b56c16630cc05acc42178573d3e37e Mon Sep 17 00:00:00 2001
From: Jose Manuel Heredia Hidalgo
Date: Tue, 5 Oct 2021 15:09:57 -0500
Subject: [PATCH] [Synapse Artifacts] Regenerate for October Release (#17981)

* Update synapse artifacts
* Undo recording changes
* Update changelog
* update recordings
* Update changelog
* Remove .deb file
---
 sdk/synapse/synapse-artifacts/CHANGELOG.md | 13 +-
 ...ding_should_get_a_bigdatapool_by_name.json | 4 +-
 .../recording_should_list_bigdatapools.json | 4 +-
 .../recording_should_create_dataflow.json | 24 +-
 .../recording_should_delete_dataflow.json | 20 +-
 .../recording_should_get_dataflow.json | 4 +-
 .../recording_should_list_dataflows.json | 4 +-
 .../recording_should_rename_dataflow.json | 28 +-
 ...ording_should_get_integrationruntimes.json | 4 +-
 ...rding_should_list_integrationruntimes.json | 4 +-
 .../recording_should_create_library.json | 16 +-
 .../recording_should_delete_library.json | 16 +-
 .../library/recording_should_get_library.json | 4 +-
 .../recording_should_list_library.json | 4 +-
 .../recording_should_create_dataflow.js | 6 +-
 .../recording_should_delete_dataflow.js | 6 +-
 .../recording_should_rename_dataflow.js | 18 +-
 .../recording_should_create_library.js | 8 +-
 .../recording_should_delete_library.js | 6 +-
 .../review/synapse-artifacts.api.md | 1291 +-
 .../synapse-artifacts/src/artifactsClient.ts | 112 +-
 .../src/artifactsClientContext.ts | 4 -
 .../synapse-artifacts/src/models/index.ts | 9317 +++++-----
 .../synapse-artifacts/src/models/mappers.ts | 14822 ++++++++--------
 .../src/models/parameters.ts | 427 +-
 .../src/operations/bigDataPools.ts | 6 +-
 .../src/operations/dataFlowDebugSession.ts | 27 +-
 .../src/operations/dataFlowOperations.ts | 93 +-
 .../src/operations/datasetOperations.ts | 93 +-
 .../synapse-artifacts/src/operations/index.ts | 26 +-
 .../src/operations/integrationRuntimes.ts | 6 +-
 .../src/operations/kqlScriptOperations.ts | 428 +
 .../src/operations/kqlScripts.ts | 166 +
 .../src/operations/library.ts | 35 +-
 .../src/operations/linkedServiceOperations.ts | 95 +-
 .../src/operations/notebookOperationResult.ts | 76 +
 .../src/operations/notebookOperations.ts | 129 +-
 .../src/operations/pipelineOperations.ts | 107 +-
 .../src/operations/pipelineRunOperations.ts | 52 +-
 .../sparkConfigurationOperations.ts | 589 +
 .../sparkJobDefinitionOperations.ts | 147 +-
 .../src/operations/sqlPools.ts | 6 +-
 .../src/operations/sqlScriptOperations.ts | 93 +-
 .../src/operations/triggerOperations.ts | 161 +-
 .../src/operations/triggerRunOperations.ts | 32 +-
 .../operations/workspaceGitRepoManagement.ts | 4 +-
 .../src/operations/workspaceOperations.ts | 15 +-
 .../dataFlowDebugSession.ts | 1 -
 .../dataFlowOperations.ts | 39 +-
 .../operationsInterfaces/datasetOperations.ts | 39 +-
 .../src/operationsInterfaces/index.ts | 26 +-
 .../kqlScriptOperations.ts | 99 +
 .../src/operationsInterfaces/kqlScripts.ts | 22 +
 .../src/operationsInterfaces/library.ts | 1 -
 .../linkedServiceOperations.ts | 41 +-
 .../notebookOperationResult.ts | 22 +
 .../notebookOperations.ts | 43 +-
 .../pipelineOperations.ts | 47 +-
 .../pipelineRunOperations.ts | 28 +-
 .../sparkConfigurationOperations.ts | 111 +
 .../sparkJobDefinitionOperations.ts | 73 +-
 .../sqlScriptOperations.ts | 39 +-
 .../operationsInterfaces/triggerOperations.ts | 81 +-
 .../triggerRunOperations.ts | 16 +-
 .../workspaceOperations.ts | 9 +-
 .../synapse-artifacts/swagger/README.md | 3 +-
 66 files changed, 16047 insertions(+), 13245 deletions(-)
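Taken together with the CHANGELOG entries below, the regeneration surfaces several new operation groups on `ArtifactsClient` (`kqlScripts`, `kqlScriptOperations`, `sparkConfigurationOperations`, `notebookOperationResult`). A minimal sketch of calling one of them follows; the workspace endpoint is a placeholder, and the `getAll()` method name is assumed from the generated operation-group pattern rather than confirmed by this patch:

```ts
import { DefaultAzureCredential } from "@azure/identity";
import { ArtifactsClient } from "@azure/synapse-artifacts";

async function main(): Promise<void> {
  // Hypothetical workspace endpoint; substitute your own Synapse workspace.
  const client = new ArtifactsClient(
    new DefaultAzureCredential(),
    "https://myworkspace.dev.azuresynapse.net"
  );

  // Enumerate the workspace's KQL scripts via the operation group added in
  // 1.0.0-beta.6. getAll() is an assumed name; it pages through the
  // workspace's KqlScriptResource items.
  for await (const script of client.kqlScripts.getAll()) {
    console.log(script.name);
  }
}

main().catch((err) => console.error(err));
```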
 create mode 100644 sdk/synapse/synapse-artifacts/src/operations/kqlScriptOperations.ts
 create mode 100644 sdk/synapse/synapse-artifacts/src/operations/kqlScripts.ts
 create mode 100644 sdk/synapse/synapse-artifacts/src/operations/notebookOperationResult.ts
 create mode 100644 sdk/synapse/synapse-artifacts/src/operations/sparkConfigurationOperations.ts
 create mode 100644 sdk/synapse/synapse-artifacts/src/operationsInterfaces/kqlScriptOperations.ts
 create mode 100644 sdk/synapse/synapse-artifacts/src/operationsInterfaces/kqlScripts.ts
 create mode 100644 sdk/synapse/synapse-artifacts/src/operationsInterfaces/notebookOperationResult.ts
 create mode 100644 sdk/synapse/synapse-artifacts/src/operationsInterfaces/sparkConfigurationOperations.ts

diff --git a/sdk/synapse/synapse-artifacts/CHANGELOG.md b/sdk/synapse/synapse-artifacts/CHANGELOG.md
index a3def7c69f59..7a6b17426bfa 100644
--- a/sdk/synapse/synapse-artifacts/CHANGELOG.md
+++ b/sdk/synapse/synapse-artifacts/CHANGELOG.md
@@ -1,14 +1,15 @@
 # Release History

-## 1.0.0-beta.6 (Unreleased)
+## 1.0.0-beta.6 (2021-10-05)

-### Features Added
-
-### Breaking Changes
+### Other Changes

-### Bugs Fixed
+- Upgraded to [package-artifacts-composite-v1](https://github.com/Azure/azure-rest-api-specs/blob/bee724836ffdeb5458274037dc75f4d43576b5e3/specification/synapse/data-plane/readme.md#tag-package-artifacts-composite-v1)

-### Other Changes
+- Added `SparkConfiguration`, `KqlScripts`, and associated support types.
+- Updated the type of many models from string to object.
+- `SparkJobDefinition`, `Notebook`, and `SqlScript` now have a `folder` property.
+- `SqlConnection` now has `poolName` and `databaseName` properties.

 ## 1.0.0-beta.5 (2021-08-10)

diff --git a/sdk/synapse/synapse-artifacts/recordings/browsers/bigdatapools/recording_should_get_a_bigdatapool_by_name.json b/sdk/synapse/synapse-artifacts/recordings/browsers/bigdatapools/recording_should_get_a_bigdatapool_by_name.json index 3eb8730e7ad0..e9e033ac884d 100644 --- a/sdk/synapse/synapse-artifacts/recordings/browsers/bigdatapools/recording_should_get_a_bigdatapool_by_name.json +++ b/sdk/synapse/synapse-artifacts/recordings/browsers/bigdatapools/recording_should_get_a_bigdatapool_by_name.json @@ -28,7 +28,7 @@ "method": "GET", "url": "https://testaccount.dev.azuresynapse.net/bigDataPools/testsparkpool", "query": { - "api-version": "2019-06-01-preview" + "api-version": "2020-12-01" }, "requestBody": null, "status": 200, @@ -49,4 +49,4 @@ "newDate": {} }, "hash": "985a433417c3edcefd1553ea2fd608fd" -} \ No newline at end of file +} diff --git a/sdk/synapse/synapse-artifacts/recordings/browsers/bigdatapools/recording_should_list_bigdatapools.json b/sdk/synapse/synapse-artifacts/recordings/browsers/bigdatapools/recording_should_list_bigdatapools.json index 8f1da9b0e12b..4f5630214552 100644 --- a/sdk/synapse/synapse-artifacts/recordings/browsers/bigdatapools/recording_should_list_bigdatapools.json +++ b/sdk/synapse/synapse-artifacts/recordings/browsers/bigdatapools/recording_should_list_bigdatapools.json @@ -28,7 +28,7 @@ "method": "GET", "url": "https://testaccount.dev.azuresynapse.net/bigDataPools", "query": { - "api-version": "2019-06-01-preview" + "api-version": "2020-12-01" }, "requestBody": null, "status": 200, @@ -49,4 +49,4 @@ "newDate": {} }, "hash": "c4719dc92a7c9d77e944b35c69a64933" -} \ No newline at end of file +} diff --git a/sdk/synapse/synapse-artifacts/recordings/browsers/dataflow/recording_should_create_dataflow.json b/sdk/synapse/synapse-artifacts/recordings/browsers/dataflow/recording_should_create_dataflow.json index
24343adaa005..0bfb4c05a758 100644 --- a/sdk/synapse/synapse-artifacts/recordings/browsers/dataflow/recording_should_create_dataflow.json +++ b/sdk/synapse/synapse-artifacts/recordings/browsers/dataflow/recording_should_create_dataflow.json @@ -28,7 +28,7 @@ "method": "PUT", "url": "https://testaccount.dev.azuresynapse.net/dataflows/testdataflow", "query": { - "api-version": "2019-06-01-preview" + "api-version": "2020-12-01" }, "requestBody": "{\"properties\":{\"type\":\"MappingDataFlow\"}}", "status": 202, @@ -39,7 +39,7 @@ "content-length": "425", "content-type": "application/json; charset=utf-8", "date": "Fri, 11 Jun 2021 18:40:59 GMT", - "location": "https://testaccount.dev.azuresynapse.net/operationResults/31a301d6-71fc-4e6d-b13f-db4d423d69f8?api-version=2019-06-01-preview", + "location": "https://testaccount.dev.azuresynapse.net/operationResults/31a301d6-71fc-4e6d-b13f-db4d423d69f8?api-version=2020-12-01", "retry-after": "10", "server": "Microsoft-HTTPAPI/2.0", "status": "202", @@ -51,7 +51,7 @@ "method": "GET", "url": "https://testaccount.dev.azuresynapse.net/operationResults/31a301d6-71fc-4e6d-b13f-db4d423d69f8", "query": { - "api-version": "2019-06-01-preview" + "api-version": "2020-12-01" }, "requestBody": null, "status": 202, @@ -62,7 +62,7 @@ "content-length": "23", "content-type": "application/json; charset=utf-8", "date": "Fri, 11 Jun 2021 18:40:59 GMT", - "location": "https://testaccount.dev.azuresynapse.net/operationResults/31a301d6-71fc-4e6d-b13f-db4d423d69f8?api-version=2019-06-01-preview", + "location": "https://testaccount.dev.azuresynapse.net/operationResults/31a301d6-71fc-4e6d-b13f-db4d423d69f8?api-version=2020-12-01", "retry-after": "10", "server": "Microsoft-HTTPAPI/2.0", "status": "202", @@ -74,7 +74,7 @@ "method": "GET", "url": "https://testaccount.dev.azuresynapse.net/operationResults/31a301d6-71fc-4e6d-b13f-db4d423d69f8", "query": { - "api-version": "2019-06-01-preview" + "api-version": "2020-12-01" }, "requestBody": null, "status": 202, @@ -85,7 +85,7 @@ "content-length": "23", "content-type": "application/json; charset=utf-8", "date": "Fri, 11 Jun 2021 18:41:01 GMT", - "location": "https://testaccount.dev.azuresynapse.net/operationResults/31a301d6-71fc-4e6d-b13f-db4d423d69f8?api-version=2019-06-01-preview", + "location": "https://testaccount.dev.azuresynapse.net/operationResults/31a301d6-71fc-4e6d-b13f-db4d423d69f8?api-version=2020-12-01", "retry-after": "10", "server": "Microsoft-HTTPAPI/2.0", "status": "202", @@ -97,7 +97,7 @@ "method": "GET", "url": "https://testaccount.dev.azuresynapse.net/operationResults/31a301d6-71fc-4e6d-b13f-db4d423d69f8", "query": { - "api-version": "2019-06-01-preview" + "api-version": "2020-12-01" }, "requestBody": null, "status": 202, @@ -108,7 +108,7 @@ "content-length": "23", "content-type": "application/json; charset=utf-8", "date": "Fri, 11 Jun 2021 18:41:04 GMT", - "location": "https://testaccount.dev.azuresynapse.net/operationResults/31a301d6-71fc-4e6d-b13f-db4d423d69f8?api-version=2019-06-01-preview", + "location": "https://testaccount.dev.azuresynapse.net/operationResults/31a301d6-71fc-4e6d-b13f-db4d423d69f8?api-version=2020-12-01", "retry-after": "10", "server": "Microsoft-HTTPAPI/2.0", "status": "202", @@ -120,7 +120,7 @@ "method": "GET", "url": "https://testaccount.dev.azuresynapse.net/operationResults/31a301d6-71fc-4e6d-b13f-db4d423d69f8", "query": { - "api-version": "2019-06-01-preview" + "api-version": "2020-12-01" }, "requestBody": null, "status": 202, @@ -131,7 +131,7 @@ "content-length": "23", "content-type": 
"application/json; charset=utf-8", "date": "Fri, 11 Jun 2021 18:41:05 GMT", - "location": "https://testaccount.dev.azuresynapse.net/operationResults/31a301d6-71fc-4e6d-b13f-db4d423d69f8?api-version=2019-06-01-preview", + "location": "https://testaccount.dev.azuresynapse.net/operationResults/31a301d6-71fc-4e6d-b13f-db4d423d69f8?api-version=2020-12-01", "retry-after": "10", "server": "Microsoft-HTTPAPI/2.0", "status": "202", @@ -143,7 +143,7 @@ "method": "GET", "url": "https://testaccount.dev.azuresynapse.net/operationResults/31a301d6-71fc-4e6d-b13f-db4d423d69f8", "query": { - "api-version": "2019-06-01-preview" + "api-version": "2020-12-01" }, "requestBody": null, "status": 200, @@ -170,4 +170,4 @@ "newDate": {} }, "hash": "c446fd789bba8943154fc27cc55a965c" -} \ No newline at end of file +} diff --git a/sdk/synapse/synapse-artifacts/recordings/browsers/dataflow/recording_should_delete_dataflow.json b/sdk/synapse/synapse-artifacts/recordings/browsers/dataflow/recording_should_delete_dataflow.json index a4f70c8b086e..9081f68beeb3 100644 --- a/sdk/synapse/synapse-artifacts/recordings/browsers/dataflow/recording_should_delete_dataflow.json +++ b/sdk/synapse/synapse-artifacts/recordings/browsers/dataflow/recording_should_delete_dataflow.json @@ -28,7 +28,7 @@ "method": "DELETE", "url": "https://testaccount.dev.azuresynapse.net/dataflows/testdataflow2", "query": { - "api-version": "2019-06-01-preview" + "api-version": "2020-12-01" }, "requestBody": null, "status": 202, @@ -39,7 +39,7 @@ "content-length": "351", "content-type": "application/json; charset=utf-8", "date": "Fri, 11 Jun 2021 18:41:27 GMT", - "location": "https://testaccount.dev.azuresynapse.net/operationResults/381ee057-07a7-40ed-adab-007bc434d0b4?api-version=2019-06-01-preview", + "location": "https://testaccount.dev.azuresynapse.net/operationResults/381ee057-07a7-40ed-adab-007bc434d0b4?api-version=2020-12-01", "server": "Microsoft-HTTPAPI/2.0", "status": "202", "strict-transport-security": "max-age=31536000; includeSubDomains", @@ -50,7 +50,7 @@ "method": "GET", "url": "https://testaccount.dev.azuresynapse.net/operationResults/381ee057-07a7-40ed-adab-007bc434d0b4", "query": { - "api-version": "2019-06-01-preview" + "api-version": "2020-12-01" }, "requestBody": null, "status": 202, @@ -61,7 +61,7 @@ "content-length": "23", "content-type": "application/json; charset=utf-8", "date": "Fri, 11 Jun 2021 18:41:27 GMT", - "location": "https://testaccount.dev.azuresynapse.net/operationResults/381ee057-07a7-40ed-adab-007bc434d0b4?api-version=2019-06-01-preview", + "location": "https://testaccount.dev.azuresynapse.net/operationResults/381ee057-07a7-40ed-adab-007bc434d0b4?api-version=2020-12-01", "retry-after": "10", "server": "Microsoft-HTTPAPI/2.0", "status": "202", @@ -73,7 +73,7 @@ "method": "GET", "url": "https://testaccount.dev.azuresynapse.net/operationResults/381ee057-07a7-40ed-adab-007bc434d0b4", "query": { - "api-version": "2019-06-01-preview" + "api-version": "2020-12-01" }, "requestBody": null, "status": 202, @@ -84,7 +84,7 @@ "content-length": "23", "content-type": "application/json; charset=utf-8", "date": "Fri, 11 Jun 2021 18:41:29 GMT", - "location": "https://testaccount.dev.azuresynapse.net/operationResults/381ee057-07a7-40ed-adab-007bc434d0b4?api-version=2019-06-01-preview", + "location": "https://testaccount.dev.azuresynapse.net/operationResults/381ee057-07a7-40ed-adab-007bc434d0b4?api-version=2020-12-01", "retry-after": "10", "server": "Microsoft-HTTPAPI/2.0", "status": "202", @@ -96,7 +96,7 @@ "method": "GET", "url": 
"https://testaccount.dev.azuresynapse.net/operationResults/381ee057-07a7-40ed-adab-007bc434d0b4", "query": { - "api-version": "2019-06-01-preview" + "api-version": "2020-12-01" }, "requestBody": null, "status": 202, @@ -107,7 +107,7 @@ "content-length": "23", "content-type": "application/json; charset=utf-8", "date": "Fri, 11 Jun 2021 18:41:31 GMT", - "location": "https://testaccount.dev.azuresynapse.net/operationResults/381ee057-07a7-40ed-adab-007bc434d0b4?api-version=2019-06-01-preview", + "location": "https://testaccount.dev.azuresynapse.net/operationResults/381ee057-07a7-40ed-adab-007bc434d0b4?api-version=2020-12-01", "retry-after": "10", "server": "Microsoft-HTTPAPI/2.0", "status": "202", @@ -119,7 +119,7 @@ "method": "GET", "url": "https://testaccount.dev.azuresynapse.net/operationResults/381ee057-07a7-40ed-adab-007bc434d0b4", "query": { - "api-version": "2019-06-01-preview" + "api-version": "2020-12-01" }, "requestBody": null, "status": 200, @@ -139,4 +139,4 @@ "newDate": {} }, "hash": "7ab568f79fee2a97aa781d49d16db514" -} \ No newline at end of file +} diff --git a/sdk/synapse/synapse-artifacts/recordings/browsers/dataflow/recording_should_get_dataflow.json b/sdk/synapse/synapse-artifacts/recordings/browsers/dataflow/recording_should_get_dataflow.json index 5be250b6a1fd..fae6b9f4f062 100644 --- a/sdk/synapse/synapse-artifacts/recordings/browsers/dataflow/recording_should_get_dataflow.json +++ b/sdk/synapse/synapse-artifacts/recordings/browsers/dataflow/recording_should_get_dataflow.json @@ -28,7 +28,7 @@ "method": "GET", "url": "https://testaccount.dev.azuresynapse.net/dataflows/testdataflow", "query": { - "api-version": "2019-06-01-preview" + "api-version": "2020-12-01" }, "requestBody": null, "status": 200, @@ -55,4 +55,4 @@ "newDate": {} }, "hash": "4d8c8c8d635ea267ab31dc8272a0f408" -} \ No newline at end of file +} diff --git a/sdk/synapse/synapse-artifacts/recordings/browsers/dataflow/recording_should_list_dataflows.json b/sdk/synapse/synapse-artifacts/recordings/browsers/dataflow/recording_should_list_dataflows.json index 875bf01a909c..d399f3086f59 100644 --- a/sdk/synapse/synapse-artifacts/recordings/browsers/dataflow/recording_should_list_dataflows.json +++ b/sdk/synapse/synapse-artifacts/recordings/browsers/dataflow/recording_should_list_dataflows.json @@ -28,7 +28,7 @@ "method": "GET", "url": "https://testaccount.dev.azuresynapse.net/dataflows", "query": { - "api-version": "2019-06-01-preview" + "api-version": "2020-12-01" }, "requestBody": null, "status": 200, @@ -49,4 +49,4 @@ "newDate": {} }, "hash": "1352d707f054182c3ed762040d9f9a76" -} \ No newline at end of file +} diff --git a/sdk/synapse/synapse-artifacts/recordings/browsers/dataflow/recording_should_rename_dataflow.json b/sdk/synapse/synapse-artifacts/recordings/browsers/dataflow/recording_should_rename_dataflow.json index a5ddcb4ede9a..72bec8813966 100644 --- a/sdk/synapse/synapse-artifacts/recordings/browsers/dataflow/recording_should_rename_dataflow.json +++ b/sdk/synapse/synapse-artifacts/recordings/browsers/dataflow/recording_should_rename_dataflow.json @@ -28,7 +28,7 @@ "method": "POST", "url": "https://testaccount.dev.azuresynapse.net/dataflows/testdataflow/rename", "query": { - "api-version": "2019-06-01-preview" + "api-version": "2020-12-01" }, "requestBody": "{\"newName\":\"testdataflow2\"}", "status": 202, @@ -39,7 +39,7 @@ "content-length": "425", "content-type": "application/json; charset=utf-8", "date": "Fri, 11 Jun 2021 18:41:12 GMT", - "location": 
"https://testaccount.dev.azuresynapse.net/operationResults/14dc3164-f3f5-45c2-9fe7-b27755d8f86c?api-version=2019-06-01-preview", + "location": "https://testaccount.dev.azuresynapse.net/operationResults/14dc3164-f3f5-45c2-9fe7-b27755d8f86c?api-version=2020-12-01", "retry-after": "10", "server": "Microsoft-HTTPAPI/2.0", "status": "202", @@ -51,7 +51,7 @@ "method": "GET", "url": "https://testaccount.dev.azuresynapse.net/operationResults/14dc3164-f3f5-45c2-9fe7-b27755d8f86c", "query": { - "api-version": "2019-06-01-preview" + "api-version": "2020-12-01" }, "requestBody": null, "status": 202, @@ -62,7 +62,7 @@ "content-length": "23", "content-type": "application/json; charset=utf-8", "date": "Fri, 11 Jun 2021 18:41:12 GMT", - "location": "https://testaccount.dev.azuresynapse.net/operationResults/14dc3164-f3f5-45c2-9fe7-b27755d8f86c?api-version=2019-06-01-preview", + "location": "https://testaccount.dev.azuresynapse.net/operationResults/14dc3164-f3f5-45c2-9fe7-b27755d8f86c?api-version=2020-12-01", "retry-after": "10", "server": "Microsoft-HTTPAPI/2.0", "status": "202", @@ -74,7 +74,7 @@ "method": "GET", "url": "https://testaccount.dev.azuresynapse.net/operationResults/14dc3164-f3f5-45c2-9fe7-b27755d8f86c", "query": { - "api-version": "2019-06-01-preview" + "api-version": "2020-12-01" }, "requestBody": null, "status": 202, @@ -85,7 +85,7 @@ "content-length": "23", "content-type": "application/json; charset=utf-8", "date": "Fri, 11 Jun 2021 18:41:14 GMT", - "location": "https://testaccount.dev.azuresynapse.net/operationResults/14dc3164-f3f5-45c2-9fe7-b27755d8f86c?api-version=2019-06-01-preview", + "location": "https://testaccount.dev.azuresynapse.net/operationResults/14dc3164-f3f5-45c2-9fe7-b27755d8f86c?api-version=2020-12-01", "retry-after": "10", "server": "Microsoft-HTTPAPI/2.0", "status": "202", @@ -97,7 +97,7 @@ "method": "GET", "url": "https://testaccount.dev.azuresynapse.net/operationResults/14dc3164-f3f5-45c2-9fe7-b27755d8f86c", "query": { - "api-version": "2019-06-01-preview" + "api-version": "2020-12-01" }, "requestBody": null, "status": 202, @@ -108,7 +108,7 @@ "content-length": "23", "content-type": "application/json; charset=utf-8", "date": "Fri, 11 Jun 2021 18:41:16 GMT", - "location": "https://testaccount.dev.azuresynapse.net/operationResults/14dc3164-f3f5-45c2-9fe7-b27755d8f86c?api-version=2019-06-01-preview", + "location": "https://testaccount.dev.azuresynapse.net/operationResults/14dc3164-f3f5-45c2-9fe7-b27755d8f86c?api-version=2020-12-01", "retry-after": "10", "server": "Microsoft-HTTPAPI/2.0", "status": "202", @@ -120,7 +120,7 @@ "method": "GET", "url": "https://testaccount.dev.azuresynapse.net/operationResults/14dc3164-f3f5-45c2-9fe7-b27755d8f86c", "query": { - "api-version": "2019-06-01-preview" + "api-version": "2020-12-01" }, "requestBody": null, "status": 202, @@ -131,7 +131,7 @@ "content-length": "23", "content-type": "application/json; charset=utf-8", "date": "Fri, 11 Jun 2021 18:41:19 GMT", - "location": "https://testaccount.dev.azuresynapse.net/operationResults/14dc3164-f3f5-45c2-9fe7-b27755d8f86c?api-version=2019-06-01-preview", + "location": "https://testaccount.dev.azuresynapse.net/operationResults/14dc3164-f3f5-45c2-9fe7-b27755d8f86c?api-version=2020-12-01", "retry-after": "10", "server": "Microsoft-HTTPAPI/2.0", "status": "202", @@ -143,7 +143,7 @@ "method": "GET", "url": "https://testaccount.dev.azuresynapse.net/operationResults/14dc3164-f3f5-45c2-9fe7-b27755d8f86c", "query": { - "api-version": "2019-06-01-preview" + "api-version": "2020-12-01" }, "requestBody": 
null, "status": 202, @@ -154,7 +154,7 @@ "content-length": "23", "content-type": "application/json; charset=utf-8", "date": "Fri, 11 Jun 2021 18:41:21 GMT", - "location": "https://testaccount.dev.azuresynapse.net/operationResults/14dc3164-f3f5-45c2-9fe7-b27755d8f86c?api-version=2019-06-01-preview", + "location": "https://testaccount.dev.azuresynapse.net/operationResults/14dc3164-f3f5-45c2-9fe7-b27755d8f86c?api-version=2020-12-01", "retry-after": "10", "server": "Microsoft-HTTPAPI/2.0", "status": "202", @@ -166,7 +166,7 @@ "method": "GET", "url": "https://testaccount.dev.azuresynapse.net/operationResults/14dc3164-f3f5-45c2-9fe7-b27755d8f86c", "query": { - "api-version": "2019-06-01-preview" + "api-version": "2020-12-01" }, "requestBody": null, "status": 200, @@ -186,4 +186,4 @@ "newDate": {} }, "hash": "cd37812c106eb0a91952c0443239994a" -} \ No newline at end of file +} diff --git a/sdk/synapse/synapse-artifacts/recordings/browsers/integrationruntimes/recording_should_get_integrationruntimes.json b/sdk/synapse/synapse-artifacts/recordings/browsers/integrationruntimes/recording_should_get_integrationruntimes.json index d6ad519fc5ad..44d00114e3b3 100644 --- a/sdk/synapse/synapse-artifacts/recordings/browsers/integrationruntimes/recording_should_get_integrationruntimes.json +++ b/sdk/synapse/synapse-artifacts/recordings/browsers/integrationruntimes/recording_should_get_integrationruntimes.json @@ -28,7 +28,7 @@ "method": "GET", "url": "https://testaccount.dev.azuresynapse.net/integrationRuntimes/AutoResolveIntegrationRuntime", "query": { - "api-version": "2019-06-01-preview" + "api-version": "2020-12-01" }, "requestBody": null, "status": 200, @@ -55,4 +55,4 @@ "newDate": {} }, "hash": "7770ffcb0b7ecd65f37d4dac617662d9" -} \ No newline at end of file +} diff --git a/sdk/synapse/synapse-artifacts/recordings/browsers/integrationruntimes/recording_should_list_integrationruntimes.json b/sdk/synapse/synapse-artifacts/recordings/browsers/integrationruntimes/recording_should_list_integrationruntimes.json index 4c2fcb7fdb37..408b700f6c37 100644 --- a/sdk/synapse/synapse-artifacts/recordings/browsers/integrationruntimes/recording_should_list_integrationruntimes.json +++ b/sdk/synapse/synapse-artifacts/recordings/browsers/integrationruntimes/recording_should_list_integrationruntimes.json @@ -28,7 +28,7 @@ "method": "GET", "url": "https://testaccount.dev.azuresynapse.net/integrationRuntimes", "query": { - "api-version": "2019-06-01-preview" + "api-version": "2020-12-01" }, "requestBody": null, "status": 200, @@ -55,4 +55,4 @@ "newDate": {} }, "hash": "8037990fcacbab412bc49eba1e5bdef6" -} \ No newline at end of file +} diff --git a/sdk/synapse/synapse-artifacts/recordings/browsers/library/recording_should_create_library.json b/sdk/synapse/synapse-artifacts/recordings/browsers/library/recording_should_create_library.json index 7ef8bbe44e58..61ce653531fd 100644 --- a/sdk/synapse/synapse-artifacts/recordings/browsers/library/recording_should_create_library.json +++ b/sdk/synapse/synapse-artifacts/recordings/browsers/library/recording_should_create_library.json @@ -28,7 +28,7 @@ "method": "PUT", "url": "https://testaccount.dev.azuresynapse.net/libraries/testLibraryName.jar", "query": { - "api-version": "2019-06-01-preview" + "api-version": "2020-12-01" }, "requestBody": null, "status": 202, @@ -39,7 +39,7 @@ "content-length": "436", "content-type": "application/json; charset=utf-8", "date": "Fri, 11 Jun 2021 18:41:37 GMT", - "location": 
"https://testaccount.dev.azuresynapse.net/libraryOperationResults/606a995d-b95a-410a-95c6-8a929c5e3c72?api-version=2019-06-01-preview", + "location": "https://testaccount.dev.azuresynapse.net/libraryOperationResults/606a995d-b95a-410a-95c6-8a929c5e3c72?api-version=2020-12-01", "retry-after": "10", "server": "Microsoft-HTTPAPI/2.0", "status": "202", @@ -51,7 +51,7 @@ "method": "GET", "url": "https://testaccount.dev.azuresynapse.net/libraryOperationResults/606a995d-b95a-410a-95c6-8a929c5e3c72", "query": { - "api-version": "2019-06-01-preview" + "api-version": "2020-12-01" }, "requestBody": null, "status": 202, @@ -62,7 +62,7 @@ "content-length": "23", "content-type": "application/json; charset=utf-8", "date": "Fri, 11 Jun 2021 18:41:37 GMT", - "location": "https://testaccount.dev.azuresynapse.net/libraryOperationResults/606a995d-b95a-410a-95c6-8a929c5e3c72?api-version=2019-06-01-preview", + "location": "https://testaccount.dev.azuresynapse.net/libraryOperationResults/606a995d-b95a-410a-95c6-8a929c5e3c72?api-version=2020-12-01", "retry-after": "10", "server": "Microsoft-HTTPAPI/2.0", "status": "202", @@ -74,7 +74,7 @@ "method": "GET", "url": "https://testaccount.dev.azuresynapse.net/libraryOperationResults/606a995d-b95a-410a-95c6-8a929c5e3c72", "query": { - "api-version": "2019-06-01-preview" + "api-version": "2020-12-01" }, "requestBody": null, "status": 202, @@ -85,7 +85,7 @@ "content-length": "23", "content-type": "application/json; charset=utf-8", "date": "Fri, 11 Jun 2021 18:41:40 GMT", - "location": "https://testaccount.dev.azuresynapse.net/libraryOperationResults/606a995d-b95a-410a-95c6-8a929c5e3c72?api-version=2019-06-01-preview", + "location": "https://testaccount.dev.azuresynapse.net/libraryOperationResults/606a995d-b95a-410a-95c6-8a929c5e3c72?api-version=2020-12-01", "retry-after": "10", "server": "Microsoft-HTTPAPI/2.0", "status": "202", @@ -97,7 +97,7 @@ "method": "GET", "url": "https://testaccount.dev.azuresynapse.net/libraryOperationResults/606a995d-b95a-410a-95c6-8a929c5e3c72", "query": { - "api-version": "2019-06-01-preview" + "api-version": "2020-12-01" }, "requestBody": null, "status": 200, @@ -118,4 +118,4 @@ "newDate": {} }, "hash": "d878684ca2ba8bb8db00360d4a70f8bb" -} \ No newline at end of file +} diff --git a/sdk/synapse/synapse-artifacts/recordings/browsers/library/recording_should_delete_library.json b/sdk/synapse/synapse-artifacts/recordings/browsers/library/recording_should_delete_library.json index d011a32b8200..ea90bfbe888d 100644 --- a/sdk/synapse/synapse-artifacts/recordings/browsers/library/recording_should_delete_library.json +++ b/sdk/synapse/synapse-artifacts/recordings/browsers/library/recording_should_delete_library.json @@ -28,7 +28,7 @@ "method": "DELETE", "url": "https://testaccount.dev.azuresynapse.net/libraries/testLibraryName.jar", "query": { - "api-version": "2019-06-01-preview" + "api-version": "2020-12-01" }, "requestBody": null, "status": 202, @@ -39,7 +39,7 @@ "content-length": "370", "content-type": "application/json; charset=utf-8", "date": "Fri, 11 Jun 2021 18:41:46 GMT", - "location": "https://testaccount.dev.azuresynapse.net/libraryOperationResults/d4185926-bdff-43c4-884e-0f5018388a38?api-version=2019-06-01-preview", + "location": "https://testaccount.dev.azuresynapse.net/libraryOperationResults/d4185926-bdff-43c4-884e-0f5018388a38?api-version=2020-12-01", "server": "Microsoft-HTTPAPI/2.0", "status": "202", "strict-transport-security": "max-age=31536000; includeSubDomains", @@ -50,7 +50,7 @@ "method": "GET", "url": 
"https://testaccount.dev.azuresynapse.net/libraryOperationResults/d4185926-bdff-43c4-884e-0f5018388a38", "query": { - "api-version": "2019-06-01-preview" + "api-version": "2020-12-01" }, "requestBody": null, "status": 202, @@ -61,7 +61,7 @@ "content-length": "23", "content-type": "application/json; charset=utf-8", "date": "Fri, 11 Jun 2021 18:41:46 GMT", - "location": "https://testaccount.dev.azuresynapse.net/libraryOperationResults/d4185926-bdff-43c4-884e-0f5018388a38?api-version=2019-06-01-preview", + "location": "https://testaccount.dev.azuresynapse.net/libraryOperationResults/d4185926-bdff-43c4-884e-0f5018388a38?api-version=2020-12-01", "retry-after": "10", "server": "Microsoft-HTTPAPI/2.0", "status": "202", @@ -73,7 +73,7 @@ "method": "GET", "url": "https://testaccount.dev.azuresynapse.net/libraryOperationResults/d4185926-bdff-43c4-884e-0f5018388a38", "query": { - "api-version": "2019-06-01-preview" + "api-version": "2020-12-01" }, "requestBody": null, "status": 202, @@ -84,7 +84,7 @@ "content-length": "23", "content-type": "application/json; charset=utf-8", "date": "Fri, 11 Jun 2021 18:41:49 GMT", - "location": "https://testaccount.dev.azuresynapse.net/libraryOperationResults/d4185926-bdff-43c4-884e-0f5018388a38?api-version=2019-06-01-preview", + "location": "https://testaccount.dev.azuresynapse.net/libraryOperationResults/d4185926-bdff-43c4-884e-0f5018388a38?api-version=2020-12-01", "retry-after": "10", "server": "Microsoft-HTTPAPI/2.0", "status": "202", @@ -96,7 +96,7 @@ "method": "GET", "url": "https://testaccount.dev.azuresynapse.net/libraryOperationResults/d4185926-bdff-43c4-884e-0f5018388a38", "query": { - "api-version": "2019-06-01-preview" + "api-version": "2020-12-01" }, "requestBody": null, "status": 200, @@ -116,4 +116,4 @@ "newDate": {} }, "hash": "8305f99fa16e0395a8eda17e79570dd2" -} \ No newline at end of file +} diff --git a/sdk/synapse/synapse-artifacts/recordings/browsers/library/recording_should_get_library.json b/sdk/synapse/synapse-artifacts/recordings/browsers/library/recording_should_get_library.json index 8d37c4a75eb8..0c4760c14333 100644 --- a/sdk/synapse/synapse-artifacts/recordings/browsers/library/recording_should_get_library.json +++ b/sdk/synapse/synapse-artifacts/recordings/browsers/library/recording_should_get_library.json @@ -28,7 +28,7 @@ "method": "GET", "url": "https://testaccount.dev.azuresynapse.net/libraries/testLibraryName.jar", "query": { - "api-version": "2019-06-01-preview" + "api-version": "2020-12-01" }, "requestBody": null, "status": 200, @@ -49,4 +49,4 @@ "newDate": {} }, "hash": "df6f8ed12a51828a757a2a9d3e75ab90" -} \ No newline at end of file +} diff --git a/sdk/synapse/synapse-artifacts/recordings/browsers/library/recording_should_list_library.json b/sdk/synapse/synapse-artifacts/recordings/browsers/library/recording_should_list_library.json index cdc54ab26b31..47fe4e7189bf 100644 --- a/sdk/synapse/synapse-artifacts/recordings/browsers/library/recording_should_list_library.json +++ b/sdk/synapse/synapse-artifacts/recordings/browsers/library/recording_should_list_library.json @@ -28,7 +28,7 @@ "method": "GET", "url": "https://testaccount.dev.azuresynapse.net/libraries", "query": { - "api-version": "2019-06-01-preview" + "api-version": "2020-12-01" }, "requestBody": null, "status": 200, @@ -49,4 +49,4 @@ "newDate": {} }, "hash": "bee07bf9e77246e768f8dc9a376ceb27" -} \ No newline at end of file +} diff --git a/sdk/synapse/synapse-artifacts/recordings/node/dataflow/recording_should_create_dataflow.js 
b/sdk/synapse/synapse-artifacts/recordings/node/dataflow/recording_should_create_dataflow.js index e9e723c0fc90..21b98822ca94 100644 --- a/sdk/synapse/synapse-artifacts/recordings/node/dataflow/recording_should_create_dataflow.js +++ b/sdk/synapse/synapse-artifacts/recordings/node/dataflow/recording_should_create_dataflow.js @@ -119,7 +119,7 @@ nock('https://testaccount.dev.azuresynapse.net', {"encodedQueryParams":true}) 'Content-Type', 'application/json; charset=utf-8', 'Location', - 'https://testaccount.dev.azuresynapse.net/operationResults/5f0af254-8877-4f3f-9f2a-d18d314a396b?api-version=2019-06-01-preview', + 'https://testaccount.dev.azuresynapse.net/operationResults/5f0af254-8877-4f3f-9f2a-d18d314a396b?api-version=2020-12-01', 'Retry-After', '0', 'Server', @@ -151,7 +151,7 @@ nock('https://testaccount.dev.azuresynapse.net', {"encodedQueryParams":true}) 'Content-Type', 'application/json; charset=utf-8', 'Location', - 'https://testaccount.dev.azuresynapse.net/operationResults/5f0af254-8877-4f3f-9f2a-d18d314a396b?api-version=2019-06-01-preview', + 'https://testaccount.dev.azuresynapse.net/operationResults/5f0af254-8877-4f3f-9f2a-d18d314a396b?api-version=2020-12-01', 'Retry-After', '0', 'Server', @@ -183,7 +183,7 @@ nock('https://testaccount.dev.azuresynapse.net', {"encodedQueryParams":true}) 'Content-Type', 'application/json; charset=utf-8', 'Location', - 'https://testaccount.dev.azuresynapse.net/operationResults/5f0af254-8877-4f3f-9f2a-d18d314a396b?api-version=2019-06-01-preview', + 'https://testaccount.dev.azuresynapse.net/operationResults/5f0af254-8877-4f3f-9f2a-d18d314a396b?api-version=2020-12-01', 'Retry-After', '0', 'Server', diff --git a/sdk/synapse/synapse-artifacts/recordings/node/dataflow/recording_should_delete_dataflow.js b/sdk/synapse/synapse-artifacts/recordings/node/dataflow/recording_should_delete_dataflow.js index e8acf2dcec71..f99c8e489a57 100644 --- a/sdk/synapse/synapse-artifacts/recordings/node/dataflow/recording_should_delete_dataflow.js +++ b/sdk/synapse/synapse-artifacts/recordings/node/dataflow/recording_should_delete_dataflow.js @@ -119,7 +119,7 @@ nock('https://testaccount.dev.azuresynapse.net', {"encodedQueryParams":true}) 'Content-Type', 'application/json; charset=utf-8', 'Location', - 'https://testaccount.dev.azuresynapse.net/operationResults/c737135f-c70d-4d6f-b385-6a446e7deca6?api-version=2019-06-01-preview', + 'https://testaccount.dev.azuresynapse.net/operationResults/c737135f-c70d-4d6f-b385-6a446e7deca6?api-version=2020-12-01', 'Server', 'Microsoft-HTTPAPI/2.0', 'Strict-Transport-Security', @@ -145,7 +145,7 @@ nock('https://testaccount.dev.azuresynapse.net', {"encodedQueryParams":true}) 'Content-Type', 'application/json; charset=utf-8', 'Location', - 'https://testaccount.dev.azuresynapse.net/operationResults/c737135f-c70d-4d6f-b385-6a446e7deca6?api-version=2019-06-01-preview', + 'https://testaccount.dev.azuresynapse.net/operationResults/c737135f-c70d-4d6f-b385-6a446e7deca6?api-version=2020-12-01', 'Retry-After', '0', 'Server', @@ -177,7 +177,7 @@ nock('https://testaccount.dev.azuresynapse.net', {"encodedQueryParams":true}) 'Content-Type', 'application/json; charset=utf-8', 'Location', - 'https://testaccount.dev.azuresynapse.net/operationResults/c737135f-c70d-4d6f-b385-6a446e7deca6?api-version=2019-06-01-preview', + 'https://testaccount.dev.azuresynapse.net/operationResults/c737135f-c70d-4d6f-b385-6a446e7deca6?api-version=2020-12-01', 'Retry-After', '0', 'Server', diff --git 
a/sdk/synapse/synapse-artifacts/recordings/node/dataflow/recording_should_rename_dataflow.js b/sdk/synapse/synapse-artifacts/recordings/node/dataflow/recording_should_rename_dataflow.js index 6bb4473dae6c..bcde09a9aee6 100644 --- a/sdk/synapse/synapse-artifacts/recordings/node/dataflow/recording_should_rename_dataflow.js +++ b/sdk/synapse/synapse-artifacts/recordings/node/dataflow/recording_should_rename_dataflow.js @@ -243,7 +243,7 @@ nock("https://testaccount.dev.azuresynapse.net", { encodedQueryParams: true }) "Content-Type", "application/json; charset=utf-8", "Location", - "https://testaccount.dev.azuresynapse.net/operationResults/645d8c0c-a00e-4d40-aca7-4b814e087368?api-version=2019-06-01-preview", + "https://testaccount.dev.azuresynapse.net/operationResults/645d8c0c-a00e-4d40-aca7-4b814e087368?api-version=2020-12-01", "Retry-After", "1", "Server", @@ -276,7 +276,7 @@ nock("https://testaccount.dev.azuresynapse.net", { encodedQueryParams: true }) "Content-Type", "application/json; charset=utf-8", "Location", - "https://testaccount.dev.azuresynapse.net/operationResults/645d8c0c-a00e-4d40-aca7-4b814e087368?api-version=2019-06-01-preview", + "https://testaccount.dev.azuresynapse.net/operationResults/645d8c0c-a00e-4d40-aca7-4b814e087368?api-version=2020-12-01", "Retry-After", "1", "Server", @@ -308,7 +308,7 @@ nock("https://testaccount.dev.azuresynapse.net", { encodedQueryParams: true }) "Content-Type", "application/json; charset=utf-8", "Location", - "https://testaccount.dev.azuresynapse.net/operationResults/645d8c0c-a00e-4d40-aca7-4b814e087368?api-version=2019-06-01-preview", + "https://testaccount.dev.azuresynapse.net/operationResults/645d8c0c-a00e-4d40-aca7-4b814e087368?api-version=2020-12-01", "Retry-After", "1", "Server", @@ -340,7 +340,7 @@ nock("https://testaccount.dev.azuresynapse.net", { encodedQueryParams: true }) "Content-Type", "application/json; charset=utf-8", "Location", - "https://testaccount.dev.azuresynapse.net/operationResults/645d8c0c-a00e-4d40-aca7-4b814e087368?api-version=2019-06-01-preview", + "https://testaccount.dev.azuresynapse.net/operationResults/645d8c0c-a00e-4d40-aca7-4b814e087368?api-version=2020-12-01", "Retry-After", "1", "Server", @@ -372,7 +372,7 @@ nock("https://testaccount.dev.azuresynapse.net", { encodedQueryParams: true }) "Content-Type", "application/json; charset=utf-8", "Location", - "https://testaccount.dev.azuresynapse.net/operationResults/645d8c0c-a00e-4d40-aca7-4b814e087368?api-version=2019-06-01-preview", + "https://testaccount.dev.azuresynapse.net/operationResults/645d8c0c-a00e-4d40-aca7-4b814e087368?api-version=2020-12-01", "Retry-After", "1", "Server", @@ -404,7 +404,7 @@ nock("https://testaccount.dev.azuresynapse.net", { encodedQueryParams: true }) "Content-Type", "application/json; charset=utf-8", "Location", - "https://testaccount.dev.azuresynapse.net/operationResults/645d8c0c-a00e-4d40-aca7-4b814e087368?api-version=2019-06-01-preview", + "https://testaccount.dev.azuresynapse.net/operationResults/645d8c0c-a00e-4d40-aca7-4b814e087368?api-version=2020-12-01", "Retry-After", "1", "Server", @@ -436,7 +436,7 @@ nock("https://testaccount.dev.azuresynapse.net", { encodedQueryParams: true }) "Content-Type", "application/json; charset=utf-8", "Location", - "https://testaccount.dev.azuresynapse.net/operationResults/645d8c0c-a00e-4d40-aca7-4b814e087368?api-version=2019-06-01-preview", + "https://testaccount.dev.azuresynapse.net/operationResults/645d8c0c-a00e-4d40-aca7-4b814e087368?api-version=2020-12-01", "Retry-After", "1", "Server", @@ -468,7 
+468,7 @@ nock("https://testaccount.dev.azuresynapse.net", { encodedQueryParams: true }) "Content-Type", "application/json; charset=utf-8", "Location", - "https://testaccount.dev.azuresynapse.net/operationResults/645d8c0c-a00e-4d40-aca7-4b814e087368?api-version=2019-06-01-preview", + "https://testaccount.dev.azuresynapse.net/operationResults/645d8c0c-a00e-4d40-aca7-4b814e087368?api-version=2020-12-01", "Retry-After", "1", "Server", @@ -500,7 +500,7 @@ nock("https://testaccount.dev.azuresynapse.net", { encodedQueryParams: true }) "Content-Type", "application/json; charset=utf-8", "Location", - "https://testaccount.dev.azuresynapse.net/operationResults/645d8c0c-a00e-4d40-aca7-4b814e087368?api-version=2019-06-01-preview", + "https://testaccount.dev.azuresynapse.net/operationResults/645d8c0c-a00e-4d40-aca7-4b814e087368?api-version=2020-12-01", "Retry-After", "1", "Server", diff --git a/sdk/synapse/synapse-artifacts/recordings/node/library/recording_should_create_library.js b/sdk/synapse/synapse-artifacts/recordings/node/library/recording_should_create_library.js index b7ff48c6994f..597c338c7077 100644 --- a/sdk/synapse/synapse-artifacts/recordings/node/library/recording_should_create_library.js +++ b/sdk/synapse/synapse-artifacts/recordings/node/library/recording_should_create_library.js @@ -119,7 +119,7 @@ nock('https://testaccount.dev.azuresynapse.net', {"encodedQueryParams":true}) 'Content-Type', 'application/json; charset=utf-8', 'Location', - 'https://testaccount.dev.azuresynapse.net/libraryOperationResults/1daf9a13-2b8b-40bc-ae5a-e35ed8e8e323?api-version=2019-06-01-preview', + 'https://testaccount.dev.azuresynapse.net/libraryOperationResults/1daf9a13-2b8b-40bc-ae5a-e35ed8e8e323?api-version=2020-12-01', 'Retry-After', '0', 'Server', @@ -151,7 +151,7 @@ nock('https://testaccount.dev.azuresynapse.net', {"encodedQueryParams":true}) 'Content-Type', 'application/json; charset=utf-8', 'Location', - 'https://testaccount.dev.azuresynapse.net/libraryOperationResults/1daf9a13-2b8b-40bc-ae5a-e35ed8e8e323?api-version=2019-06-01-preview', + 'https://testaccount.dev.azuresynapse.net/libraryOperationResults/1daf9a13-2b8b-40bc-ae5a-e35ed8e8e323?api-version=2020-12-01', 'Retry-After', '0', 'Server', @@ -183,7 +183,7 @@ nock('https://testaccount.dev.azuresynapse.net', {"encodedQueryParams":true}) 'Content-Type', 'application/json; charset=utf-8', 'Location', - 'https://testaccount.dev.azuresynapse.net/libraryOperationResults/1daf9a13-2b8b-40bc-ae5a-e35ed8e8e323?api-version=2019-06-01-preview', + 'https://testaccount.dev.azuresynapse.net/libraryOperationResults/1daf9a13-2b8b-40bc-ae5a-e35ed8e8e323?api-version=2020-12-01', 'Retry-After', '0', 'Server', @@ -215,7 +215,7 @@ nock('https://testaccount.dev.azuresynapse.net', {"encodedQueryParams":true}) 'Content-Type', 'application/json; charset=utf-8', 'Location', - 'https://testaccount.dev.azuresynapse.net/libraryOperationResults/1daf9a13-2b8b-40bc-ae5a-e35ed8e8e323?api-version=2019-06-01-preview', + 'https://testaccount.dev.azuresynapse.net/libraryOperationResults/1daf9a13-2b8b-40bc-ae5a-e35ed8e8e323?api-version=2020-12-01', 'Retry-After', '0', 'Server', diff --git a/sdk/synapse/synapse-artifacts/recordings/node/library/recording_should_delete_library.js b/sdk/synapse/synapse-artifacts/recordings/node/library/recording_should_delete_library.js index c46cad82be8f..d7604b553f5f 100644 --- a/sdk/synapse/synapse-artifacts/recordings/node/library/recording_should_delete_library.js +++ 
b/sdk/synapse/synapse-artifacts/recordings/node/library/recording_should_delete_library.js @@ -119,7 +119,7 @@ nock('https://testaccount.dev.azuresynapse.net', {"encodedQueryParams":true}) 'Content-Type', 'application/json; charset=utf-8', 'Location', - 'https://testaccount.dev.azuresynapse.net/libraryOperationResults/9bd185ed-8f5d-4e11-949b-28334b279679?api-version=2019-06-01-preview', + 'https://testaccount.dev.azuresynapse.net/libraryOperationResults/9bd185ed-8f5d-4e11-949b-28334b279679?api-version=2020-12-01', 'Server', 'Microsoft-HTTPAPI/2.0', 'Strict-Transport-Security', @@ -145,7 +145,7 @@ nock('https://testaccount.dev.azuresynapse.net', {"encodedQueryParams":true}) 'Content-Type', 'application/json; charset=utf-8', 'Location', - 'https://testaccount.dev.azuresynapse.net/libraryOperationResults/9bd185ed-8f5d-4e11-949b-28334b279679?api-version=2019-06-01-preview', + 'https://testaccount.dev.azuresynapse.net/libraryOperationResults/9bd185ed-8f5d-4e11-949b-28334b279679?api-version=2020-12-01', 'Retry-After', '0', 'Server', @@ -177,7 +177,7 @@ nock('https://testaccount.dev.azuresynapse.net', {"encodedQueryParams":true}) 'Content-Type', 'application/json; charset=utf-8', 'Location', - 'https://testaccount.dev.azuresynapse.net/libraryOperationResults/9bd185ed-8f5d-4e11-949b-28334b279679?api-version=2019-06-01-preview', + 'https://testaccount.dev.azuresynapse.net/libraryOperationResults/9bd185ed-8f5d-4e11-949b-28334b279679?api-version=2020-12-01', 'Retry-After', '0', 'Server', diff --git a/sdk/synapse/synapse-artifacts/review/synapse-artifacts.api.md b/sdk/synapse/synapse-artifacts/review/synapse-artifacts.api.md index 8039ccef43fd..3363acefb5f5 100644 --- a/sdk/synapse/synapse-artifacts/review/synapse-artifacts.api.md +++ b/sdk/synapse/synapse-artifacts/review/synapse-artifacts.api.md @@ -103,6 +103,71 @@ export type AmazonMWSSource = TabularSource & { query?: any; }; +// @public +export type AmazonRdsForOracleLinkedService = LinkedService & { + type: "AmazonRdsForOracle"; + connectionString: any; + password?: SecretBaseUnion; + encryptedCredential?: any; +}; + +// @public +export type AmazonRdsForOraclePartitionOption = string; + +// @public +export interface AmazonRdsForOraclePartitionSettings { + partitionColumnName?: any; + partitionLowerBound?: any; + partitionNames?: any; + partitionUpperBound?: any; +} + +// @public +export type AmazonRdsForOracleSource = CopySource & { + type: "AmazonRdsForOracleSource"; + oracleReaderQuery?: any; + queryTimeout?: any; + partitionOption?: any; + partitionSettings?: AmazonRdsForOraclePartitionSettings; + additionalColumns?: any; +}; + +// @public +export type AmazonRdsForOracleTableDataset = Dataset & { + type: "AmazonRdsForOracleTable"; + schemaTypePropertiesSchema?: any; + table?: any; +}; + +// @public +export type AmazonRdsForSqlServerLinkedService = LinkedService & { + type: "AmazonRdsForSqlServer"; + connectionString: any; + userName?: any; + password?: SecretBaseUnion; + encryptedCredential?: any; +}; + +// @public +export type AmazonRdsForSqlServerSource = TabularSource & { + type: "AmazonRdsForSqlServerSource"; + sqlReaderQuery?: any; + sqlReaderStoredProcedureName?: any; + storedProcedureParameters?: { + [propertyName: string]: StoredProcedureParameter; + }; + produceAdditionalTypes?: any; + partitionOption?: any; + partitionSettings?: SqlPartitionSettings; +}; + +// @public +export type AmazonRdsForSqlServerTableDataset = Dataset & { + type: "AmazonRdsForSqlServerTable"; + schemaTypePropertiesSchema?: any; + table?: any; +}; + // @public 
export type AmazonRedshiftLinkedService = LinkedService & { type: "AmazonRedshift"; @@ -201,16 +266,24 @@ export class ArtifactsClient extends ArtifactsClientContext { // (undocumented) integrationRuntimes: IntegrationRuntimes; // (undocumented) + kqlScriptOperations: KqlScriptOperations; + // (undocumented) + kqlScripts: KqlScripts; + // (undocumented) library: Library; // (undocumented) linkedServiceOperations: LinkedServiceOperations; // (undocumented) + notebookOperationResult: NotebookOperationResult; + // (undocumented) notebookOperations: NotebookOperations; // (undocumented) pipelineOperations: PipelineOperations; // (undocumented) pipelineRunOperations: PipelineRunOperations; // (undocumented) + sparkConfigurationOperations: SparkConfigurationOperations; + // (undocumented) sparkJobDefinitionOperations: SparkJobDefinitionOperations; // (undocumented) sqlPools: SqlPools; @@ -230,14 +303,11 @@ export class ArtifactsClient extends ArtifactsClientContext { export class ArtifactsClientContext extends coreClient.ServiceClient { constructor(credentials: coreAuth.TokenCredential, endpoint: string, options?: ArtifactsClientOptionalParams); // (undocumented) - apiVersion: string; - // (undocumented) endpoint: string; } // @public export interface ArtifactsClientOptionalParams extends coreClient.ServiceClientOptions { - apiVersion?: string; endpoint?: string; } @@ -281,7 +351,7 @@ export type AvroSink = CopySink & { export type AvroSource = CopySource & { type: "AvroSource"; storeSettings?: StoreReadSettingsUnion; - additionalColumns?: AdditionalColumns[]; + additionalColumns?: any; }; // @public @@ -389,6 +459,7 @@ export type AzureBlobStorageLinkedService = LinkedService & { servicePrincipalKey?: SecretBaseUnion; tenant?: any; azureCloudType?: any; + accountKind?: string; encryptedCredential?: string; }; @@ -503,10 +574,10 @@ export type AzureDataExplorerCommandActivity = ExecutionActivity & { export type AzureDataExplorerLinkedService = LinkedService & { type: "AzureDataExplorer"; endpoint: any; - servicePrincipalId: any; - servicePrincipalKey: SecretBaseUnion; + servicePrincipalId?: any; + servicePrincipalKey?: SecretBaseUnion; database: any; - tenant: any; + tenant?: any; }; // @public @@ -523,7 +594,7 @@ export type AzureDataExplorerSource = CopySource & { query: any; noTruncation?: any; queryTimeout?: any; - additionalColumns?: AdditionalColumns[]; + additionalColumns?: any; }; // @public @@ -1148,6 +1219,14 @@ export interface CloudError { target?: string; } +// @public +export interface CloudErrorAutoGenerated { + code: string; + details?: CloudErrorAutoGenerated[]; + message: string; + target?: string; +} + // @public export type CommonDataServiceForAppsEntityDataset = Dataset & { type: "CommonDataServiceForAppsEntity"; @@ -1157,12 +1236,12 @@ export type CommonDataServiceForAppsEntityDataset = Dataset & { // @public export type CommonDataServiceForAppsLinkedService = LinkedService & { type: "CommonDataServiceForApps"; - deploymentType: DynamicsDeploymentType; + deploymentType: any; hostName?: any; port?: any; serviceUri?: any; organizationName?: any; - authenticationType: DynamicsAuthenticationType; + authenticationType: any; username?: any; password?: SecretBaseUnion; servicePrincipalId?: any; @@ -1183,7 +1262,7 @@ export type CommonDataServiceForAppsSink = CopySink & { export type CommonDataServiceForAppsSource = CopySource & { type: "CommonDataServiceForAppsSource"; query?: any; - additionalColumns?: AdditionalColumns[]; + additionalColumns?: any; }; // @public @@ -1282,11 
+1361,11 @@ export interface CopySource { maxConcurrentConnections?: any; sourceRetryCount?: any; sourceRetryWait?: any; - type: "AvroSource" | "ExcelSource" | "ParquetSource" | "DelimitedTextSource" | "JsonSource" | "XmlSource" | "OrcSource" | "BinarySource" | "TabularSource" | "AzureTableSource" | "BlobSource" | "DocumentDbCollectionSource" | "CosmosDbSqlApiSource" | "DynamicsSource" | "DynamicsCrmSource" | "CommonDataServiceForAppsSource" | "RelationalSource" | "InformixSource" | "MicrosoftAccessSource" | "Db2Source" | "OdbcSource" | "MySqlSource" | "PostgreSqlSource" | "SybaseSource" | "SapBwSource" | "ODataSource" | "SalesforceSource" | "SalesforceServiceCloudSource" | "SapCloudForCustomerSource" | "SapEccSource" | "SapHanaSource" | "SapOpenHubSource" | "SapTableSource" | "RestSource" | "SqlSource" | "SqlServerSource" | "AzureSqlSource" | "SqlMISource" | "SqlDWSource" | "FileSystemSource" | "HdfsSource" | "AzureMySqlSource" | "AzureDataExplorerSource" | "OracleSource" | "TeradataSource" | "WebSource" | "CassandraSource" | "MongoDbSource" | "MongoDbAtlasSource" | "MongoDbV2Source" | "CosmosDbMongoDbApiSource" | "Office365Source" | "AzureDataLakeStoreSource" | "AzureBlobFSSource" | "HttpSource" | "AmazonMWSSource" | "AzurePostgreSqlSource" | "ConcurSource" | "CouchbaseSource" | "DrillSource" | "EloquaSource" | "GoogleBigQuerySource" | "GreenplumSource" | "HBaseSource" | "HiveSource" | "HubspotSource" | "ImpalaSource" | "JiraSource" | "MagentoSource" | "MariaDBSource" | "AzureMariaDBSource" | "MarketoSource" | "PaypalSource" | "PhoenixSource" | "PrestoSource" | "QuickBooksSource" | "ServiceNowSource" | "ShopifySource" | "SparkSource" | "SquareSource" | "XeroSource" | "ZohoSource" | "NetezzaSource" | "VerticaSource" | "SalesforceMarketingCloudSource" | "ResponsysSource" | "DynamicsAXSource" | "OracleServiceCloudSource" | "GoogleAdWordsSource" | "AmazonRedshiftSource" | "SnowflakeSource" | "AzureDatabricksDeltaLakeSource" | "SharePointOnlineListSource"; + type: "AvroSource" | "ExcelSource" | "ParquetSource" | "DelimitedTextSource" | "JsonSource" | "XmlSource" | "OrcSource" | "BinarySource" | "TabularSource" | "AzureTableSource" | "BlobSource" | "DocumentDbCollectionSource" | "CosmosDbSqlApiSource" | "DynamicsSource" | "DynamicsCrmSource" | "CommonDataServiceForAppsSource" | "RelationalSource" | "InformixSource" | "MicrosoftAccessSource" | "Db2Source" | "OdbcSource" | "MySqlSource" | "PostgreSqlSource" | "SybaseSource" | "SapBwSource" | "ODataSource" | "SalesforceSource" | "SalesforceServiceCloudSource" | "SapCloudForCustomerSource" | "SapEccSource" | "SapHanaSource" | "SapOpenHubSource" | "SapTableSource" | "RestSource" | "SqlSource" | "SqlServerSource" | "AmazonRdsForSqlServerSource" | "AzureSqlSource" | "SqlMISource" | "SqlDWSource" | "FileSystemSource" | "HdfsSource" | "AzureMySqlSource" | "AzureDataExplorerSource" | "OracleSource" | "AmazonRdsForOracleSource" | "TeradataSource" | "WebSource" | "CassandraSource" | "MongoDbSource" | "MongoDbAtlasSource" | "MongoDbV2Source" | "CosmosDbMongoDbApiSource" | "Office365Source" | "AzureDataLakeStoreSource" | "AzureBlobFSSource" | "HttpSource" | "AmazonMWSSource" | "AzurePostgreSqlSource" | "ConcurSource" | "CouchbaseSource" | "DrillSource" | "EloquaSource" | "GoogleBigQuerySource" | "GreenplumSource" | "HBaseSource" | "HiveSource" | "HubspotSource" | "ImpalaSource" | "JiraSource" | "MagentoSource" | "MariaDBSource" | "AzureMariaDBSource" | "MarketoSource" | "PaypalSource" | "PhoenixSource" | "PrestoSource" | "QuickBooksSource" | 
"ServiceNowSource" | "ShopifySource" | "SparkSource" | "SquareSource" | "XeroSource" | "ZohoSource" | "NetezzaSource" | "VerticaSource" | "SalesforceMarketingCloudSource" | "ResponsysSource" | "DynamicsAXSource" | "OracleServiceCloudSource" | "GoogleAdWordsSource" | "AmazonRedshiftSource" | "SnowflakeSource" | "AzureDatabricksDeltaLakeSource" | "SharePointOnlineListSource"; } // @public (undocumented) -export type CopySourceUnion = CopySource | AvroSource | ExcelSource | ParquetSource | DelimitedTextSource | JsonSource | XmlSource | OrcSource | BinarySource | TabularSourceUnion | BlobSource | DocumentDbCollectionSource | CosmosDbSqlApiSource | DynamicsSource | DynamicsCrmSource | CommonDataServiceForAppsSource | RelationalSource | MicrosoftAccessSource | ODataSource | SalesforceServiceCloudSource | RestSource | FileSystemSource | HdfsSource | AzureDataExplorerSource | OracleSource | WebSource | MongoDbSource | MongoDbAtlasSource | MongoDbV2Source | CosmosDbMongoDbApiSource | Office365Source | AzureDataLakeStoreSource | AzureBlobFSSource | HttpSource | SnowflakeSource | AzureDatabricksDeltaLakeSource | SharePointOnlineListSource; +export type CopySourceUnion = CopySource | AvroSource | ExcelSource | ParquetSource | DelimitedTextSource | JsonSource | XmlSource | OrcSource | BinarySource | TabularSourceUnion | BlobSource | DocumentDbCollectionSource | CosmosDbSqlApiSource | DynamicsSource | DynamicsCrmSource | CommonDataServiceForAppsSource | RelationalSource | MicrosoftAccessSource | ODataSource | SalesforceServiceCloudSource | RestSource | FileSystemSource | HdfsSource | AzureDataExplorerSource | OracleSource | AmazonRdsForOracleSource | WebSource | MongoDbSource | MongoDbAtlasSource | MongoDbV2Source | CosmosDbMongoDbApiSource | Office365Source | AzureDataLakeStoreSource | AzureBlobFSSource | HttpSource | SnowflakeSource | AzureDatabricksDeltaLakeSource | SharePointOnlineListSource; // @public export interface CopyTranslator { @@ -1333,7 +1412,7 @@ export type CosmosDbMongoDbApiSource = CopySource & { cursorMethods?: MongoDbCursorMethodsProperties; batchSize?: any; queryTimeout?: any; - additionalColumns?: AdditionalColumns[]; + additionalColumns?: any; }; // @public @@ -1355,7 +1434,7 @@ export type CosmosDbSqlApiSource = CopySource & { pageSize?: any; preferredRegions?: any; detectDatetime?: any; - additionalColumns?: AdditionalColumns[]; + additionalColumns?: any; }; // @public @@ -1380,12 +1459,10 @@ export type CouchbaseTableDataset = Dataset & { // @public export interface CreateDataFlowDebugSessionRequest { - clusterTimeout?: number; - dataBricksLinkedService?: LinkedServiceResource; - dataFlowName?: string; - existingClusterId?: string; - newClusterName?: string; - newClusterNodeType?: string; + computeType?: string; + coreCount?: number; + integrationRuntime?: IntegrationRuntimeDebugResource; + timeToLive?: number; } // @public @@ -1493,12 +1570,29 @@ export interface DataFlow { // @public export type DataFlowComputeType = string; +// @public +export interface DataFlowCreateOrUpdateDataFlowOptionalParams extends coreClient.OperationOptions { + ifMatch?: string; + resumeFrom?: string; + updateIntervalInMs?: number; +} + +// @public +export type DataFlowCreateOrUpdateDataFlowResponse = DataFlowResource; + +// @public +export interface DataFlowDebugCommandPayload { + columns?: string[]; + expression?: string; + rowLimits?: number; + streamName: string; +} + // @public export interface DataFlowDebugCommandRequest { - commandName?: string; - commandPayload: any; - dataFlowName?: 
string; - sessionId: string; + command?: DataFlowDebugCommandType; + commandPayload?: DataFlowDebugCommandPayload; + sessionId?: string; } // @public @@ -1507,6 +1601,9 @@ export interface DataFlowDebugCommandResponse { status?: string; } +// @public +export type DataFlowDebugCommandType = string; + // @public export interface DataFlowDebugPackage { [property: string]: any; @@ -1638,70 +1735,54 @@ export interface DataFlowDebugStatisticsRequest { } // @public -export interface DataFlowFolder { - name?: string; -} - -// @public -export interface DataFlowListResponse { - nextLink?: string; - value: DataFlowResource[]; -} - -// @public -export interface DataFlowOperations { - beginCreateOrUpdateDataFlow(dataFlowName: string, dataFlow: DataFlowResource, options?: DataFlowOperationsCreateOrUpdateDataFlowOptionalParams): Promise, DataFlowOperationsCreateOrUpdateDataFlowResponse>>; - beginCreateOrUpdateDataFlowAndWait(dataFlowName: string, dataFlow: DataFlowResource, options?: DataFlowOperationsCreateOrUpdateDataFlowOptionalParams): Promise; - beginDeleteDataFlow(dataFlowName: string, options?: DataFlowOperationsDeleteDataFlowOptionalParams): Promise, void>>; - beginDeleteDataFlowAndWait(dataFlowName: string, options?: DataFlowOperationsDeleteDataFlowOptionalParams): Promise; - beginRenameDataFlow(dataFlowName: string, request: ArtifactRenameRequest, options?: DataFlowOperationsRenameDataFlowOptionalParams): Promise, void>>; - beginRenameDataFlowAndWait(dataFlowName: string, request: ArtifactRenameRequest, options?: DataFlowOperationsRenameDataFlowOptionalParams): Promise; - getDataFlow(dataFlowName: string, options?: DataFlowOperationsGetDataFlowOptionalParams): Promise; - listDataFlowsByWorkspace(options?: DataFlowOperationsGetDataFlowsByWorkspaceOptionalParams): PagedAsyncIterableIterator; -} - -// @public -export interface DataFlowOperationsCreateOrUpdateDataFlowOptionalParams extends coreClient.OperationOptions { - ifMatch?: string; +export interface DataFlowDeleteDataFlowOptionalParams extends coreClient.OperationOptions { resumeFrom?: string; updateIntervalInMs?: number; } // @public -export type DataFlowOperationsCreateOrUpdateDataFlowResponse = DataFlowResource; - -// @public -export interface DataFlowOperationsDeleteDataFlowOptionalParams extends coreClient.OperationOptions { - resumeFrom?: string; - updateIntervalInMs?: number; +export interface DataFlowFolder { + name?: string; } // @public -export interface DataFlowOperationsGetDataFlowOptionalParams extends coreClient.OperationOptions { +export interface DataFlowGetDataFlowOptionalParams extends coreClient.OperationOptions { ifNoneMatch?: string; } // @public -export type DataFlowOperationsGetDataFlowResponse = DataFlowResource; +export type DataFlowGetDataFlowResponse = DataFlowResource; // @public -export interface DataFlowOperationsGetDataFlowsByWorkspaceNextOptionalParams extends coreClient.OperationOptions { +export interface DataFlowGetDataFlowsByWorkspaceNextOptionalParams extends coreClient.OperationOptions { } // @public -export type DataFlowOperationsGetDataFlowsByWorkspaceNextResponse = DataFlowListResponse; +export type DataFlowGetDataFlowsByWorkspaceNextResponse = DataFlowListResponse; // @public -export interface DataFlowOperationsGetDataFlowsByWorkspaceOptionalParams extends coreClient.OperationOptions { +export interface DataFlowGetDataFlowsByWorkspaceOptionalParams extends coreClient.OperationOptions { } // @public -export type DataFlowOperationsGetDataFlowsByWorkspaceResponse = DataFlowListResponse; +export type 
DataFlowGetDataFlowsByWorkspaceResponse = DataFlowListResponse; // @public -export interface DataFlowOperationsRenameDataFlowOptionalParams extends coreClient.OperationOptions { - resumeFrom?: string; - updateIntervalInMs?: number; +export interface DataFlowListResponse { + nextLink?: string; + value: DataFlowResource[]; +} + +// @public +export interface DataFlowOperations { + beginCreateOrUpdateDataFlow(dataFlowName: string, dataFlow: DataFlowResource, options?: DataFlowCreateOrUpdateDataFlowOptionalParams): Promise, DataFlowCreateOrUpdateDataFlowResponse>>; + beginCreateOrUpdateDataFlowAndWait(dataFlowName: string, dataFlow: DataFlowResource, options?: DataFlowCreateOrUpdateDataFlowOptionalParams): Promise; + beginDeleteDataFlow(dataFlowName: string, options?: DataFlowDeleteDataFlowOptionalParams): Promise, void>>; + beginDeleteDataFlowAndWait(dataFlowName: string, options?: DataFlowDeleteDataFlowOptionalParams): Promise; + beginRenameDataFlow(dataFlowName: string, request: ArtifactRenameRequest, options?: DataFlowRenameDataFlowOptionalParams): Promise, void>>; + beginRenameDataFlowAndWait(dataFlowName: string, request: ArtifactRenameRequest, options?: DataFlowRenameDataFlowOptionalParams): Promise; + getDataFlow(dataFlowName: string, options?: DataFlowGetDataFlowOptionalParams): Promise; + listDataFlowsByWorkspace(options?: DataFlowGetDataFlowsByWorkspaceOptionalParams): PagedAsyncIterableIterator; } // @public @@ -1715,6 +1796,12 @@ export interface DataFlowReference { // @public export type DataFlowReferenceType = string; +// @public +export interface DataFlowRenameDataFlowOptionalParams extends coreClient.OperationOptions { + resumeFrom?: string; + updateIntervalInMs?: number; +} + // @public export type DataFlowResource = SubResource & { properties: DataFlowUnion; @@ -1782,7 +1869,7 @@ export interface Dataset { }; schema?: any; structure?: any; - type: "AmazonS3Object" | "Avro" | "Excel" | "Parquet" | "DelimitedText" | "Json" | "Xml" | "Orc" | "Binary" | "AzureBlob" | "AzureTable" | "AzureSqlTable" | "AzureSqlMITable" | "AzureSqlDWTable" | "CassandraTable" | "CustomDataset" | "CosmosDbSqlApiCollection" | "DocumentDbCollection" | "DynamicsEntity" | "DynamicsCrmEntity" | "CommonDataServiceForAppsEntity" | "AzureDataLakeStoreFile" | "AzureBlobFSFile" | "Office365Table" | "FileShare" | "MongoDbCollection" | "MongoDbAtlasCollection" | "MongoDbV2Collection" | "CosmosDbMongoDbApiCollection" | "ODataResource" | "OracleTable" | "TeradataTable" | "AzureMySqlTable" | "AmazonRedshiftTable" | "Db2Table" | "RelationalTable" | "InformixTable" | "OdbcTable" | "MySqlTable" | "PostgreSqlTable" | "MicrosoftAccessTable" | "SalesforceObject" | "SalesforceServiceCloudObject" | "SybaseTable" | "SapBwCube" | "SapCloudForCustomerResource" | "SapEccResource" | "SapHanaTable" | "SapOpenHubTable" | "SqlServerTable" | "RestResource" | "SapTableResource" | "WebTable" | "AzureSearchIndex" | "HttpFile" | "AmazonMWSObject" | "AzurePostgreSqlTable" | "ConcurObject" | "CouchbaseTable" | "DrillTable" | "EloquaObject" | "GoogleBigQueryObject" | "GreenplumTable" | "HBaseObject" | "HiveObject" | "HubspotObject" | "ImpalaObject" | "JiraObject" | "MagentoObject" | "MariaDBTable" | "AzureMariaDBTable" | "MarketoObject" | "PaypalObject" | "PhoenixObject" | "PrestoObject" | "QuickBooksObject" | "ServiceNowObject" | "ShopifyObject" | "SparkObject" | "SquareObject" | "XeroObject" | "ZohoObject" | "NetezzaTable" | "VerticaTable" | "SalesforceMarketingCloudObject" | "ResponsysObject" | "DynamicsAXResource" | 
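As the `DataFlowOperations` interface above shows, only the option-bag names changed in this release (the `Operations` infix is gone); the calling pattern is unchanged. A sketch assuming a `DataFlowOperations` instance obtained from the client, whose construction is outside this fragment:

    import type { DataFlowOperations, DataFlowResource } from "@azure/synapse-artifacts";

    async function upsertAndList(dataFlows: DataFlowOperations, flow: DataFlowResource) {
      // Long-running create/update, polled to completion by the *AndWait variant.
      const created = await dataFlows.beginCreateOrUpdateDataFlowAndWait("myFlow", flow, {
        ifMatch: "*" // optional ETag precondition
      });
      // Listing is paged; consume it with for-await.
      for await (const df of dataFlows.listDataFlowsByWorkspace()) {
        console.log(df.name);
      }
      return created;
    }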
"OracleServiceCloudObject" | "AzureDataExplorerTable" | "GoogleAdWordsObject" | "SnowflakeTable" | "SharePointOnlineListResource" | "AzureDatabricksDeltaLakeDataset"; + type: "AmazonS3Object" | "Avro" | "Excel" | "Parquet" | "DelimitedText" | "Json" | "Xml" | "Orc" | "Binary" | "AzureBlob" | "AzureTable" | "AzureSqlTable" | "AzureSqlMITable" | "AzureSqlDWTable" | "CassandraTable" | "CustomDataset" | "CosmosDbSqlApiCollection" | "DocumentDbCollection" | "DynamicsEntity" | "DynamicsCrmEntity" | "CommonDataServiceForAppsEntity" | "AzureDataLakeStoreFile" | "AzureBlobFSFile" | "Office365Table" | "FileShare" | "MongoDbCollection" | "MongoDbAtlasCollection" | "MongoDbV2Collection" | "CosmosDbMongoDbApiCollection" | "ODataResource" | "OracleTable" | "AmazonRdsForOracleTable" | "TeradataTable" | "AzureMySqlTable" | "AmazonRedshiftTable" | "Db2Table" | "RelationalTable" | "InformixTable" | "OdbcTable" | "MySqlTable" | "PostgreSqlTable" | "MicrosoftAccessTable" | "SalesforceObject" | "SalesforceServiceCloudObject" | "SybaseTable" | "SapBwCube" | "SapCloudForCustomerResource" | "SapEccResource" | "SapHanaTable" | "SapOpenHubTable" | "SqlServerTable" | "AmazonRdsForSqlServerTable" | "RestResource" | "SapTableResource" | "WebTable" | "AzureSearchIndex" | "HttpFile" | "AmazonMWSObject" | "AzurePostgreSqlTable" | "ConcurObject" | "CouchbaseTable" | "DrillTable" | "EloquaObject" | "GoogleBigQueryObject" | "GreenplumTable" | "HBaseObject" | "HiveObject" | "HubspotObject" | "ImpalaObject" | "JiraObject" | "MagentoObject" | "MariaDBTable" | "AzureMariaDBTable" | "MarketoObject" | "PaypalObject" | "PhoenixObject" | "PrestoObject" | "QuickBooksObject" | "ServiceNowObject" | "ShopifyObject" | "SparkObject" | "SquareObject" | "XeroObject" | "ZohoObject" | "NetezzaTable" | "VerticaTable" | "SalesforceMarketingCloudObject" | "ResponsysObject" | "DynamicsAXResource" | "OracleServiceCloudObject" | "AzureDataExplorerTable" | "GoogleAdWordsObject" | "SnowflakeTable" | "SharePointOnlineListResource" | "AzureDatabricksDeltaLakeDataset"; } // @public @@ -1802,6 +1889,16 @@ export type DatasetCompressionLevel = string; // @public (undocumented) export type DatasetCompressionUnion = DatasetCompression | DatasetBZip2Compression | DatasetGZipCompression | DatasetDeflateCompression | DatasetZipDeflateCompression | DatasetTarCompression | DatasetTarGZipCompression; +// @public +export interface DatasetCreateOrUpdateDatasetOptionalParams extends coreClient.OperationOptions { + ifMatch?: string; + resumeFrom?: string; + updateIntervalInMs?: number; +} + +// @public +export type DatasetCreateOrUpdateDatasetResponse = DatasetResource; + // @public export interface DatasetDataElement { name?: any; @@ -1820,87 +1917,71 @@ export type DatasetDeflateCompression = DatasetCompression & { }; // @public -export interface DatasetFolder { - name?: string; +export interface DatasetDeleteDatasetOptionalParams extends coreClient.OperationOptions { + resumeFrom?: string; + updateIntervalInMs?: number; } // @public -export type DatasetGZipCompression = DatasetCompression & { - type: "GZip"; - level?: any; -}; - -// @public -export interface DatasetListResponse { - nextLink?: string; - value: DatasetResource[]; +export interface DatasetFolder { + name?: string; } // @public -export interface DatasetLocation { - [property: string]: any; - fileName?: any; - folderPath?: any; - type: "AzureBlobStorageLocation" | "AzureBlobFSLocation" | "AzureDataLakeStoreLocation" | "AmazonS3Location" | "FileServerLocation" | "AzureFileStorageLocation" | 
"GoogleCloudStorageLocation" | "FtpServerLocation" | "SftpLocation" | "HttpServerLocation" | "HdfsLocation"; +export interface DatasetGetDatasetOptionalParams extends coreClient.OperationOptions { + ifNoneMatch?: string; } -// @public (undocumented) -export type DatasetLocationUnion = DatasetLocation | AzureBlobStorageLocation | AzureBlobFSLocation | AzureDataLakeStoreLocation | AmazonS3Location | FileServerLocation | AzureFileStorageLocation | GoogleCloudStorageLocation | FtpServerLocation | SftpLocation | HttpServerLocation | HdfsLocation; - // @public -export interface DatasetOperations { - beginCreateOrUpdateDataset(datasetName: string, dataset: DatasetResource, options?: DatasetOperationsCreateOrUpdateDatasetOptionalParams): Promise, DatasetOperationsCreateOrUpdateDatasetResponse>>; - beginCreateOrUpdateDatasetAndWait(datasetName: string, dataset: DatasetResource, options?: DatasetOperationsCreateOrUpdateDatasetOptionalParams): Promise; - beginDeleteDataset(datasetName: string, options?: DatasetOperationsDeleteDatasetOptionalParams): Promise, void>>; - beginDeleteDatasetAndWait(datasetName: string, options?: DatasetOperationsDeleteDatasetOptionalParams): Promise; - beginRenameDataset(datasetName: string, request: ArtifactRenameRequest, options?: DatasetOperationsRenameDatasetOptionalParams): Promise, void>>; - beginRenameDatasetAndWait(datasetName: string, request: ArtifactRenameRequest, options?: DatasetOperationsRenameDatasetOptionalParams): Promise; - getDataset(datasetName: string, options?: DatasetOperationsGetDatasetOptionalParams): Promise; - listDatasetsByWorkspace(options?: DatasetOperationsGetDatasetsByWorkspaceOptionalParams): PagedAsyncIterableIterator; -} +export type DatasetGetDatasetResponse = DatasetResource; // @public -export interface DatasetOperationsCreateOrUpdateDatasetOptionalParams extends coreClient.OperationOptions { - ifMatch?: string; - resumeFrom?: string; - updateIntervalInMs?: number; +export interface DatasetGetDatasetsByWorkspaceNextOptionalParams extends coreClient.OperationOptions { } // @public -export type DatasetOperationsCreateOrUpdateDatasetResponse = DatasetResource; +export type DatasetGetDatasetsByWorkspaceNextResponse = DatasetListResponse; // @public -export interface DatasetOperationsDeleteDatasetOptionalParams extends coreClient.OperationOptions { - resumeFrom?: string; - updateIntervalInMs?: number; +export interface DatasetGetDatasetsByWorkspaceOptionalParams extends coreClient.OperationOptions { } // @public -export interface DatasetOperationsGetDatasetOptionalParams extends coreClient.OperationOptions { - ifNoneMatch?: string; -} +export type DatasetGetDatasetsByWorkspaceResponse = DatasetListResponse; // @public -export type DatasetOperationsGetDatasetResponse = DatasetResource; +export type DatasetGZipCompression = DatasetCompression & { + type: "GZip"; + level?: any; +}; // @public -export interface DatasetOperationsGetDatasetsByWorkspaceNextOptionalParams extends coreClient.OperationOptions { +export interface DatasetListResponse { + nextLink?: string; + value: DatasetResource[]; } // @public -export type DatasetOperationsGetDatasetsByWorkspaceNextResponse = DatasetListResponse; - -// @public -export interface DatasetOperationsGetDatasetsByWorkspaceOptionalParams extends coreClient.OperationOptions { +export interface DatasetLocation { + [property: string]: any; + fileName?: any; + folderPath?: any; + type: "AzureBlobStorageLocation" | "AzureBlobFSLocation" | "AzureDataLakeStoreLocation" | "AmazonS3Location" | "FileServerLocation" 
| "AzureFileStorageLocation" | "GoogleCloudStorageLocation" | "FtpServerLocation" | "SftpLocation" | "HttpServerLocation" | "HdfsLocation"; } -// @public -export type DatasetOperationsGetDatasetsByWorkspaceResponse = DatasetListResponse; +// @public (undocumented) +export type DatasetLocationUnion = DatasetLocation | AzureBlobStorageLocation | AzureBlobFSLocation | AzureDataLakeStoreLocation | AmazonS3Location | FileServerLocation | AzureFileStorageLocation | GoogleCloudStorageLocation | FtpServerLocation | SftpLocation | HttpServerLocation | HdfsLocation; // @public -export interface DatasetOperationsRenameDatasetOptionalParams extends coreClient.OperationOptions { - resumeFrom?: string; - updateIntervalInMs?: number; +export interface DatasetOperations { + beginCreateOrUpdateDataset(datasetName: string, dataset: DatasetResource, options?: DatasetCreateOrUpdateDatasetOptionalParams): Promise, DatasetCreateOrUpdateDatasetResponse>>; + beginCreateOrUpdateDatasetAndWait(datasetName: string, dataset: DatasetResource, options?: DatasetCreateOrUpdateDatasetOptionalParams): Promise; + beginDeleteDataset(datasetName: string, options?: DatasetDeleteDatasetOptionalParams): Promise, void>>; + beginDeleteDatasetAndWait(datasetName: string, options?: DatasetDeleteDatasetOptionalParams): Promise; + beginRenameDataset(datasetName: string, request: ArtifactRenameRequest, options?: DatasetRenameDatasetOptionalParams): Promise, void>>; + beginRenameDatasetAndWait(datasetName: string, request: ArtifactRenameRequest, options?: DatasetRenameDatasetOptionalParams): Promise; + getDataset(datasetName: string, options?: DatasetGetDatasetOptionalParams): Promise; + listDatasetsByWorkspace(options?: DatasetGetDatasetsByWorkspaceOptionalParams): PagedAsyncIterableIterator; } // @public @@ -1915,6 +1996,12 @@ export interface DatasetReference { // @public export type DatasetReferenceType = string; +// @public +export interface DatasetRenameDatasetOptionalParams extends coreClient.OperationOptions { + resumeFrom?: string; + updateIntervalInMs?: number; +} + // @public export type DatasetResource = SubResource & { properties: DatasetUnion; @@ -1950,7 +2037,7 @@ export type DatasetTarGZipCompression = DatasetCompression & { }; // @public (undocumented) -export type DatasetUnion = Dataset | AmazonS3Dataset | AvroDataset | ExcelDataset | ParquetDataset | DelimitedTextDataset | JsonDataset | XmlDataset | OrcDataset | BinaryDataset | AzureBlobDataset | AzureTableDataset | AzureSqlTableDataset | AzureSqlMITableDataset | AzureSqlDWTableDataset | CassandraTableDataset | CustomDataset | CosmosDbSqlApiCollectionDataset | DocumentDbCollectionDataset | DynamicsEntityDataset | DynamicsCrmEntityDataset | CommonDataServiceForAppsEntityDataset | AzureDataLakeStoreDataset | AzureBlobFSDataset | Office365Dataset | FileShareDataset | MongoDbCollectionDataset | MongoDbAtlasCollectionDataset | MongoDbV2CollectionDataset | CosmosDbMongoDbApiCollectionDataset | ODataResourceDataset | OracleTableDataset | TeradataTableDataset | AzureMySqlTableDataset | AmazonRedshiftTableDataset | Db2TableDataset | RelationalTableDataset | InformixTableDataset | OdbcTableDataset | MySqlTableDataset | PostgreSqlTableDataset | MicrosoftAccessTableDataset | SalesforceObjectDataset | SalesforceServiceCloudObjectDataset | SybaseTableDataset | SapBwCubeDataset | SapCloudForCustomerResourceDataset | SapEccResourceDataset | SapHanaTableDataset | SapOpenHubTableDataset | SqlServerTableDataset | RestResourceDataset | SapTableResourceDataset | WebTableDataset | 
AzureSearchIndexDataset | HttpDataset | AmazonMWSObjectDataset | AzurePostgreSqlTableDataset | ConcurObjectDataset | CouchbaseTableDataset | DrillTableDataset | EloquaObjectDataset | GoogleBigQueryObjectDataset | GreenplumTableDataset | HBaseObjectDataset | HiveObjectDataset | HubspotObjectDataset | ImpalaObjectDataset | JiraObjectDataset | MagentoObjectDataset | MariaDBTableDataset | AzureMariaDBTableDataset | MarketoObjectDataset | PaypalObjectDataset | PhoenixObjectDataset | PrestoObjectDataset | QuickBooksObjectDataset | ServiceNowObjectDataset | ShopifyObjectDataset | SparkObjectDataset | SquareObjectDataset | XeroObjectDataset | ZohoObjectDataset | NetezzaTableDataset | VerticaTableDataset | SalesforceMarketingCloudObjectDataset | ResponsysObjectDataset | DynamicsAXResourceDataset | OracleServiceCloudObjectDataset | AzureDataExplorerTableDataset | GoogleAdWordsObjectDataset | SnowflakeDataset | SharePointOnlineListResourceDataset | AzureDatabricksDeltaLakeDataset; +export type DatasetUnion = Dataset | AmazonS3Dataset | AvroDataset | ExcelDataset | ParquetDataset | DelimitedTextDataset | JsonDataset | XmlDataset | OrcDataset | BinaryDataset | AzureBlobDataset | AzureTableDataset | AzureSqlTableDataset | AzureSqlMITableDataset | AzureSqlDWTableDataset | CassandraTableDataset | CustomDataset | CosmosDbSqlApiCollectionDataset | DocumentDbCollectionDataset | DynamicsEntityDataset | DynamicsCrmEntityDataset | CommonDataServiceForAppsEntityDataset | AzureDataLakeStoreDataset | AzureBlobFSDataset | Office365Dataset | FileShareDataset | MongoDbCollectionDataset | MongoDbAtlasCollectionDataset | MongoDbV2CollectionDataset | CosmosDbMongoDbApiCollectionDataset | ODataResourceDataset | OracleTableDataset | AmazonRdsForOracleTableDataset | TeradataTableDataset | AzureMySqlTableDataset | AmazonRedshiftTableDataset | Db2TableDataset | RelationalTableDataset | InformixTableDataset | OdbcTableDataset | MySqlTableDataset | PostgreSqlTableDataset | MicrosoftAccessTableDataset | SalesforceObjectDataset | SalesforceServiceCloudObjectDataset | SybaseTableDataset | SapBwCubeDataset | SapCloudForCustomerResourceDataset | SapEccResourceDataset | SapHanaTableDataset | SapOpenHubTableDataset | SqlServerTableDataset | AmazonRdsForSqlServerTableDataset | RestResourceDataset | SapTableResourceDataset | WebTableDataset | AzureSearchIndexDataset | HttpDataset | AmazonMWSObjectDataset | AzurePostgreSqlTableDataset | ConcurObjectDataset | CouchbaseTableDataset | DrillTableDataset | EloquaObjectDataset | GoogleBigQueryObjectDataset | GreenplumTableDataset | HBaseObjectDataset | HiveObjectDataset | HubspotObjectDataset | ImpalaObjectDataset | JiraObjectDataset | MagentoObjectDataset | MariaDBTableDataset | AzureMariaDBTableDataset | MarketoObjectDataset | PaypalObjectDataset | PhoenixObjectDataset | PrestoObjectDataset | QuickBooksObjectDataset | ServiceNowObjectDataset | ShopifyObjectDataset | SparkObjectDataset | SquareObjectDataset | XeroObjectDataset | ZohoObjectDataset | NetezzaTableDataset | VerticaTableDataset | SalesforceMarketingCloudObjectDataset | ResponsysObjectDataset | DynamicsAXResourceDataset | OracleServiceCloudObjectDataset | AzureDataExplorerTableDataset | GoogleAdWordsObjectDataset | SnowflakeDataset | SharePointOnlineListResourceDataset | AzureDatabricksDeltaLakeDataset; // @public export type DatasetZipDeflateCompression = DatasetCompression & { @@ -2016,7 +2103,7 @@ export type DelimitedTextDataset = Dataset & { columnDelimiter?: any; rowDelimiter?: any; encodingName?: any; - compressionCodec?: 
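The dataset group follows the same renaming. One detail worth showing is the conditional-request support in the renamed option bags; a sketch assuming a `DatasetOperations` instance and an ETag cached from an earlier read:

    import type { DatasetOperations } from "@azure/synapse-artifacts";

    async function fetchIfChanged(datasets: DatasetOperations, knownEtag: string) {
      // ifNoneMatch sends the cached ETag so the service can short-circuit
      // instead of returning an unchanged dataset body.
      return datasets.getDataset("myDataset", { ifNoneMatch: knownEtag });
    }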
CompressionCodec; + compressionCodec?: any; compressionLevel?: any; quoteChar?: any; escapeChar?: any; @@ -2043,7 +2130,7 @@ export type DelimitedTextSource = CopySource & { type: "DelimitedTextSource"; storeSettings?: StoreReadSettingsUnion; formatSettings?: DelimitedTextReadSettings; - additionalColumns?: AdditionalColumns[]; + additionalColumns?: any; }; // @public @@ -2092,7 +2179,7 @@ export type DocumentDbCollectionSource = CopySource & { query?: any; nestingSeparator?: any; queryTimeout?: any; - additionalColumns?: AdditionalColumns[]; + additionalColumns?: any; }; // @public @@ -2172,12 +2259,12 @@ export type DynamicsCrmEntityDataset = Dataset & { // @public export type DynamicsCrmLinkedService = LinkedService & { type: "DynamicsCrm"; - deploymentType: DynamicsDeploymentType; + deploymentType: any; hostName?: any; port?: any; serviceUri?: any; organizationName?: any; - authenticationType: DynamicsAuthenticationType; + authenticationType: any; username?: any; password?: SecretBaseUnion; servicePrincipalId?: any; @@ -2198,7 +2285,7 @@ export type DynamicsCrmSink = CopySink & { export type DynamicsCrmSource = CopySource & { type: "DynamicsCrmSource"; query?: any; - additionalColumns?: AdditionalColumns[]; + additionalColumns?: any; }; // @public @@ -2213,23 +2300,20 @@ export type DynamicsEntityDataset = Dataset & { // @public export type DynamicsLinkedService = LinkedService & { type: "Dynamics"; - deploymentType: DynamicsDeploymentType; + deploymentType: any; hostName?: any; port?: any; serviceUri?: any; organizationName?: any; - authenticationType: DynamicsAuthenticationType; + authenticationType: any; username?: any; password?: SecretBaseUnion; servicePrincipalId?: any; - servicePrincipalCredentialType?: DynamicsServicePrincipalCredentialType; + servicePrincipalCredentialType?: any; servicePrincipalCredential?: SecretBaseUnion; encryptedCredential?: any; }; -// @public -export type DynamicsServicePrincipalCredentialType = string; - // @public export type DynamicsSink = CopySink & { type: "DynamicsSink"; @@ -2245,7 +2329,7 @@ export type DynamicsSinkWriteBehavior = string; export type DynamicsSource = CopySource & { type: "DynamicsSource"; query?: any; - additionalColumns?: AdditionalColumns[]; + additionalColumns?: any; }; // @public @@ -2331,7 +2415,7 @@ export type ExcelDataset = Dataset & { export type ExcelSource = CopySource & { type: "ExcelSource"; storeSettings?: StoreReadSettingsUnion; - additionalColumns?: AdditionalColumns[]; + additionalColumns?: any; }; // @public @@ -2489,7 +2573,7 @@ export type FileSystemSink = CopySink & { export type FileSystemSource = CopySource & { type: "FileSystemSource"; recursive?: any; - additionalColumns?: AdditionalColumns[]; + additionalColumns?: any; }; // @public @@ -2579,7 +2663,6 @@ export interface GitHubAccessTokenRequest { gitHubAccessCode: string; gitHubAccessTokenBaseUrl: string; gitHubClientId: string; - gitHubClientSecret?: GitHubClientSecret; } // @public (undocumented) @@ -3140,6 +3223,7 @@ export interface IntegrationRuntimeCustomSetupScriptProperties { // @public export interface IntegrationRuntimeDataFlowProperties { [property: string]: any; + cleanup?: boolean; computeType?: DataFlowComputeType; coreCount?: number; timeToLive?: number; @@ -3152,6 +3236,11 @@ export interface IntegrationRuntimeDataProxyProperties { stagingLinkedService?: EntityReference; } +// @public +export type IntegrationRuntimeDebugResource = SubResourceDebugResource & { + properties: IntegrationRuntimeUnion; +}; + // @public export type 
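The recurring `additionalColumns?: AdditionalColumns[]` to `additionalColumns?: any` change follows the swagger update noted in the changelog; the property now also accepts expression objects, not just the typed array. A sketch of a copy source under the loosened type; the column name and `$$FILEPATH` value are illustrative:

    import type { DelimitedTextSource } from "@azure/synapse-artifacts";

    const source: DelimitedTextSource = {
      type: "DelimitedTextSource",
      // The old array-of-name/value shape still type-checks...
      additionalColumns: [{ name: "sourceFile", value: "$$FILEPATH" }]
      // ...and an expression object would now be accepted as well.
    };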
IntegrationRuntimeEdition = string; @@ -3280,7 +3369,7 @@ export type JsonDataset = Dataset & { // @public export type JsonFormat = DatasetStorageFormat & { type: "JsonFormat"; - filePattern?: JsonFormatFilePattern; + filePattern?: any; nestingSeparator?: any; encodingName?: any; jsonNodeReference?: any; @@ -3308,7 +3397,7 @@ export type JsonSource = CopySource & { type: "JsonSource"; storeSettings?: StoreReadSettingsUnion; formatSettings?: JsonReadSettings; - additionalColumns?: AdditionalColumns[]; + additionalColumns?: any; }; // @public @@ -3317,9 +3406,19 @@ export type JsonWriteFilePattern = string; // @public export type JsonWriteSettings = FormatWriteSettings & { type: "JsonWriteSettings"; - filePattern?: JsonWriteFilePattern; + filePattern?: any; }; +// @public +export enum KnownAmazonRdsForOraclePartitionOption { + // (undocumented) + DynamicRange = "DynamicRange", + // (undocumented) + None = "None", + // (undocumented) + PhysicalPartitionsOfTable = "PhysicalPartitionsOfTable" +} + // @public export enum KnownAvroCompressionCodec { // (undocumented) @@ -3421,6 +3520,10 @@ export enum KnownCompressionCodec { // (undocumented) Lz4 = "lz4", // (undocumented) + Lzo = "lzo", + // (undocumented) + None = "none", + // (undocumented) Snappy = "snappy", // (undocumented) Tar = "tar", @@ -3450,6 +3553,16 @@ export enum KnownDataFlowComputeType { MemoryOptimized = "MemoryOptimized" } +// @public +export enum KnownDataFlowDebugCommandType { + // (undocumented) + ExecuteExpressionQuery = "executeExpressionQuery", + // (undocumented) + ExecutePreviewQuery = "executePreviewQuery", + // (undocumented) + ExecuteStatisticsQuery = "executeStatisticsQuery" +} + // @public export enum KnownDataFlowReferenceType { // (undocumented) @@ -3506,14 +3619,6 @@ export enum KnownDynamicsDeploymentType { OnPremisesWithIfd = "OnPremisesWithIfd" } -// @public -export enum KnownDynamicsServicePrincipalCredentialType { - // (undocumented) - ServicePrincipalCert = "ServicePrincipalCert", - // (undocumented) - ServicePrincipalKey = "ServicePrincipalKey" -} - // @public export enum KnownDynamicsSinkWriteBehavior { // (undocumented) @@ -3738,6 +3843,32 @@ export enum KnownJsonWriteFilePattern { SetOfObjects = "setOfObjects" } +// @public +export enum KnownLivyStates { + // (undocumented) + Busy = "busy", + // (undocumented) + Dead = "dead", + // (undocumented) + Error = "error", + // (undocumented) + Idle = "idle", + // (undocumented) + Killed = "killed", + // (undocumented) + NotStarted = "not_started", + // (undocumented) + Recovering = "recovering", + // (undocumented) + Running = "running", + // (undocumented) + ShuttingDown = "shutting_down", + // (undocumented) + Starting = "starting", + // (undocumented) + Success = "success" +} + // @public export enum KnownMongoDbAuthenticationType { // (undocumented) @@ -4340,23 +4471,130 @@ export enum KnownWebHookActivityMethod { Post = "POST" } -// @public -export interface Library { - append(libraryName: string, content: coreRestPipeline.RequestBodyType, options?: LibraryAppendOptionalParams): Promise; - beginCreate(libraryName: string, options?: LibraryCreateOptionalParams): Promise, void>>; - beginCreateAndWait(libraryName: string, options?: LibraryCreateOptionalParams): Promise; - beginDelete(libraryName: string, options?: LibraryDeleteOptionalParams): Promise, void>>; - beginDeleteAndWait(libraryName: string, options?: LibraryDeleteOptionalParams): Promise; - beginFlush(libraryName: string, options?: LibraryFlushOptionalParams): Promise, void>>; - 
beginFlushAndWait(libraryName: string, options?: LibraryFlushOptionalParams): Promise<void>; - get(libraryName: string, options?: LibraryGetOptionalParams): Promise<LibraryGetResponse>; - getOperationResult(operationId: string, options?: LibraryGetOperationResultOptionalParams): Promise<LibraryGetOperationResultResponse>; - list(options?: LibraryListOptionalParams): PagedAsyncIterableIterator<LibraryResource>; +// @public (undocumented) +export interface KqlScript { + // (undocumented) + content?: KqlScriptContent; } -// @public -export interface LibraryAppendOptionalParams extends coreClient.OperationOptions { - blobConditionAppendPosition?: number; +// @public (undocumented) +export interface KqlScriptContent { + // (undocumented) + currentConnection?: KqlScriptContentCurrentConnection; + // (undocumented) + metadata?: KqlScriptContentMetadata; + // (undocumented) + query?: string; +} + +// @public (undocumented) +export interface KqlScriptContentCurrentConnection { + // (undocumented) + name?: string; + // (undocumented) + type?: string; +} + +// @public (undocumented) +export interface KqlScriptContentMetadata { + // (undocumented) + language?: string; +} + +// @public +export interface KqlScriptCreateOrUpdateOptionalParams extends coreClient.OperationOptions { + resumeFrom?: string; + updateIntervalInMs?: number; +} + +// @public +export type KqlScriptCreateOrUpdateResponse = KqlScriptResource; + +// @public +export interface KqlScriptDeleteByNameOptionalParams extends coreClient.OperationOptions { + resumeFrom?: string; + updateIntervalInMs?: number; +} + +// @public +export interface KqlScriptGetByNameOptionalParams extends coreClient.OperationOptions { +} + +// @public +export type KqlScriptGetByNameResponse = KqlScriptResource; + +// @public +export interface KqlScriptOperations { + beginCreateOrUpdate(kqlScriptName: string, kqlScript: KqlScriptResource, options?: KqlScriptCreateOrUpdateOptionalParams): Promise<PollerLike<PollOperationState<KqlScriptCreateOrUpdateResponse>, KqlScriptCreateOrUpdateResponse>>; + beginCreateOrUpdateAndWait(kqlScriptName: string, kqlScript: KqlScriptResource, options?: KqlScriptCreateOrUpdateOptionalParams): Promise<KqlScriptCreateOrUpdateResponse>; + beginDeleteByName(kqlScriptName: string, options?: KqlScriptDeleteByNameOptionalParams): Promise<PollerLike<PollOperationState<void>, void>>; + beginDeleteByNameAndWait(kqlScriptName: string, options?: KqlScriptDeleteByNameOptionalParams): Promise<void>; + beginRename(kqlScriptName: string, renameRequest: ArtifactRenameRequest, options?: KqlScriptRenameOptionalParams): Promise<PollerLike<PollOperationState<void>, void>>; + beginRenameAndWait(kqlScriptName: string, renameRequest: ArtifactRenameRequest, options?: KqlScriptRenameOptionalParams): Promise<void>; + getByName(kqlScriptName: string, options?: KqlScriptGetByNameOptionalParams): Promise<KqlScriptGetByNameResponse>; +} + +// @public +export interface KqlScriptRenameOptionalParams extends coreClient.OperationOptions { + resumeFrom?: string; + updateIntervalInMs?: number; +} + +// @public (undocumented) +export interface KqlScriptResource { + // (undocumented) + id?: string; + // (undocumented) + name?: string; + properties?: KqlScript; + // (undocumented) + type?: string; +} + +// @public +export interface KqlScripts { + listAll(options?: KqlScriptsGetAllOptionalParams): PagedAsyncIterableIterator<KqlScriptResource>; +} + +// @public +export interface KqlScriptsGetAllNextOptionalParams extends coreClient.OperationOptions { +} + +// @public +export type KqlScriptsGetAllNextResponse = KqlScriptsResourceCollectionResponse; + +// @public +export interface KqlScriptsGetAllOptionalParams extends coreClient.OperationOptions { +} + +// @public +export type KqlScriptsGetAllResponse = KqlScriptsResourceCollectionResponse; + +// @public (undocumented) +export 
interface KqlScriptsResourceCollectionResponse { + // (undocumented) + nextLink?: string; + // (undocumented) + value?: KqlScriptResource[]; +} + +// @public +export interface Library { + append(libraryName: string, content: coreRestPipeline.RequestBodyType, options?: LibraryAppendOptionalParams): Promise; + beginCreate(libraryName: string, options?: LibraryCreateOptionalParams): Promise, void>>; + beginCreateAndWait(libraryName: string, options?: LibraryCreateOptionalParams): Promise; + beginDelete(libraryName: string, options?: LibraryDeleteOptionalParams): Promise, void>>; + beginDeleteAndWait(libraryName: string, options?: LibraryDeleteOptionalParams): Promise; + beginFlush(libraryName: string, options?: LibraryFlushOptionalParams): Promise, void>>; + beginFlushAndWait(libraryName: string, options?: LibraryFlushOptionalParams): Promise; + get(libraryName: string, options?: LibraryGetOptionalParams): Promise; + getOperationResult(operationId: string, options?: LibraryGetOperationResultOptionalParams): Promise; + list(options?: LibraryListOptionalParams): PagedAsyncIterableIterator; +} + +// @public +export interface LibraryAppendOptionalParams extends coreClient.OperationOptions { + blobConditionAppendPosition?: number; } // @public @@ -4399,7 +4637,7 @@ export interface LibraryInfo { path?: string; readonly provisioningStatus?: string; type?: string; - readonly uploadedTimestamp?: Date; + uploadedTimestamp?: Date; } // @public @@ -4487,74 +4725,68 @@ export interface LinkedService { parameters?: { [propertyName: string]: ParameterSpecification; }; - type: "AzureStorage" | "AzureBlobStorage" | "AzureTableStorage" | "AzureSqlDW" | "SqlServer" | "AzureSqlDatabase" | "AzureSqlMI" | "AzureBatch" | "AzureKeyVault" | "CosmosDb" | "Dynamics" | "DynamicsCrm" | "CommonDataServiceForApps" | "HDInsight" | "FileServer" | "AzureFileStorage" | "GoogleCloudStorage" | "Oracle" | "AzureMySql" | "MySql" | "PostgreSql" | "Sybase" | "Db2" | "Teradata" | "AzureML" | "AzureMLService" | "Odbc" | "Informix" | "MicrosoftAccess" | "Hdfs" | "OData" | "Web" | "Cassandra" | "MongoDb" | "MongoDbAtlas" | "MongoDbV2" | "CosmosDbMongoDbApi" | "AzureDataLakeStore" | "AzureBlobFS" | "Office365" | "Salesforce" | "SalesforceServiceCloud" | "SapCloudForCustomer" | "SapEcc" | "SapOpenHub" | "RestService" | "AmazonS3" | "AmazonRedshift" | "CustomDataSource" | "AzureSearch" | "HttpServer" | "FtpServer" | "Sftp" | "SapBW" | "SapHana" | "AmazonMWS" | "AzurePostgreSql" | "Concur" | "Couchbase" | "Drill" | "Eloqua" | "GoogleBigQuery" | "Greenplum" | "HBase" | "Hive" | "Hubspot" | "Impala" | "Jira" | "Magento" | "MariaDB" | "AzureMariaDB" | "Marketo" | "Paypal" | "Phoenix" | "Presto" | "QuickBooks" | "ServiceNow" | "Shopify" | "Spark" | "Square" | "Xero" | "Zoho" | "Vertica" | "Netezza" | "SalesforceMarketingCloud" | "HDInsightOnDemand" | "AzureDataLakeAnalytics" | "AzureDatabricks" | "AzureDatabricksDeltaLake" | "Responsys" | "DynamicsAX" | "OracleServiceCloud" | "GoogleAdWords" | "SapTable" | "AzureDataExplorer" | "AzureFunction" | "Snowflake" | "SharePointOnlineList"; + type: "AzureStorage" | "AzureBlobStorage" | "AzureTableStorage" | "AzureSqlDW" | "SqlServer" | "AmazonRdsForSqlServer" | "AzureSqlDatabase" | "AzureSqlMI" | "AzureBatch" | "AzureKeyVault" | "CosmosDb" | "Dynamics" | "DynamicsCrm" | "CommonDataServiceForApps" | "HDInsight" | "FileServer" | "AzureFileStorage" | "GoogleCloudStorage" | "Oracle" | "AmazonRdsForOracle" | "AzureMySql" | "MySql" | "PostgreSql" | "Sybase" | "Db2" | "Teradata" | "AzureML" | 
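The new KQL-script surface splits reads across two groups: `KqlScriptOperations` for CRUD and `KqlScripts` for listing. A sketch built only from the shapes above; the database name and query are placeholders, and the connection `type` value of "database" is an assumption:

    import type {
      KqlScriptOperations,
      KqlScriptResource,
      KqlScripts
    } from "@azure/synapse-artifacts";

    async function saveAndList(kqlScript: KqlScriptOperations, kqlScripts: KqlScripts) {
      const resource: KqlScriptResource = {
        properties: {
          content: {
            query: "StormEvents | take 10", // placeholder KQL
            currentConnection: { name: "<database>", type: "database" }, // assumed type value
            metadata: { language: "kql" }
          }
        }
      };
      await kqlScript.beginCreateOrUpdateAndWait("myScript", resource);
      for await (const script of kqlScripts.listAll()) {
        console.log(script.name);
      }
    }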
"AzureMLService" | "Odbc" | "Informix" | "MicrosoftAccess" | "Hdfs" | "OData" | "Web" | "Cassandra" | "MongoDb" | "MongoDbAtlas" | "MongoDbV2" | "CosmosDbMongoDbApi" | "AzureDataLakeStore" | "AzureBlobFS" | "Office365" | "Salesforce" | "SalesforceServiceCloud" | "SapCloudForCustomer" | "SapEcc" | "SapOpenHub" | "RestService" | "AmazonS3" | "AmazonRedshift" | "CustomDataSource" | "AzureSearch" | "HttpServer" | "FtpServer" | "Sftp" | "SapBW" | "SapHana" | "AmazonMWS" | "AzurePostgreSql" | "Concur" | "Couchbase" | "Drill" | "Eloqua" | "GoogleBigQuery" | "Greenplum" | "HBase" | "Hive" | "Hubspot" | "Impala" | "Jira" | "Magento" | "MariaDB" | "AzureMariaDB" | "Marketo" | "Paypal" | "Phoenix" | "Presto" | "QuickBooks" | "ServiceNow" | "Shopify" | "Spark" | "Square" | "Xero" | "Zoho" | "Vertica" | "Netezza" | "SalesforceMarketingCloud" | "HDInsightOnDemand" | "AzureDataLakeAnalytics" | "AzureDatabricks" | "AzureDatabricksDeltaLake" | "Responsys" | "DynamicsAX" | "OracleServiceCloud" | "GoogleAdWords" | "SapTable" | "AzureDataExplorer" | "AzureFunction" | "Snowflake" | "SharePointOnlineList"; } // @public -export type LinkedServiceDebugResource = SubResourceDebugResource & { - properties: LinkedServiceUnion; -}; - -// @public -export interface LinkedServiceListResponse { - nextLink?: string; - value: LinkedServiceResource[]; -} - -// @public -export interface LinkedServiceOperations { - beginCreateOrUpdateLinkedService(linkedServiceName: string, linkedService: LinkedServiceResource, options?: LinkedServiceOperationsCreateOrUpdateLinkedServiceOptionalParams): Promise, LinkedServiceOperationsCreateOrUpdateLinkedServiceResponse>>; - beginCreateOrUpdateLinkedServiceAndWait(linkedServiceName: string, linkedService: LinkedServiceResource, options?: LinkedServiceOperationsCreateOrUpdateLinkedServiceOptionalParams): Promise; - beginDeleteLinkedService(linkedServiceName: string, options?: LinkedServiceOperationsDeleteLinkedServiceOptionalParams): Promise, void>>; - beginDeleteLinkedServiceAndWait(linkedServiceName: string, options?: LinkedServiceOperationsDeleteLinkedServiceOptionalParams): Promise; - beginRenameLinkedService(linkedServiceName: string, request: ArtifactRenameRequest, options?: LinkedServiceOperationsRenameLinkedServiceOptionalParams): Promise, void>>; - beginRenameLinkedServiceAndWait(linkedServiceName: string, request: ArtifactRenameRequest, options?: LinkedServiceOperationsRenameLinkedServiceOptionalParams): Promise; - getLinkedService(linkedServiceName: string, options?: LinkedServiceOperationsGetLinkedServiceOptionalParams): Promise; - listLinkedServicesByWorkspace(options?: LinkedServiceOperationsGetLinkedServicesByWorkspaceOptionalParams): PagedAsyncIterableIterator; -} - -// @public -export interface LinkedServiceOperationsCreateOrUpdateLinkedServiceOptionalParams extends coreClient.OperationOptions { +export interface LinkedServiceCreateOrUpdateLinkedServiceOptionalParams extends coreClient.OperationOptions { ifMatch?: string; resumeFrom?: string; updateIntervalInMs?: number; } // @public -export type LinkedServiceOperationsCreateOrUpdateLinkedServiceResponse = LinkedServiceResource; +export type LinkedServiceCreateOrUpdateLinkedServiceResponse = LinkedServiceResource; // @public -export interface LinkedServiceOperationsDeleteLinkedServiceOptionalParams extends coreClient.OperationOptions { +export type LinkedServiceDebugResource = SubResourceDebugResource & { + properties: LinkedServiceUnion; +}; + +// @public +export interface LinkedServiceDeleteLinkedServiceOptionalParams 
extends coreClient.OperationOptions { resumeFrom?: string; updateIntervalInMs?: number; } // @public -export interface LinkedServiceOperationsGetLinkedServiceOptionalParams extends coreClient.OperationOptions { +export interface LinkedServiceGetLinkedServiceOptionalParams extends coreClient.OperationOptions { ifNoneMatch?: string; } // @public -export type LinkedServiceOperationsGetLinkedServiceResponse = LinkedServiceResource; +export type LinkedServiceGetLinkedServiceResponse = LinkedServiceResource; // @public -export interface LinkedServiceOperationsGetLinkedServicesByWorkspaceNextOptionalParams extends coreClient.OperationOptions { +export interface LinkedServiceGetLinkedServicesByWorkspaceNextOptionalParams extends coreClient.OperationOptions { } // @public -export type LinkedServiceOperationsGetLinkedServicesByWorkspaceNextResponse = LinkedServiceListResponse; +export type LinkedServiceGetLinkedServicesByWorkspaceNextResponse = LinkedServiceListResponse; // @public -export interface LinkedServiceOperationsGetLinkedServicesByWorkspaceOptionalParams extends coreClient.OperationOptions { +export interface LinkedServiceGetLinkedServicesByWorkspaceOptionalParams extends coreClient.OperationOptions { } // @public -export type LinkedServiceOperationsGetLinkedServicesByWorkspaceResponse = LinkedServiceListResponse; +export type LinkedServiceGetLinkedServicesByWorkspaceResponse = LinkedServiceListResponse; // @public -export interface LinkedServiceOperationsRenameLinkedServiceOptionalParams extends coreClient.OperationOptions { - resumeFrom?: string; - updateIntervalInMs?: number; +export interface LinkedServiceListResponse { + nextLink?: string; + value: LinkedServiceResource[]; +} + +// @public +export interface LinkedServiceOperations { + beginCreateOrUpdateLinkedService(linkedServiceName: string, linkedService: LinkedServiceResource, options?: LinkedServiceCreateOrUpdateLinkedServiceOptionalParams): Promise, LinkedServiceCreateOrUpdateLinkedServiceResponse>>; + beginCreateOrUpdateLinkedServiceAndWait(linkedServiceName: string, linkedService: LinkedServiceResource, options?: LinkedServiceCreateOrUpdateLinkedServiceOptionalParams): Promise; + beginDeleteLinkedService(linkedServiceName: string, options?: LinkedServiceDeleteLinkedServiceOptionalParams): Promise, void>>; + beginDeleteLinkedServiceAndWait(linkedServiceName: string, options?: LinkedServiceDeleteLinkedServiceOptionalParams): Promise; + beginRenameLinkedService(linkedServiceName: string, request: ArtifactRenameRequest, options?: LinkedServiceRenameLinkedServiceOptionalParams): Promise, void>>; + beginRenameLinkedServiceAndWait(linkedServiceName: string, request: ArtifactRenameRequest, options?: LinkedServiceRenameLinkedServiceOptionalParams): Promise; + getLinkedService(linkedServiceName: string, options?: LinkedServiceGetLinkedServiceOptionalParams): Promise; + listLinkedServicesByWorkspace(options?: LinkedServiceGetLinkedServicesByWorkspaceOptionalParams): PagedAsyncIterableIterator; } // @public @@ -4566,13 +4798,22 @@ export interface LinkedServiceReference { type: Type; } +// @public +export interface LinkedServiceRenameLinkedServiceOptionalParams extends coreClient.OperationOptions { + resumeFrom?: string; + updateIntervalInMs?: number; +} + // @public export type LinkedServiceResource = SubResource & { properties: LinkedServiceUnion; }; // @public (undocumented) -export type LinkedServiceUnion = LinkedService | AzureStorageLinkedService | AzureBlobStorageLinkedService | AzureTableStorageLinkedService | 
AzureSqlDWLinkedService | SqlServerLinkedService | AzureSqlDatabaseLinkedService | AzureSqlMILinkedService | AzureBatchLinkedService | AzureKeyVaultLinkedService | CosmosDbLinkedService | DynamicsLinkedService | DynamicsCrmLinkedService | CommonDataServiceForAppsLinkedService | HDInsightLinkedService | FileServerLinkedService | AzureFileStorageLinkedService | GoogleCloudStorageLinkedService | OracleLinkedService | AzureMySqlLinkedService | MySqlLinkedService | PostgreSqlLinkedService | SybaseLinkedService | Db2LinkedService | TeradataLinkedService | AzureMLLinkedService | AzureMLServiceLinkedService | OdbcLinkedService | InformixLinkedService | MicrosoftAccessLinkedService | HdfsLinkedService | ODataLinkedService | WebLinkedService | CassandraLinkedService | MongoDbLinkedService | MongoDbAtlasLinkedService | MongoDbV2LinkedService | CosmosDbMongoDbApiLinkedService | AzureDataLakeStoreLinkedService | AzureBlobFSLinkedService | Office365LinkedService | SalesforceLinkedService | SalesforceServiceCloudLinkedService | SapCloudForCustomerLinkedService | SapEccLinkedService | SapOpenHubLinkedService | RestServiceLinkedService | AmazonS3LinkedService | AmazonRedshiftLinkedService | CustomDataSourceLinkedService | AzureSearchLinkedService | HttpLinkedService | FtpServerLinkedService | SftpServerLinkedService | SapBWLinkedService | SapHanaLinkedService | AmazonMWSLinkedService | AzurePostgreSqlLinkedService | ConcurLinkedService | CouchbaseLinkedService | DrillLinkedService | EloquaLinkedService | GoogleBigQueryLinkedService | GreenplumLinkedService | HBaseLinkedService | HiveLinkedService | HubspotLinkedService | ImpalaLinkedService | JiraLinkedService | MagentoLinkedService | MariaDBLinkedService | AzureMariaDBLinkedService | MarketoLinkedService | PaypalLinkedService | PhoenixLinkedService | PrestoLinkedService | QuickBooksLinkedService | ServiceNowLinkedService | ShopifyLinkedService | SparkLinkedService | SquareLinkedService | XeroLinkedService | ZohoLinkedService | VerticaLinkedService | NetezzaLinkedService | SalesforceMarketingCloudLinkedService | HDInsightOnDemandLinkedService | AzureDataLakeAnalyticsLinkedService | AzureDatabricksLinkedService | AzureDatabricksDeltaLakeLinkedService | ResponsysLinkedService | DynamicsAXLinkedService | OracleServiceCloudLinkedService | GoogleAdWordsLinkedService | SapTableLinkedService | AzureDataExplorerLinkedService | AzureFunctionLinkedService | SnowflakeLinkedService | SharePointOnlineListLinkedService; +export type LinkedServiceUnion = LinkedService | AzureStorageLinkedService | AzureBlobStorageLinkedService | AzureTableStorageLinkedService | AzureSqlDWLinkedService | SqlServerLinkedService | AmazonRdsForSqlServerLinkedService | AzureSqlDatabaseLinkedService | AzureSqlMILinkedService | AzureBatchLinkedService | AzureKeyVaultLinkedService | CosmosDbLinkedService | DynamicsLinkedService | DynamicsCrmLinkedService | CommonDataServiceForAppsLinkedService | HDInsightLinkedService | FileServerLinkedService | AzureFileStorageLinkedService | GoogleCloudStorageLinkedService | OracleLinkedService | AmazonRdsForOracleLinkedService | AzureMySqlLinkedService | MySqlLinkedService | PostgreSqlLinkedService | SybaseLinkedService | Db2LinkedService | TeradataLinkedService | AzureMLLinkedService | AzureMLServiceLinkedService | OdbcLinkedService | InformixLinkedService | MicrosoftAccessLinkedService | HdfsLinkedService | ODataLinkedService | WebLinkedService | CassandraLinkedService | MongoDbLinkedService | MongoDbAtlasLinkedService | MongoDbV2LinkedService | 
CosmosDbMongoDbApiLinkedService | AzureDataLakeStoreLinkedService | AzureBlobFSLinkedService | Office365LinkedService | SalesforceLinkedService | SalesforceServiceCloudLinkedService | SapCloudForCustomerLinkedService | SapEccLinkedService | SapOpenHubLinkedService | RestServiceLinkedService | AmazonS3LinkedService | AmazonRedshiftLinkedService | CustomDataSourceLinkedService | AzureSearchLinkedService | HttpLinkedService | FtpServerLinkedService | SftpServerLinkedService | SapBWLinkedService | SapHanaLinkedService | AmazonMWSLinkedService | AzurePostgreSqlLinkedService | ConcurLinkedService | CouchbaseLinkedService | DrillLinkedService | EloquaLinkedService | GoogleBigQueryLinkedService | GreenplumLinkedService | HBaseLinkedService | HiveLinkedService | HubspotLinkedService | ImpalaLinkedService | JiraLinkedService | MagentoLinkedService | MariaDBLinkedService | AzureMariaDBLinkedService | MarketoLinkedService | PaypalLinkedService | PhoenixLinkedService | PrestoLinkedService | QuickBooksLinkedService | ServiceNowLinkedService | ShopifyLinkedService | SparkLinkedService | SquareLinkedService | XeroLinkedService | ZohoLinkedService | VerticaLinkedService | NetezzaLinkedService | SalesforceMarketingCloudLinkedService | HDInsightOnDemandLinkedService | AzureDataLakeAnalyticsLinkedService | AzureDatabricksLinkedService | AzureDatabricksDeltaLakeLinkedService | ResponsysLinkedService | DynamicsAXLinkedService | OracleServiceCloudLinkedService | GoogleAdWordsLinkedService | SapTableLinkedService | AzureDataExplorerLinkedService | AzureFunctionLinkedService | SnowflakeLinkedService | SharePointOnlineListLinkedService; + +// @public +export type LivyStates = string; // @public export interface LogLocationSettings { @@ -4730,7 +4971,7 @@ export type MicrosoftAccessSink = CopySink & { export type MicrosoftAccessSource = CopySource & { type: "MicrosoftAccessSource"; query?: any; - additionalColumns?: AdditionalColumns[]; + additionalColumns?: any; }; // @public @@ -4759,7 +5000,7 @@ export type MongoDbAtlasSource = CopySource & { cursorMethods?: MongoDbCursorMethodsProperties; batchSize?: any; queryTimeout?: any; - additionalColumns?: AdditionalColumns[]; + additionalColumns?: any; }; // @public @@ -4799,7 +5040,7 @@ export type MongoDbLinkedService = LinkedService & { export type MongoDbSource = CopySource & { type: "MongoDbSource"; query?: any; - additionalColumns?: AdditionalColumns[]; + additionalColumns?: any; }; // @public @@ -4822,7 +5063,7 @@ export type MongoDbV2Source = CopySource & { cursorMethods?: MongoDbCursorMethodsProperties; batchSize?: any; queryTimeout?: any; - additionalColumns?: AdditionalColumns[]; + additionalColumns?: any; }; // @public @@ -4900,6 +5141,7 @@ export interface Notebook { bigDataPool?: BigDataPoolReference; cells: NotebookCell[]; description?: string; + folder?: NotebookFolder; metadata: NotebookMetadata; nbformat: number; nbformatMinor: number; @@ -4927,106 +5169,120 @@ export interface NotebookCellOutputItem { } // @public -export interface NotebookKernelSpec { - [property: string]: any; - displayName: string; - name: string; +export interface NotebookCreateOrUpdateNotebookOptionalParams extends coreClient.OperationOptions { + ifMatch?: string; + resumeFrom?: string; + updateIntervalInMs?: number; } // @public -export interface NotebookLanguageInfo { - [property: string]: any; - codemirrorMode?: string; - name: string; -} +export type NotebookCreateOrUpdateNotebookResponse = NotebookResource; // @public -export interface NotebookListResponse { - nextLink?: 
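The linked-service group follows the same option-bag renaming; `LivyStates`, newly surfaced just above, types the Spark batch job `state` property later in this file. A rename sketch, assuming `ArtifactRenameRequest` keeps the `{ newName?: string }` shape it had in earlier betas:

    import type { LinkedServiceOperations } from "@azure/synapse-artifacts";

    async function rename(linkedServices: LinkedServiceOperations) {
      // Rename is long-running; the *AndWait variant polls to completion.
      await linkedServices.beginRenameLinkedServiceAndWait("oldName", {
        newName: "newName" // assumed ArtifactRenameRequest field
      });
    }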
string; - value: NotebookResource[]; +export interface NotebookDeleteNotebookOptionalParams extends coreClient.OperationOptions { + resumeFrom?: string; + updateIntervalInMs?: number; } // @public -export interface NotebookMetadata { - [property: string]: any; - kernelspec?: NotebookKernelSpec; - languageInfo?: NotebookLanguageInfo; +export interface NotebookFolder { + name?: string; } // @public -export interface NotebookOperations { - beginCreateOrUpdateNotebook(notebookName: string, notebook: NotebookResource, options?: NotebookOperationsCreateOrUpdateNotebookOptionalParams): Promise, NotebookOperationsCreateOrUpdateNotebookResponse>>; - beginCreateOrUpdateNotebookAndWait(notebookName: string, notebook: NotebookResource, options?: NotebookOperationsCreateOrUpdateNotebookOptionalParams): Promise; - beginDeleteNotebook(notebookName: string, options?: NotebookOperationsDeleteNotebookOptionalParams): Promise, void>>; - beginDeleteNotebookAndWait(notebookName: string, options?: NotebookOperationsDeleteNotebookOptionalParams): Promise; - beginRenameNotebook(notebookName: string, request: ArtifactRenameRequest, options?: NotebookOperationsRenameNotebookOptionalParams): Promise, void>>; - beginRenameNotebookAndWait(notebookName: string, request: ArtifactRenameRequest, options?: NotebookOperationsRenameNotebookOptionalParams): Promise; - getNotebook(notebookName: string, options?: NotebookOperationsGetNotebookOptionalParams): Promise; - listNotebooksByWorkspace(options?: NotebookOperationsGetNotebooksByWorkspaceOptionalParams): PagedAsyncIterableIterator; - listNotebookSummaryByWorkSpace(options?: NotebookOperationsGetNotebookSummaryByWorkSpaceOptionalParams): PagedAsyncIterableIterator; +export interface NotebookGetNotebookOptionalParams extends coreClient.OperationOptions { + ifNoneMatch?: string; } // @public -export interface NotebookOperationsCreateOrUpdateNotebookOptionalParams extends coreClient.OperationOptions { - ifMatch?: string; - resumeFrom?: string; - updateIntervalInMs?: number; +export type NotebookGetNotebookResponse = NotebookResource; + +// @public +export interface NotebookGetNotebooksByWorkspaceNextOptionalParams extends coreClient.OperationOptions { } // @public -export type NotebookOperationsCreateOrUpdateNotebookResponse = NotebookResource; +export type NotebookGetNotebooksByWorkspaceNextResponse = NotebookListResponse; // @public -export interface NotebookOperationsDeleteNotebookOptionalParams extends coreClient.OperationOptions { - resumeFrom?: string; - updateIntervalInMs?: number; +export interface NotebookGetNotebooksByWorkspaceOptionalParams extends coreClient.OperationOptions { } // @public -export interface NotebookOperationsGetNotebookOptionalParams extends coreClient.OperationOptions { - ifNoneMatch?: string; +export type NotebookGetNotebooksByWorkspaceResponse = NotebookListResponse; + +// @public +export interface NotebookGetNotebookSummaryByWorkSpaceNextOptionalParams extends coreClient.OperationOptions { } // @public -export type NotebookOperationsGetNotebookResponse = NotebookResource; +export type NotebookGetNotebookSummaryByWorkSpaceNextResponse = NotebookListResponse; // @public -export interface NotebookOperationsGetNotebooksByWorkspaceNextOptionalParams extends coreClient.OperationOptions { +export interface NotebookGetNotebookSummaryByWorkSpaceOptionalParams extends coreClient.OperationOptions { } // @public -export type NotebookOperationsGetNotebooksByWorkspaceNextResponse = NotebookListResponse; +export type 
NotebookGetNotebookSummaryByWorkSpaceResponse = NotebookListResponse; // @public -export interface NotebookOperationsGetNotebooksByWorkspaceOptionalParams extends coreClient.OperationOptions { +export interface NotebookKernelSpec { + [property: string]: any; + displayName: string; + name: string; } // @public -export type NotebookOperationsGetNotebooksByWorkspaceResponse = NotebookListResponse; +export interface NotebookLanguageInfo { + [property: string]: any; + codemirrorMode?: string; + name: string; +} // @public -export interface NotebookOperationsGetNotebookSummaryByWorkSpaceNextOptionalParams extends coreClient.OperationOptions { +export interface NotebookListResponse { + nextLink?: string; + value: NotebookResource[]; } // @public -export type NotebookOperationsGetNotebookSummaryByWorkSpaceNextResponse = NotebookListResponse; +export interface NotebookMetadata { + [property: string]: any; + kernelspec?: NotebookKernelSpec; + languageInfo?: NotebookLanguageInfo; +} // @public -export interface NotebookOperationsGetNotebookSummaryByWorkSpaceOptionalParams extends coreClient.OperationOptions { +export interface NotebookOperationResult { + get(operationId: string, options?: NotebookOperationResultGetOptionalParams): Promise; } // @public -export type NotebookOperationsGetNotebookSummaryByWorkSpaceResponse = NotebookListResponse; +export interface NotebookOperationResultGetOptionalParams extends coreClient.OperationOptions { +} // @public -export interface NotebookOperationsRenameNotebookOptionalParams extends coreClient.OperationOptions { - resumeFrom?: string; - updateIntervalInMs?: number; +export interface NotebookOperations { + beginCreateOrUpdateNotebook(notebookName: string, notebook: NotebookResource, options?: NotebookCreateOrUpdateNotebookOptionalParams): Promise, NotebookCreateOrUpdateNotebookResponse>>; + beginCreateOrUpdateNotebookAndWait(notebookName: string, notebook: NotebookResource, options?: NotebookCreateOrUpdateNotebookOptionalParams): Promise; + beginDeleteNotebook(notebookName: string, options?: NotebookDeleteNotebookOptionalParams): Promise, void>>; + beginDeleteNotebookAndWait(notebookName: string, options?: NotebookDeleteNotebookOptionalParams): Promise; + beginRenameNotebook(notebookName: string, request: ArtifactRenameRequest, options?: NotebookRenameNotebookOptionalParams): Promise, void>>; + beginRenameNotebookAndWait(notebookName: string, request: ArtifactRenameRequest, options?: NotebookRenameNotebookOptionalParams): Promise; + getNotebook(notebookName: string, options?: NotebookGetNotebookOptionalParams): Promise; + listNotebooksByWorkspace(options?: NotebookGetNotebooksByWorkspaceOptionalParams): PagedAsyncIterableIterator; + listNotebookSummaryByWorkSpace(options?: NotebookGetNotebookSummaryByWorkSpaceOptionalParams): PagedAsyncIterableIterator; } // @public export type NotebookReferenceType = string; +// @public +export interface NotebookRenameNotebookOptionalParams extends coreClient.OperationOptions { + resumeFrom?: string; + updateIntervalInMs?: number; +} + // @public export interface NotebookResource { readonly etag?: string; @@ -5080,7 +5336,7 @@ export type ODataSource = CopySource & { type: "ODataSource"; query?: any; httpRequestTimeout?: any; - additionalColumns?: AdditionalColumns[]; + additionalColumns?: any; }; // @public @@ -5143,7 +5399,7 @@ export type Office365Source = CopySource & { // @public export interface OperationResult { code?: string; - details?: CloudError[]; + details?: CloudErrorAutoGenerated[]; message?: string; readonly 
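Two notebook additions from this release appear above: the `folder` property on `Notebook` and the new `NotebookOperationResult` group. A sketch; the folder path is illustrative, and `get` is assumed to resolve once the tracked operation has finished:

    import type { Notebook, NotebookOperationResult } from "@azure/synapse-artifacts";

    // Notebooks can now be organized into workspace folders.
    const placement: Pick<Notebook, "folder"> = {
      folder: { name: "analytics/adhoc" } // illustrative folder path
    };

    async function awaitOperation(results: NotebookOperationResult, operationId: string) {
      await results.get(operationId);
    }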
status?: string; target?: string; @@ -5205,7 +5461,7 @@ export type OracleSource = CopySource & { queryTimeout?: any; partitionOption?: OraclePartitionOption; partitionSettings?: OraclePartitionSettings; - additionalColumns?: AdditionalColumns[]; + additionalColumns?: any; }; // @public @@ -5223,7 +5479,7 @@ export type OrcCompressionCodec = string; export type OrcDataset = Dataset & { type: "Orc"; location?: DatasetLocationUnion; - orcCompressionCodec?: OrcCompressionCodec; + orcCompressionCodec?: any; }; // @public @@ -5242,7 +5498,7 @@ export type OrcSink = CopySink & { export type OrcSource = CopySource & { type: "OrcSource"; storeSettings?: StoreReadSettingsUnion; - additionalColumns?: AdditionalColumns[]; + additionalColumns?: any; }; // @public @@ -5287,7 +5543,7 @@ export type ParquetSink = CopySink & { export type ParquetSource = CopySource & { type: "ParquetSource"; storeSettings?: StoreReadSettingsUnion; - additionalColumns?: AdditionalColumns[]; + additionalColumns?: any; }; // @public @@ -5356,41 +5612,17 @@ export type PhoenixSource = TabularSource & { }; // @public -export interface PipelineFolder { - name?: string; -} - -// @public -export interface PipelineListResponse { - nextLink?: string; - value: PipelineResource[]; -} - -// @public -export interface PipelineOperations { - beginCreateOrUpdatePipeline(pipelineName: string, pipeline: PipelineResource, options?: PipelineOperationsCreateOrUpdatePipelineOptionalParams): Promise, PipelineOperationsCreateOrUpdatePipelineResponse>>; - beginCreateOrUpdatePipelineAndWait(pipelineName: string, pipeline: PipelineResource, options?: PipelineOperationsCreateOrUpdatePipelineOptionalParams): Promise; - beginDeletePipeline(pipelineName: string, options?: PipelineOperationsDeletePipelineOptionalParams): Promise, void>>; - beginDeletePipelineAndWait(pipelineName: string, options?: PipelineOperationsDeletePipelineOptionalParams): Promise; - beginRenamePipeline(pipelineName: string, request: ArtifactRenameRequest, options?: PipelineOperationsRenamePipelineOptionalParams): Promise, void>>; - beginRenamePipelineAndWait(pipelineName: string, request: ArtifactRenameRequest, options?: PipelineOperationsRenamePipelineOptionalParams): Promise; - createPipelineRun(pipelineName: string, options?: PipelineOperationsCreatePipelineRunOptionalParams): Promise; - getPipeline(pipelineName: string, options?: PipelineOperationsGetPipelineOptionalParams): Promise; - listPipelinesByWorkspace(options?: PipelineOperationsGetPipelinesByWorkspaceOptionalParams): PagedAsyncIterableIterator; -} - -// @public -export interface PipelineOperationsCreateOrUpdatePipelineOptionalParams extends coreClient.OperationOptions { +export interface PipelineCreateOrUpdatePipelineOptionalParams extends coreClient.OperationOptions { ifMatch?: string; resumeFrom?: string; updateIntervalInMs?: number; } // @public -export type PipelineOperationsCreateOrUpdatePipelineResponse = PipelineResource; +export type PipelineCreateOrUpdatePipelineResponse = PipelineResource; // @public -export interface PipelineOperationsCreatePipelineRunOptionalParams extends coreClient.OperationOptions { +export interface PipelineCreatePipelineRunOptionalParams extends coreClient.OperationOptions { isRecovery?: boolean; parameters?: { [propertyName: string]: any; @@ -5400,40 +5632,58 @@ export interface PipelineOperationsCreatePipelineRunOptionalParams extends coreC } // @public -export type PipelineOperationsCreatePipelineRunResponse = CreateRunResponse; +export type PipelineCreatePipelineRunResponse = 
CreateRunResponse; // @public -export interface PipelineOperationsDeletePipelineOptionalParams extends coreClient.OperationOptions { +export interface PipelineDeletePipelineOptionalParams extends coreClient.OperationOptions { resumeFrom?: string; updateIntervalInMs?: number; } // @public -export interface PipelineOperationsGetPipelineOptionalParams extends coreClient.OperationOptions { +export interface PipelineFolder { + name?: string; +} + +// @public +export interface PipelineGetPipelineOptionalParams extends coreClient.OperationOptions { ifNoneMatch?: string; } // @public -export type PipelineOperationsGetPipelineResponse = PipelineResource; +export type PipelineGetPipelineResponse = PipelineResource; // @public -export interface PipelineOperationsGetPipelinesByWorkspaceNextOptionalParams extends coreClient.OperationOptions { +export interface PipelineGetPipelinesByWorkspaceNextOptionalParams extends coreClient.OperationOptions { } // @public -export type PipelineOperationsGetPipelinesByWorkspaceNextResponse = PipelineListResponse; +export type PipelineGetPipelinesByWorkspaceNextResponse = PipelineListResponse; // @public -export interface PipelineOperationsGetPipelinesByWorkspaceOptionalParams extends coreClient.OperationOptions { +export interface PipelineGetPipelinesByWorkspaceOptionalParams extends coreClient.OperationOptions { } // @public -export type PipelineOperationsGetPipelinesByWorkspaceResponse = PipelineListResponse; +export type PipelineGetPipelinesByWorkspaceResponse = PipelineListResponse; // @public -export interface PipelineOperationsRenamePipelineOptionalParams extends coreClient.OperationOptions { - resumeFrom?: string; - updateIntervalInMs?: number; +export interface PipelineListResponse { + nextLink?: string; + value: PipelineResource[]; +} + +// @public +export interface PipelineOperations { + beginCreateOrUpdatePipeline(pipelineName: string, pipeline: PipelineResource, options?: PipelineCreateOrUpdatePipelineOptionalParams): Promise, PipelineCreateOrUpdatePipelineResponse>>; + beginCreateOrUpdatePipelineAndWait(pipelineName: string, pipeline: PipelineResource, options?: PipelineCreateOrUpdatePipelineOptionalParams): Promise; + beginDeletePipeline(pipelineName: string, options?: PipelineDeletePipelineOptionalParams): Promise, void>>; + beginDeletePipelineAndWait(pipelineName: string, options?: PipelineDeletePipelineOptionalParams): Promise; + beginRenamePipeline(pipelineName: string, request: ArtifactRenameRequest, options?: PipelineRenamePipelineOptionalParams): Promise, void>>; + beginRenamePipelineAndWait(pipelineName: string, request: ArtifactRenameRequest, options?: PipelineRenamePipelineOptionalParams): Promise; + createPipelineRun(pipelineName: string, options?: PipelineCreatePipelineRunOptionalParams): Promise; + getPipeline(pipelineName: string, options?: PipelineGetPipelineOptionalParams): Promise; + listPipelinesByWorkspace(options?: PipelineGetPipelinesByWorkspaceOptionalParams): PagedAsyncIterableIterator; } // @public @@ -5446,6 +5696,12 @@ export interface PipelineReference { // @public export type PipelineReferenceType = string; +// @public +export interface PipelineRenamePipelineOptionalParams extends coreClient.OperationOptions { + resumeFrom?: string; + updateIntervalInMs?: number; +} + // @public export type PipelineResource = SubResource & { [property: string]: any; @@ -5485,45 +5741,45 @@ export interface PipelineRun { } // @public -export interface PipelineRunInvokedBy { - readonly id?: string; - readonly invokedByType?: string; - readonly 
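`createPipelineRun` keeps its option bag across the rename, including the recovery knobs shown above. A sketch assuming a `PipelineOperations` instance and that `CreateRunResponse` carries a `runId`, as in prior betas:

    import type { PipelineOperations } from "@azure/synapse-artifacts";

    async function run(pipelines: PipelineOperations) {
      const response = await pipelines.createPipelineRun("myPipeline", {
        parameters: { inputPath: "raw/2021/10" }, // illustrative run parameter
        isRecovery: false
      });
      return response.runId; // assumed CreateRunResponse field
    }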
name?: string; +export interface PipelineRunCancelPipelineRunOptionalParams extends coreClient.OperationOptions { + isRecursive?: boolean; } // @public -export interface PipelineRunOperations { - cancelPipelineRun(runId: string, options?: PipelineRunOperationsCancelPipelineRunOptionalParams): Promise; - getPipelineRun(runId: string, options?: PipelineRunOperationsGetPipelineRunOptionalParams): Promise; - queryActivityRuns(pipelineName: string, runId: string, filterParameters: RunFilterParameters, options?: PipelineRunOperationsQueryActivityRunsOptionalParams): Promise; - queryPipelineRunsByWorkspace(filterParameters: RunFilterParameters, options?: PipelineRunOperationsQueryPipelineRunsByWorkspaceOptionalParams): Promise; +export interface PipelineRunGetPipelineRunOptionalParams extends coreClient.OperationOptions { } // @public -export interface PipelineRunOperationsCancelPipelineRunOptionalParams extends coreClient.OperationOptions { - isRecursive?: boolean; -} +export type PipelineRunGetPipelineRunResponse = PipelineRun; // @public -export interface PipelineRunOperationsGetPipelineRunOptionalParams extends coreClient.OperationOptions { +export interface PipelineRunInvokedBy { + readonly id?: string; + readonly invokedByType?: string; + readonly name?: string; } // @public -export type PipelineRunOperationsGetPipelineRunResponse = PipelineRun; +export interface PipelineRunOperations { + cancelPipelineRun(runId: string, options?: PipelineRunCancelPipelineRunOptionalParams): Promise; + getPipelineRun(runId: string, options?: PipelineRunGetPipelineRunOptionalParams): Promise; + queryActivityRuns(pipelineName: string, runId: string, filterParameters: RunFilterParameters, options?: PipelineRunQueryActivityRunsOptionalParams): Promise; + queryPipelineRunsByWorkspace(filterParameters: RunFilterParameters, options?: PipelineRunQueryPipelineRunsByWorkspaceOptionalParams): Promise; +} // @public -export interface PipelineRunOperationsQueryActivityRunsOptionalParams extends coreClient.OperationOptions { +export interface PipelineRunQueryActivityRunsOptionalParams extends coreClient.OperationOptions { } // @public -export type PipelineRunOperationsQueryActivityRunsResponse = ActivityRunsQueryResponse; +export type PipelineRunQueryActivityRunsResponse = ActivityRunsQueryResponse; // @public -export interface PipelineRunOperationsQueryPipelineRunsByWorkspaceOptionalParams extends coreClient.OperationOptions { +export interface PipelineRunQueryPipelineRunsByWorkspaceOptionalParams extends coreClient.OperationOptions { } // @public -export type PipelineRunOperationsQueryPipelineRunsByWorkspaceResponse = PipelineRunsQueryResponse; +export type PipelineRunQueryPipelineRunsByWorkspaceResponse = PipelineRunsQueryResponse; // @public export interface PipelineRunsQueryResponse { @@ -5700,7 +5956,7 @@ export interface RedshiftUnloadSettings { export type RelationalSource = CopySource & { type: "RelationalSource"; query?: any; - additionalColumns?: AdditionalColumns[]; + additionalColumns?: any; }; // @public @@ -5818,7 +6074,7 @@ export type RestSource = CopySource & { paginationRules?: any; httpRequestTimeout?: any; requestInterval?: any; - additionalColumns?: AdditionalColumns[]; + additionalColumns?: any; }; // @public @@ -5933,7 +6189,7 @@ export type SalesforceServiceCloudSource = CopySource & { type: "SalesforceServiceCloudSource"; query?: any; readBehavior?: SalesforceSourceReadBehavior; - additionalColumns?: AdditionalColumns[]; + additionalColumns?: any; }; // @public @@ -6180,7 +6436,7 @@ export 
interface ScheduleTriggerRecurrence { export interface ScriptAction { name: string; parameters?: string; - roles: HdiNodeTypes; + roles: any; uri: string; } @@ -6424,7 +6680,7 @@ export interface SparkBatchJob { result?: SparkBatchJobResultType; scheduler?: SparkScheduler; sparkPoolName?: string; - state?: string; + state?: LivyStates; submitterId?: string; submitterName?: string; tags?: { @@ -6450,6 +6706,88 @@ export interface SparkBatchJobState { terminatedAt?: Date; } +// @public +export interface SparkConfiguration { + annotations?: string[]; + configMergeRule?: { + [propertyName: string]: string; + }; + configs: { + [propertyName: string]: string; + }; + created?: Date; + createdBy?: string; + description?: string; + notes?: string; +} + +// @public +export interface SparkConfigurationCreateOrUpdateSparkConfigurationOptionalParams extends coreClient.OperationOptions { + ifMatch?: string; + resumeFrom?: string; + updateIntervalInMs?: number; +} + +// @public +export type SparkConfigurationCreateOrUpdateSparkConfigurationResponse = SparkConfigurationResource; + +// @public +export interface SparkConfigurationDeleteSparkConfigurationOptionalParams extends coreClient.OperationOptions { + resumeFrom?: string; + updateIntervalInMs?: number; +} + +// @public +export interface SparkConfigurationGetSparkConfigurationOptionalParams extends coreClient.OperationOptions { + ifNoneMatch?: string; +} + +// @public +export type SparkConfigurationGetSparkConfigurationResponse = SparkConfigurationResource; + +// @public +export interface SparkConfigurationGetSparkConfigurationsByWorkspaceNextOptionalParams extends coreClient.OperationOptions { +} + +// @public +export type SparkConfigurationGetSparkConfigurationsByWorkspaceNextResponse = SparkConfigurationListResponse; + +// @public +export interface SparkConfigurationGetSparkConfigurationsByWorkspaceOptionalParams extends coreClient.OperationOptions { +} + +// @public +export type SparkConfigurationGetSparkConfigurationsByWorkspaceResponse = SparkConfigurationListResponse; + +// @public +export interface SparkConfigurationListResponse { + nextLink?: string; + value: SparkConfigurationResource[]; +} + +// @public +export interface SparkConfigurationOperations { + beginCreateOrUpdateSparkConfiguration(sparkConfigurationName: string, sparkConfiguration: SparkConfigurationResource, options?: SparkConfigurationCreateOrUpdateSparkConfigurationOptionalParams): Promise, SparkConfigurationCreateOrUpdateSparkConfigurationResponse>>; + beginCreateOrUpdateSparkConfigurationAndWait(sparkConfigurationName: string, sparkConfiguration: SparkConfigurationResource, options?: SparkConfigurationCreateOrUpdateSparkConfigurationOptionalParams): Promise; + beginDeleteSparkConfiguration(sparkConfigurationName: string, options?: SparkConfigurationDeleteSparkConfigurationOptionalParams): Promise, void>>; + beginDeleteSparkConfigurationAndWait(sparkConfigurationName: string, options?: SparkConfigurationDeleteSparkConfigurationOptionalParams): Promise; + beginRenameSparkConfiguration(sparkConfigurationName: string, request: ArtifactRenameRequest, options?: SparkConfigurationRenameSparkConfigurationOptionalParams): Promise, void>>; + beginRenameSparkConfigurationAndWait(sparkConfigurationName: string, request: ArtifactRenameRequest, options?: SparkConfigurationRenameSparkConfigurationOptionalParams): Promise; + getSparkConfiguration(sparkConfigurationName: string, options?: SparkConfigurationGetSparkConfigurationOptionalParams): Promise; + 
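The regenerated surface gains a full `SparkConfigurationOperations` group. As a minimal usage sketch (not part of the patch): the workspace endpoint and configuration values are placeholders, `DefaultAzureCredential` is just one possible `TokenCredential`, and per the model above only `configs` is required on the payload.

```ts
import { ArtifactsClient } from "@azure/synapse-artifacts";
import { DefaultAzureCredential } from "@azure/identity";

// Placeholder workspace development endpoint.
const client = new ArtifactsClient(
  new DefaultAzureCredential(),
  "https://myworkspace.dev.azuresynapse.net"
);

// `configs` is the only required property of SparkConfiguration;
// annotations and notes are optional extras.
const created = await client.sparkConfigurationOperations.beginCreateOrUpdateSparkConfigurationAndWait(
  "demoSparkConfig",
  {
    properties: {
      configs: { "spark.executor.memory": "4g" },
      annotations: ["demo"],
      notes: "created from the SDK"
    }
  }
);
console.log(created.name, created.properties.configs);
```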
listSparkConfigurationsByWorkspace(options?: SparkConfigurationGetSparkConfigurationsByWorkspaceOptionalParams): PagedAsyncIterableIterator; +} + +// @public +export interface SparkConfigurationRenameSparkConfigurationOptionalParams extends coreClient.OperationOptions { + resumeFrom?: string; + updateIntervalInMs?: number; +} + +// @public +export type SparkConfigurationResource = SubResource & { + properties: SparkConfiguration; +}; + // @public export type SparkErrorSource = string; @@ -6457,6 +6795,7 @@ export type SparkErrorSource = string; export interface SparkJobDefinition { [property: string]: any; description?: string; + folder?: SparkJobDefinitionFolder; jobProperties: SparkJobProperties; language?: string; requiredSparkVersion?: string; @@ -6464,79 +6803,84 @@ export interface SparkJobDefinition { } // @public -export interface SparkJobDefinitionOperations { - beginCreateOrUpdateSparkJobDefinition(sparkJobDefinitionName: string, sparkJobDefinition: SparkJobDefinitionResource, options?: SparkJobDefinitionOperationsCreateOrUpdateSparkJobDefinitionOptionalParams): Promise, SparkJobDefinitionOperationsCreateOrUpdateSparkJobDefinitionResponse>>; - beginCreateOrUpdateSparkJobDefinitionAndWait(sparkJobDefinitionName: string, sparkJobDefinition: SparkJobDefinitionResource, options?: SparkJobDefinitionOperationsCreateOrUpdateSparkJobDefinitionOptionalParams): Promise; - beginDebugSparkJobDefinition(sparkJobDefinitionAzureResource: SparkJobDefinitionResource, options?: SparkJobDefinitionOperationsDebugSparkJobDefinitionOptionalParams): Promise, SparkJobDefinitionOperationsDebugSparkJobDefinitionResponse>>; - beginDebugSparkJobDefinitionAndWait(sparkJobDefinitionAzureResource: SparkJobDefinitionResource, options?: SparkJobDefinitionOperationsDebugSparkJobDefinitionOptionalParams): Promise; - beginDeleteSparkJobDefinition(sparkJobDefinitionName: string, options?: SparkJobDefinitionOperationsDeleteSparkJobDefinitionOptionalParams): Promise, void>>; - beginDeleteSparkJobDefinitionAndWait(sparkJobDefinitionName: string, options?: SparkJobDefinitionOperationsDeleteSparkJobDefinitionOptionalParams): Promise; - beginExecuteSparkJobDefinition(sparkJobDefinitionName: string, options?: SparkJobDefinitionOperationsExecuteSparkJobDefinitionOptionalParams): Promise, SparkJobDefinitionOperationsExecuteSparkJobDefinitionResponse>>; - beginExecuteSparkJobDefinitionAndWait(sparkJobDefinitionName: string, options?: SparkJobDefinitionOperationsExecuteSparkJobDefinitionOptionalParams): Promise; - beginRenameSparkJobDefinition(sparkJobDefinitionName: string, request: ArtifactRenameRequest, options?: SparkJobDefinitionOperationsRenameSparkJobDefinitionOptionalParams): Promise, void>>; - beginRenameSparkJobDefinitionAndWait(sparkJobDefinitionName: string, request: ArtifactRenameRequest, options?: SparkJobDefinitionOperationsRenameSparkJobDefinitionOptionalParams): Promise; - getSparkJobDefinition(sparkJobDefinitionName: string, options?: SparkJobDefinitionOperationsGetSparkJobDefinitionOptionalParams): Promise; - listSparkJobDefinitionsByWorkspace(options?: SparkJobDefinitionOperationsGetSparkJobDefinitionsByWorkspaceOptionalParams): PagedAsyncIterableIterator; -} - -// @public -export interface SparkJobDefinitionOperationsCreateOrUpdateSparkJobDefinitionOptionalParams extends coreClient.OperationOptions { +export interface SparkJobDefinitionCreateOrUpdateSparkJobDefinitionOptionalParams extends coreClient.OperationOptions { ifMatch?: string; resumeFrom?: string; updateIntervalInMs?: number; } // @public -export 
type SparkJobDefinitionOperationsCreateOrUpdateSparkJobDefinitionResponse = SparkJobDefinitionResource; +export type SparkJobDefinitionCreateOrUpdateSparkJobDefinitionResponse = SparkJobDefinitionResource; // @public -export interface SparkJobDefinitionOperationsDebugSparkJobDefinitionOptionalParams extends coreClient.OperationOptions { +export interface SparkJobDefinitionDebugSparkJobDefinitionOptionalParams extends coreClient.OperationOptions { resumeFrom?: string; updateIntervalInMs?: number; } // @public -export type SparkJobDefinitionOperationsDebugSparkJobDefinitionResponse = SparkBatchJob; +export type SparkJobDefinitionDebugSparkJobDefinitionResponse = SparkBatchJob; // @public -export interface SparkJobDefinitionOperationsDeleteSparkJobDefinitionOptionalParams extends coreClient.OperationOptions { +export interface SparkJobDefinitionDeleteSparkJobDefinitionOptionalParams extends coreClient.OperationOptions { resumeFrom?: string; updateIntervalInMs?: number; } // @public -export interface SparkJobDefinitionOperationsExecuteSparkJobDefinitionOptionalParams extends coreClient.OperationOptions { +export interface SparkJobDefinitionExecuteSparkJobDefinitionOptionalParams extends coreClient.OperationOptions { resumeFrom?: string; updateIntervalInMs?: number; } // @public -export type SparkJobDefinitionOperationsExecuteSparkJobDefinitionResponse = SparkBatchJob; +export type SparkJobDefinitionExecuteSparkJobDefinitionResponse = SparkBatchJob; + +// @public +export interface SparkJobDefinitionFolder { + name?: string; +} // @public -export interface SparkJobDefinitionOperationsGetSparkJobDefinitionOptionalParams extends coreClient.OperationOptions { +export interface SparkJobDefinitionGetSparkJobDefinitionOptionalParams extends coreClient.OperationOptions { ifNoneMatch?: string; } // @public -export type SparkJobDefinitionOperationsGetSparkJobDefinitionResponse = SparkJobDefinitionResource; +export type SparkJobDefinitionGetSparkJobDefinitionResponse = SparkJobDefinitionResource; // @public -export interface SparkJobDefinitionOperationsGetSparkJobDefinitionsByWorkspaceNextOptionalParams extends coreClient.OperationOptions { +export interface SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceNextOptionalParams extends coreClient.OperationOptions { } // @public -export type SparkJobDefinitionOperationsGetSparkJobDefinitionsByWorkspaceNextResponse = SparkJobDefinitionsListResponse; +export type SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceNextResponse = SparkJobDefinitionsListResponse; // @public -export interface SparkJobDefinitionOperationsGetSparkJobDefinitionsByWorkspaceOptionalParams extends coreClient.OperationOptions { +export interface SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceOptionalParams extends coreClient.OperationOptions { } // @public -export type SparkJobDefinitionOperationsGetSparkJobDefinitionsByWorkspaceResponse = SparkJobDefinitionsListResponse; +export type SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceResponse = SparkJobDefinitionsListResponse; // @public -export interface SparkJobDefinitionOperationsRenameSparkJobDefinitionOptionalParams extends coreClient.OperationOptions { +export interface SparkJobDefinitionOperations { + beginCreateOrUpdateSparkJobDefinition(sparkJobDefinitionName: string, sparkJobDefinition: SparkJobDefinitionResource, options?: SparkJobDefinitionCreateOrUpdateSparkJobDefinitionOptionalParams): Promise, SparkJobDefinitionCreateOrUpdateSparkJobDefinitionResponse>>; + 
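All of these long-running operations follow the same `begin*` / `begin*AndWait` pairing; a sketch using the Spark job definition group, including the new `folder` property. The pool name, file path, and job sizing are illustrative, and `targetBigDataPool` comes from the generated models rather than the hunk shown here.

```ts
import type { ArtifactsClient, SparkJobDefinitionResource } from "@azure/synapse-artifacts";
declare const client: ArtifactsClient; // constructed as in the earlier sketch

const definition: SparkJobDefinitionResource = {
  properties: {
    folder: { name: "batch/etl" }, // `folder` is new on SparkJobDefinition
    targetBigDataPool: { type: "BigDataPoolReference", referenceName: "testsparkpool" },
    jobProperties: {
      file: "abfss://jobs@account.dfs.core.windows.net/wordcount.jar", // hypothetical path
      driverMemory: "4g",
      driverCores: 2,
      executorMemory: "4g",
      executorCores: 2,
      numExecutors: 2
    }
  }
};

// begin* returns a poller for manual control...
const poller = await client.sparkJobDefinitionOperations.beginCreateOrUpdateSparkJobDefinition(
  "wordcount",
  definition
);
await poller.pollUntilDone();

// ...while begin*AndWait resolves straight to the final result.
const batch = await client.sparkJobDefinitionOperations.beginExecuteSparkJobDefinitionAndWait("wordcount");
console.log(batch.state); // typed as LivyStates in this release, not string
```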
beginCreateOrUpdateSparkJobDefinitionAndWait(sparkJobDefinitionName: string, sparkJobDefinition: SparkJobDefinitionResource, options?: SparkJobDefinitionCreateOrUpdateSparkJobDefinitionOptionalParams): Promise; + beginDebugSparkJobDefinition(sparkJobDefinitionAzureResource: SparkJobDefinitionResource, options?: SparkJobDefinitionDebugSparkJobDefinitionOptionalParams): Promise, SparkJobDefinitionDebugSparkJobDefinitionResponse>>; + beginDebugSparkJobDefinitionAndWait(sparkJobDefinitionAzureResource: SparkJobDefinitionResource, options?: SparkJobDefinitionDebugSparkJobDefinitionOptionalParams): Promise; + beginDeleteSparkJobDefinition(sparkJobDefinitionName: string, options?: SparkJobDefinitionDeleteSparkJobDefinitionOptionalParams): Promise, void>>; + beginDeleteSparkJobDefinitionAndWait(sparkJobDefinitionName: string, options?: SparkJobDefinitionDeleteSparkJobDefinitionOptionalParams): Promise; + beginExecuteSparkJobDefinition(sparkJobDefinitionName: string, options?: SparkJobDefinitionExecuteSparkJobDefinitionOptionalParams): Promise, SparkJobDefinitionExecuteSparkJobDefinitionResponse>>; + beginExecuteSparkJobDefinitionAndWait(sparkJobDefinitionName: string, options?: SparkJobDefinitionExecuteSparkJobDefinitionOptionalParams): Promise; + beginRenameSparkJobDefinition(sparkJobDefinitionName: string, request: ArtifactRenameRequest, options?: SparkJobDefinitionRenameSparkJobDefinitionOptionalParams): Promise, void>>; + beginRenameSparkJobDefinitionAndWait(sparkJobDefinitionName: string, request: ArtifactRenameRequest, options?: SparkJobDefinitionRenameSparkJobDefinitionOptionalParams): Promise; + getSparkJobDefinition(sparkJobDefinitionName: string, options?: SparkJobDefinitionGetSparkJobDefinitionOptionalParams): Promise; + listSparkJobDefinitionsByWorkspace(options?: SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceOptionalParams): PagedAsyncIterableIterator; +} + +// @public +export interface SparkJobDefinitionRenameSparkJobDefinitionOptionalParams extends coreClient.OperationOptions { resumeFrom?: string; updateIntervalInMs?: number; } @@ -6691,8 +7035,10 @@ export type SparkThriftTransportProtocol = string; // @public export interface SqlConnection { [property: string]: any; - name: string; - type: SqlConnectionType; + databaseName?: string; + name?: string; + poolName?: string; + type?: SqlConnectionType; } // @public @@ -6819,75 +7165,82 @@ export interface SqlScript { [property: string]: any; content: SqlScriptContent; description?: string; + folder?: SqlScriptFolder; type?: SqlScriptType; } // @public export interface SqlScriptContent { [property: string]: any; - currentConnection: SqlConnection; + currentConnection?: SqlConnection; metadata?: SqlScriptMetadata; query: string; + resultLimit?: number; } // @public -export interface SqlScriptMetadata { - [property: string]: any; - language?: string; +export interface SqlScriptCreateOrUpdateSqlScriptOptionalParams extends coreClient.OperationOptions { + ifMatch?: string; + resumeFrom?: string; + updateIntervalInMs?: number; } // @public -export interface SqlScriptOperations { - beginCreateOrUpdateSqlScript(sqlScriptName: string, sqlScript: SqlScriptResource, options?: SqlScriptOperationsCreateOrUpdateSqlScriptOptionalParams): Promise, SqlScriptOperationsCreateOrUpdateSqlScriptResponse>>; - beginCreateOrUpdateSqlScriptAndWait(sqlScriptName: string, sqlScript: SqlScriptResource, options?: SqlScriptOperationsCreateOrUpdateSqlScriptOptionalParams): Promise; - beginDeleteSqlScript(sqlScriptName: string, options?: 
SqlScriptOperationsDeleteSqlScriptOptionalParams): Promise, void>>; - beginDeleteSqlScriptAndWait(sqlScriptName: string, options?: SqlScriptOperationsDeleteSqlScriptOptionalParams): Promise; - beginRenameSqlScript(sqlScriptName: string, request: ArtifactRenameRequest, options?: SqlScriptOperationsRenameSqlScriptOptionalParams): Promise, void>>; - beginRenameSqlScriptAndWait(sqlScriptName: string, request: ArtifactRenameRequest, options?: SqlScriptOperationsRenameSqlScriptOptionalParams): Promise; - getSqlScript(sqlScriptName: string, options?: SqlScriptOperationsGetSqlScriptOptionalParams): Promise; - listSqlScriptsByWorkspace(options?: SqlScriptOperationsGetSqlScriptsByWorkspaceOptionalParams): PagedAsyncIterableIterator; -} +export type SqlScriptCreateOrUpdateSqlScriptResponse = SqlScriptResource; // @public -export interface SqlScriptOperationsCreateOrUpdateSqlScriptOptionalParams extends coreClient.OperationOptions { - ifMatch?: string; +export interface SqlScriptDeleteSqlScriptOptionalParams extends coreClient.OperationOptions { resumeFrom?: string; updateIntervalInMs?: number; } // @public -export type SqlScriptOperationsCreateOrUpdateSqlScriptResponse = SqlScriptResource; +export interface SqlScriptFolder { + name?: string; +} // @public -export interface SqlScriptOperationsDeleteSqlScriptOptionalParams extends coreClient.OperationOptions { - resumeFrom?: string; - updateIntervalInMs?: number; +export interface SqlScriptGetSqlScriptOptionalParams extends coreClient.OperationOptions { + ifNoneMatch?: string; } // @public -export interface SqlScriptOperationsGetSqlScriptOptionalParams extends coreClient.OperationOptions { - ifNoneMatch?: string; +export type SqlScriptGetSqlScriptResponse = SqlScriptResource; + +// @public +export interface SqlScriptGetSqlScriptsByWorkspaceNextOptionalParams extends coreClient.OperationOptions { } // @public -export type SqlScriptOperationsGetSqlScriptResponse = SqlScriptResource; +export type SqlScriptGetSqlScriptsByWorkspaceNextResponse = SqlScriptsListResponse; // @public -export interface SqlScriptOperationsGetSqlScriptsByWorkspaceNextOptionalParams extends coreClient.OperationOptions { +export interface SqlScriptGetSqlScriptsByWorkspaceOptionalParams extends coreClient.OperationOptions { } // @public -export type SqlScriptOperationsGetSqlScriptsByWorkspaceNextResponse = SqlScriptsListResponse; +export type SqlScriptGetSqlScriptsByWorkspaceResponse = SqlScriptsListResponse; // @public -export interface SqlScriptOperationsGetSqlScriptsByWorkspaceOptionalParams extends coreClient.OperationOptions { +export interface SqlScriptMetadata { + [property: string]: any; + language?: string; } // @public -export type SqlScriptOperationsGetSqlScriptsByWorkspaceResponse = SqlScriptsListResponse; +export interface SqlScriptOperations { + beginCreateOrUpdateSqlScript(sqlScriptName: string, sqlScript: SqlScriptResource, options?: SqlScriptCreateOrUpdateSqlScriptOptionalParams): Promise, SqlScriptCreateOrUpdateSqlScriptResponse>>; + beginCreateOrUpdateSqlScriptAndWait(sqlScriptName: string, sqlScript: SqlScriptResource, options?: SqlScriptCreateOrUpdateSqlScriptOptionalParams): Promise; + beginDeleteSqlScript(sqlScriptName: string, options?: SqlScriptDeleteSqlScriptOptionalParams): Promise, void>>; + beginDeleteSqlScriptAndWait(sqlScriptName: string, options?: SqlScriptDeleteSqlScriptOptionalParams): Promise; + beginRenameSqlScript(sqlScriptName: string, request: ArtifactRenameRequest, options?: SqlScriptRenameSqlScriptOptionalParams): Promise, void>>; + 
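A sketch that exercises the loosened `SqlScript` surface: the new optional `folder`, the now-optional `currentConnection`, the new `resultLimit`, and the new `poolName`/`databaseName` members on `SqlConnection`. All names are made up, and the `"SqlPool"` connection type string is an assumed known value.

```ts
import type { ArtifactsClient, SqlScriptResource } from "@azure/synapse-artifacts";
declare const client: ArtifactsClient; // constructed as in the earlier sketch

const script: SqlScriptResource = {
  name: "daily-report",
  properties: {
    folder: { name: "reports" }, // `folder` is new on SqlScript
    content: {
      query: "SELECT TOP 10 * FROM sales ORDER BY amount DESC",
      // currentConnection is optional now, and can target a pool and database directly.
      currentConnection: { type: "SqlPool", poolName: "mysqlpool", databaseName: "salesdb" },
      resultLimit: 5000
    }
  }
};

await client.sqlScriptOperations.beginCreateOrUpdateSqlScriptAndWait("daily-report", script);
```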
beginRenameSqlScriptAndWait(sqlScriptName: string, request: ArtifactRenameRequest, options?: SqlScriptRenameSqlScriptOptionalParams): Promise; + getSqlScript(sqlScriptName: string, options?: SqlScriptGetSqlScriptOptionalParams): Promise; + listSqlScriptsByWorkspace(options?: SqlScriptGetSqlScriptsByWorkspaceOptionalParams): PagedAsyncIterableIterator; +} // @public -export interface SqlScriptOperationsRenameSqlScriptOptionalParams extends coreClient.OperationOptions { +export interface SqlScriptRenameSqlScriptOptionalParams extends coreClient.OperationOptions { resumeFrom?: string; updateIntervalInMs?: number; } @@ -7216,13 +7569,13 @@ export interface SynapseSparkJobReference { // @public export type TabularSource = CopySource & { - type: "TabularSource" | "AzureTableSource" | "InformixSource" | "Db2Source" | "OdbcSource" | "MySqlSource" | "PostgreSqlSource" | "SybaseSource" | "SapBwSource" | "SalesforceSource" | "SapCloudForCustomerSource" | "SapEccSource" | "SapHanaSource" | "SapOpenHubSource" | "SapTableSource" | "SqlSource" | "SqlServerSource" | "AzureSqlSource" | "SqlMISource" | "SqlDWSource" | "AzureMySqlSource" | "TeradataSource" | "CassandraSource" | "AmazonMWSSource" | "AzurePostgreSqlSource" | "ConcurSource" | "CouchbaseSource" | "DrillSource" | "EloquaSource" | "GoogleBigQuerySource" | "GreenplumSource" | "HBaseSource" | "HiveSource" | "HubspotSource" | "ImpalaSource" | "JiraSource" | "MagentoSource" | "MariaDBSource" | "AzureMariaDBSource" | "MarketoSource" | "PaypalSource" | "PhoenixSource" | "PrestoSource" | "QuickBooksSource" | "ServiceNowSource" | "ShopifySource" | "SparkSource" | "SquareSource" | "XeroSource" | "ZohoSource" | "NetezzaSource" | "VerticaSource" | "SalesforceMarketingCloudSource" | "ResponsysSource" | "DynamicsAXSource" | "OracleServiceCloudSource" | "GoogleAdWordsSource" | "AmazonRedshiftSource"; + type: "TabularSource" | "AzureTableSource" | "InformixSource" | "Db2Source" | "OdbcSource" | "MySqlSource" | "PostgreSqlSource" | "SybaseSource" | "SapBwSource" | "SalesforceSource" | "SapCloudForCustomerSource" | "SapEccSource" | "SapHanaSource" | "SapOpenHubSource" | "SapTableSource" | "SqlSource" | "SqlServerSource" | "AmazonRdsForSqlServerSource" | "AzureSqlSource" | "SqlMISource" | "SqlDWSource" | "AzureMySqlSource" | "TeradataSource" | "CassandraSource" | "AmazonMWSSource" | "AzurePostgreSqlSource" | "ConcurSource" | "CouchbaseSource" | "DrillSource" | "EloquaSource" | "GoogleBigQuerySource" | "GreenplumSource" | "HBaseSource" | "HiveSource" | "HubspotSource" | "ImpalaSource" | "JiraSource" | "MagentoSource" | "MariaDBSource" | "AzureMariaDBSource" | "MarketoSource" | "PaypalSource" | "PhoenixSource" | "PrestoSource" | "QuickBooksSource" | "ServiceNowSource" | "ShopifySource" | "SparkSource" | "SquareSource" | "XeroSource" | "ZohoSource" | "NetezzaSource" | "VerticaSource" | "SalesforceMarketingCloudSource" | "ResponsysSource" | "DynamicsAXSource" | "OracleServiceCloudSource" | "GoogleAdWordsSource" | "AmazonRedshiftSource"; queryTimeout?: any; - additionalColumns?: AdditionalColumns[]; + additionalColumns?: any; }; // @public (undocumented) -export type TabularSourceUnion = TabularSource | AzureTableSource | InformixSource | Db2Source | OdbcSource | MySqlSource | PostgreSqlSource | SybaseSource | SapBwSource | SalesforceSource | SapCloudForCustomerSource | SapEccSource | SapHanaSource | SapOpenHubSource | SapTableSource | SqlSource | SqlServerSource | AzureSqlSource | SqlMISource | SqlDWSource | AzureMySqlSource | TeradataSource | CassandraSource | 
AmazonMWSSource | AzurePostgreSqlSource | ConcurSource | CouchbaseSource | DrillSource | EloquaSource | GoogleBigQuerySource | GreenplumSource | HBaseSource | HiveSource | HubspotSource | ImpalaSource | JiraSource | MagentoSource | MariaDBSource | AzureMariaDBSource | MarketoSource | PaypalSource | PhoenixSource | PrestoSource | QuickBooksSource | ServiceNowSource | ShopifySource | SparkSource | SquareSource | XeroSource | ZohoSource | NetezzaSource | VerticaSource | SalesforceMarketingCloudSource | ResponsysSource | DynamicsAXSource | OracleServiceCloudSource | GoogleAdWordsSource | AmazonRedshiftSource; +export type TabularSourceUnion = TabularSource | AzureTableSource | InformixSource | Db2Source | OdbcSource | MySqlSource | PostgreSqlSource | SybaseSource | SapBwSource | SalesforceSource | SapCloudForCustomerSource | SapEccSource | SapHanaSource | SapOpenHubSource | SapTableSource | SqlSource | SqlServerSource | AmazonRdsForSqlServerSource | AzureSqlSource | SqlMISource | SqlDWSource | AzureMySqlSource | TeradataSource | CassandraSource | AmazonMWSSource | AzurePostgreSqlSource | ConcurSource | CouchbaseSource | DrillSource | EloquaSource | GoogleBigQuerySource | GreenplumSource | HBaseSource | HiveSource | HubspotSource | ImpalaSource | JiraSource | MagentoSource | MariaDBSource | AzureMariaDBSource | MarketoSource | PaypalSource | PhoenixSource | PrestoSource | QuickBooksSource | ServiceNowSource | ShopifySource | SparkSource | SquareSource | XeroSource | ZohoSource | NetezzaSource | VerticaSource | SalesforceMarketingCloudSource | ResponsysSource | DynamicsAXSource | OracleServiceCloudSource | GoogleAdWordsSource | AmazonRedshiftSource; // @public export type TabularTranslator = CopyTranslator & { @@ -7325,119 +7678,89 @@ export interface Trigger { } // @public -export interface TriggerDependencyProvisioningStatus { - provisioningStatus: string; - triggerName: string; -} - -// @public -export type TriggerDependencyReference = DependencyReference & { - type: "TriggerDependencyReference" | "TumblingWindowTriggerDependencyReference"; - referenceTrigger: TriggerReference; -}; - -// @public (undocumented) -export type TriggerDependencyReferenceUnion = TriggerDependencyReference | TumblingWindowTriggerDependencyReference; - -// @public -export interface TriggerListResponse { - nextLink?: string; - value: TriggerResource[]; -} - -// @public -export interface TriggerOperations { - beginCreateOrUpdateTrigger(triggerName: string, trigger: TriggerResource, options?: TriggerOperationsCreateOrUpdateTriggerOptionalParams): Promise, TriggerOperationsCreateOrUpdateTriggerResponse>>; - beginCreateOrUpdateTriggerAndWait(triggerName: string, trigger: TriggerResource, options?: TriggerOperationsCreateOrUpdateTriggerOptionalParams): Promise; - beginDeleteTrigger(triggerName: string, options?: TriggerOperationsDeleteTriggerOptionalParams): Promise, void>>; - beginDeleteTriggerAndWait(triggerName: string, options?: TriggerOperationsDeleteTriggerOptionalParams): Promise; - beginStartTrigger(triggerName: string, options?: TriggerOperationsStartTriggerOptionalParams): Promise, void>>; - beginStartTriggerAndWait(triggerName: string, options?: TriggerOperationsStartTriggerOptionalParams): Promise; - beginStopTrigger(triggerName: string, options?: TriggerOperationsStopTriggerOptionalParams): Promise, void>>; - beginStopTriggerAndWait(triggerName: string, options?: TriggerOperationsStopTriggerOptionalParams): Promise; - beginSubscribeTriggerToEvents(triggerName: string, options?: 
TriggerOperationsSubscribeTriggerToEventsOptionalParams): Promise, TriggerOperationsSubscribeTriggerToEventsResponse>>; - beginSubscribeTriggerToEventsAndWait(triggerName: string, options?: TriggerOperationsSubscribeTriggerToEventsOptionalParams): Promise; - beginUnsubscribeTriggerFromEvents(triggerName: string, options?: TriggerOperationsUnsubscribeTriggerFromEventsOptionalParams): Promise, TriggerOperationsUnsubscribeTriggerFromEventsResponse>>; - beginUnsubscribeTriggerFromEventsAndWait(triggerName: string, options?: TriggerOperationsUnsubscribeTriggerFromEventsOptionalParams): Promise; - getEventSubscriptionStatus(triggerName: string, options?: TriggerOperationsGetEventSubscriptionStatusOptionalParams): Promise; - getTrigger(triggerName: string, options?: TriggerOperationsGetTriggerOptionalParams): Promise; - listTriggersByWorkspace(options?: TriggerOperationsGetTriggersByWorkspaceOptionalParams): PagedAsyncIterableIterator; -} - -// @public -export interface TriggerOperationsCreateOrUpdateTriggerOptionalParams extends coreClient.OperationOptions { +export interface TriggerCreateOrUpdateTriggerOptionalParams extends coreClient.OperationOptions { ifMatch?: string; resumeFrom?: string; updateIntervalInMs?: number; } // @public -export type TriggerOperationsCreateOrUpdateTriggerResponse = TriggerResource; +export type TriggerCreateOrUpdateTriggerResponse = TriggerResource; // @public -export interface TriggerOperationsDeleteTriggerOptionalParams extends coreClient.OperationOptions { +export interface TriggerDeleteTriggerOptionalParams extends coreClient.OperationOptions { resumeFrom?: string; updateIntervalInMs?: number; } // @public -export interface TriggerOperationsGetEventSubscriptionStatusOptionalParams extends coreClient.OperationOptions { +export interface TriggerDependencyProvisioningStatus { + provisioningStatus: string; + triggerName: string; } // @public -export type TriggerOperationsGetEventSubscriptionStatusResponse = TriggerSubscriptionOperationStatus; - -// @public -export interface TriggerOperationsGetTriggerOptionalParams extends coreClient.OperationOptions { - ifNoneMatch?: string; -} +export type TriggerDependencyReference = DependencyReference & { + type: "TriggerDependencyReference" | "TumblingWindowTriggerDependencyReference"; + referenceTrigger: TriggerReference; +}; -// @public -export type TriggerOperationsGetTriggerResponse = TriggerResource; +// @public (undocumented) +export type TriggerDependencyReferenceUnion = TriggerDependencyReference | TumblingWindowTriggerDependencyReference; // @public -export interface TriggerOperationsGetTriggersByWorkspaceNextOptionalParams extends coreClient.OperationOptions { +export interface TriggerGetEventSubscriptionStatusOptionalParams extends coreClient.OperationOptions { } // @public -export type TriggerOperationsGetTriggersByWorkspaceNextResponse = TriggerListResponse; +export type TriggerGetEventSubscriptionStatusResponse = TriggerSubscriptionOperationStatus; // @public -export interface TriggerOperationsGetTriggersByWorkspaceOptionalParams extends coreClient.OperationOptions { +export interface TriggerGetTriggerOptionalParams extends coreClient.OperationOptions { + ifNoneMatch?: string; } // @public -export type TriggerOperationsGetTriggersByWorkspaceResponse = TriggerListResponse; +export type TriggerGetTriggerResponse = TriggerResource; // @public -export interface TriggerOperationsStartTriggerOptionalParams extends coreClient.OperationOptions { - resumeFrom?: string; - updateIntervalInMs?: number; +export interface 
TriggerGetTriggersByWorkspaceNextOptionalParams extends coreClient.OperationOptions { } // @public -export interface TriggerOperationsStopTriggerOptionalParams extends coreClient.OperationOptions { - resumeFrom?: string; - updateIntervalInMs?: number; -} +export type TriggerGetTriggersByWorkspaceNextResponse = TriggerListResponse; // @public -export interface TriggerOperationsSubscribeTriggerToEventsOptionalParams extends coreClient.OperationOptions { - resumeFrom?: string; - updateIntervalInMs?: number; +export interface TriggerGetTriggersByWorkspaceOptionalParams extends coreClient.OperationOptions { } // @public -export type TriggerOperationsSubscribeTriggerToEventsResponse = TriggerSubscriptionOperationStatus; +export type TriggerGetTriggersByWorkspaceResponse = TriggerListResponse; // @public -export interface TriggerOperationsUnsubscribeTriggerFromEventsOptionalParams extends coreClient.OperationOptions { - resumeFrom?: string; - updateIntervalInMs?: number; +export interface TriggerListResponse { + nextLink?: string; + value: TriggerResource[]; } // @public -export type TriggerOperationsUnsubscribeTriggerFromEventsResponse = TriggerSubscriptionOperationStatus; +export interface TriggerOperations { + beginCreateOrUpdateTrigger(triggerName: string, trigger: TriggerResource, options?: TriggerCreateOrUpdateTriggerOptionalParams): Promise, TriggerCreateOrUpdateTriggerResponse>>; + beginCreateOrUpdateTriggerAndWait(triggerName: string, trigger: TriggerResource, options?: TriggerCreateOrUpdateTriggerOptionalParams): Promise; + beginDeleteTrigger(triggerName: string, options?: TriggerDeleteTriggerOptionalParams): Promise, void>>; + beginDeleteTriggerAndWait(triggerName: string, options?: TriggerDeleteTriggerOptionalParams): Promise; + beginStartTrigger(triggerName: string, options?: TriggerStartTriggerOptionalParams): Promise, void>>; + beginStartTriggerAndWait(triggerName: string, options?: TriggerStartTriggerOptionalParams): Promise; + beginStopTrigger(triggerName: string, options?: TriggerStopTriggerOptionalParams): Promise, void>>; + beginStopTriggerAndWait(triggerName: string, options?: TriggerStopTriggerOptionalParams): Promise; + beginSubscribeTriggerToEvents(triggerName: string, options?: TriggerSubscribeTriggerToEventsOptionalParams): Promise, TriggerSubscribeTriggerToEventsResponse>>; + beginSubscribeTriggerToEventsAndWait(triggerName: string, options?: TriggerSubscribeTriggerToEventsOptionalParams): Promise; + beginUnsubscribeTriggerFromEvents(triggerName: string, options?: TriggerUnsubscribeTriggerFromEventsOptionalParams): Promise, TriggerUnsubscribeTriggerFromEventsResponse>>; + beginUnsubscribeTriggerFromEventsAndWait(triggerName: string, options?: TriggerUnsubscribeTriggerFromEventsOptionalParams): Promise; + getEventSubscriptionStatus(triggerName: string, options?: TriggerGetEventSubscriptionStatusOptionalParams): Promise; + getTrigger(triggerName: string, options?: TriggerGetTriggerOptionalParams): Promise; + listTriggersByWorkspace(options?: TriggerGetTriggersByWorkspaceOptionalParams): PagedAsyncIterableIterator; +} // @public export interface TriggerPipelineReference { @@ -7479,25 +7802,25 @@ export interface TriggerRun { } // @public -export interface TriggerRunOperations { - cancelTriggerInstance(triggerName: string, runId: string, options?: TriggerRunOperationsCancelTriggerInstanceOptionalParams): Promise; - queryTriggerRunsByWorkspace(filterParameters: RunFilterParameters, options?: TriggerRunOperationsQueryTriggerRunsByWorkspaceOptionalParams): Promise; - 
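A sketch of the trigger lifecycle against the renamed option types: subscribe an event trigger, start it, then query and rerun failed trigger runs. The trigger name is made up; the required query-window fields and the `TriggerRun` members follow the generated models rather than the hunks shown here.

```ts
import type { ArtifactsClient } from "@azure/synapse-artifacts";
declare const client: ArtifactsClient; // constructed as in the earlier sketch

// Event-based triggers are subscribed to events and started via LROs.
await client.triggerOperations.beginSubscribeTriggerToEventsAndWait("blobEventsTrigger");
await client.triggerOperations.beginStartTriggerAndWait("blobEventsTrigger");

// Trigger-run queries take an explicit time window (both bounds required).
const runs = await client.triggerRunOperations.queryTriggerRunsByWorkspace({
  lastUpdatedAfter: new Date(Date.now() - 24 * 60 * 60 * 1000),
  lastUpdatedBefore: new Date()
});

for (const run of runs.value) {
  // "Failed" is assumed here as one of the service's known status values.
  if (run.status === "Failed" && run.triggerName && run.triggerRunId) {
    await client.triggerRunOperations.rerunTriggerInstance(run.triggerName, run.triggerRunId);
  }
}
```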
rerunTriggerInstance(triggerName: string, runId: string, options?: TriggerRunOperationsRerunTriggerInstanceOptionalParams): Promise; +export interface TriggerRunCancelTriggerInstanceOptionalParams extends coreClient.OperationOptions { } // @public -export interface TriggerRunOperationsCancelTriggerInstanceOptionalParams extends coreClient.OperationOptions { +export interface TriggerRunOperations { + cancelTriggerInstance(triggerName: string, runId: string, options?: TriggerRunCancelTriggerInstanceOptionalParams): Promise; + queryTriggerRunsByWorkspace(filterParameters: RunFilterParameters, options?: TriggerRunQueryTriggerRunsByWorkspaceOptionalParams): Promise; + rerunTriggerInstance(triggerName: string, runId: string, options?: TriggerRunRerunTriggerInstanceOptionalParams): Promise; } // @public -export interface TriggerRunOperationsQueryTriggerRunsByWorkspaceOptionalParams extends coreClient.OperationOptions { +export interface TriggerRunQueryTriggerRunsByWorkspaceOptionalParams extends coreClient.OperationOptions { } // @public -export type TriggerRunOperationsQueryTriggerRunsByWorkspaceResponse = TriggerRunsQueryResponse; +export type TriggerRunQueryTriggerRunsByWorkspaceResponse = TriggerRunsQueryResponse; // @public -export interface TriggerRunOperationsRerunTriggerInstanceOptionalParams extends coreClient.OperationOptions { +export interface TriggerRunRerunTriggerInstanceOptionalParams extends coreClient.OperationOptions { } // @public @@ -7512,6 +7835,27 @@ export type TriggerRunStatus = string; // @public export type TriggerRuntimeState = string; +// @public +export interface TriggerStartTriggerOptionalParams extends coreClient.OperationOptions { + resumeFrom?: string; + updateIntervalInMs?: number; +} + +// @public +export interface TriggerStopTriggerOptionalParams extends coreClient.OperationOptions { + resumeFrom?: string; + updateIntervalInMs?: number; +} + +// @public +export interface TriggerSubscribeTriggerToEventsOptionalParams extends coreClient.OperationOptions { + resumeFrom?: string; + updateIntervalInMs?: number; +} + +// @public +export type TriggerSubscribeTriggerToEventsResponse = TriggerSubscriptionOperationStatus; + // @public export interface TriggerSubscriptionOperationStatus { readonly status?: EventSubscriptionStatus; @@ -7521,6 +7865,15 @@ export interface TriggerSubscriptionOperationStatus { // @public (undocumented) export type TriggerUnion = Trigger | RerunTumblingWindowTrigger | MultiplePipelineTriggerUnion | TumblingWindowTrigger | ChainingTrigger; +// @public +export interface TriggerUnsubscribeTriggerFromEventsOptionalParams extends coreClient.OperationOptions { + resumeFrom?: string; + updateIntervalInMs?: number; +} + +// @public +export type TriggerUnsubscribeTriggerFromEventsResponse = TriggerSubscriptionOperationStatus; + // @public export type TumblingWindowFrequency = string; @@ -7704,7 +8057,7 @@ export type WebLinkedServiceTypePropertiesUnion = WebLinkedServiceTypeProperties // @public export type WebSource = CopySource & { type: "WebSource"; - additionalColumns?: AdditionalColumns[]; + additionalColumns?: any; }; // @public @@ -7739,6 +8092,13 @@ export type Workspace = TrackedResource & { readonly adlaResourceId?: string; }; +// @public +export interface WorkspaceGetOptionalParams extends coreClient.OperationOptions { +} + +// @public +export type WorkspaceGetResponse = Workspace; + // @public export interface WorkspaceGitRepoManagement { getGitHubAccessToken(gitHubAccessTokenRequest: GitHubAccessTokenRequest, options?: 
WorkspaceGitRepoManagementGetGitHubAccessTokenOptionalParams): Promise; @@ -7767,16 +8127,9 @@ export interface WorkspaceKeyDetails { // @public export interface WorkspaceOperations { - get(options?: WorkspaceOperationsGetOptionalParams): Promise; -} - -// @public -export interface WorkspaceOperationsGetOptionalParams extends coreClient.OperationOptions { + get(options?: WorkspaceGetOptionalParams): Promise; } -// @public -export type WorkspaceOperationsGetResponse = Workspace; - // @public export interface WorkspaceRepositoryConfiguration { accountName?: string; @@ -7849,7 +8202,7 @@ export type XmlSource = CopySource & { type: "XmlSource"; storeSettings?: StoreReadSettingsUnion; formatSettings?: XmlReadSettings; - additionalColumns?: AdditionalColumns[]; + additionalColumns?: any; }; // @public diff --git a/sdk/synapse/synapse-artifacts/src/artifactsClient.ts b/sdk/synapse/synapse-artifacts/src/artifactsClient.ts index 58a8c7b86407..8dc16ebca67d 100644 --- a/sdk/synapse/synapse-artifacts/src/artifactsClient.ts +++ b/sdk/synapse/synapse-artifacts/src/artifactsClient.ts @@ -8,42 +8,50 @@ import * as coreAuth from "@azure/core-auth"; import { - LinkedServiceOperationsImpl, + KqlScriptsImpl, + KqlScriptOperationsImpl, + SparkConfigurationOperationsImpl, + BigDataPoolsImpl, + DataFlowOperationsImpl, + DataFlowDebugSessionImpl, DatasetOperationsImpl, + WorkspaceGitRepoManagementImpl, + IntegrationRuntimesImpl, + LibraryImpl, + LinkedServiceOperationsImpl, + NotebookOperationsImpl, + NotebookOperationResultImpl, PipelineOperationsImpl, PipelineRunOperationsImpl, - TriggerOperationsImpl, - TriggerRunOperationsImpl, - DataFlowOperationsImpl, - DataFlowDebugSessionImpl, - SqlScriptOperationsImpl, SparkJobDefinitionOperationsImpl, - NotebookOperationsImpl, - WorkspaceOperationsImpl, SqlPoolsImpl, - BigDataPoolsImpl, - IntegrationRuntimesImpl, - LibraryImpl, - WorkspaceGitRepoManagementImpl + SqlScriptOperationsImpl, + TriggerOperationsImpl, + TriggerRunOperationsImpl, + WorkspaceOperationsImpl } from "./operations"; import { - LinkedServiceOperations, + KqlScripts, + KqlScriptOperations, + SparkConfigurationOperations, + BigDataPools, + DataFlowOperations, + DataFlowDebugSession, DatasetOperations, + WorkspaceGitRepoManagement, + IntegrationRuntimes, + Library, + LinkedServiceOperations, + NotebookOperations, + NotebookOperationResult, PipelineOperations, PipelineRunOperations, - TriggerOperations, - TriggerRunOperations, - DataFlowOperations, - DataFlowDebugSession, - SqlScriptOperations, SparkJobDefinitionOperations, - NotebookOperations, - WorkspaceOperations, SqlPools, - BigDataPools, - IntegrationRuntimes, - Library, - WorkspaceGitRepoManagement + SqlScriptOperations, + TriggerOperations, + TriggerRunOperations, + WorkspaceOperations } from "./operationsInterfaces"; import { ArtifactsClientContext } from "./artifactsClientContext"; import { ArtifactsClientOptionalParams } from "./models"; @@ -62,42 +70,52 @@ export class ArtifactsClient extends ArtifactsClientContext { options?: ArtifactsClientOptionalParams ) { super(credentials, endpoint, options); - this.linkedServiceOperations = new LinkedServiceOperationsImpl(this); + this.kqlScripts = new KqlScriptsImpl(this); + this.kqlScriptOperations = new KqlScriptOperationsImpl(this); + this.sparkConfigurationOperations = new SparkConfigurationOperationsImpl( + this + ); + this.bigDataPools = new BigDataPoolsImpl(this); + this.dataFlowOperations = new DataFlowOperationsImpl(this); + this.dataFlowDebugSession = new DataFlowDebugSessionImpl(this); 
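This constructor wires the new operation groups onto the client exactly like the existing ones, so they are reachable as plain properties. A minimal construction sketch, with a placeholder endpoint and `DefaultAzureCredential` as one possible credential:

```ts
import { ArtifactsClient } from "@azure/synapse-artifacts";
import { DefaultAzureCredential } from "@azure/identity";

// Placeholder workspace development endpoint.
const client = new ArtifactsClient(
  new DefaultAzureCredential(),
  "https://myworkspace.dev.azuresynapse.net"
);

// Newly wired groups sit alongside the existing ones.
console.log(
  typeof client.kqlScripts,
  typeof client.kqlScriptOperations,
  typeof client.sparkConfigurationOperations,
  typeof client.notebookOperationResult
);
```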
this.datasetOperations = new DatasetOperationsImpl(this); + this.workspaceGitRepoManagement = new WorkspaceGitRepoManagementImpl(this); + this.integrationRuntimes = new IntegrationRuntimesImpl(this); + this.library = new LibraryImpl(this); + this.linkedServiceOperations = new LinkedServiceOperationsImpl(this); + this.notebookOperations = new NotebookOperationsImpl(this); + this.notebookOperationResult = new NotebookOperationResultImpl(this); this.pipelineOperations = new PipelineOperationsImpl(this); this.pipelineRunOperations = new PipelineRunOperationsImpl(this); - this.triggerOperations = new TriggerOperationsImpl(this); - this.triggerRunOperations = new TriggerRunOperationsImpl(this); - this.dataFlowOperations = new DataFlowOperationsImpl(this); - this.dataFlowDebugSession = new DataFlowDebugSessionImpl(this); - this.sqlScriptOperations = new SqlScriptOperationsImpl(this); this.sparkJobDefinitionOperations = new SparkJobDefinitionOperationsImpl( this ); - this.notebookOperations = new NotebookOperationsImpl(this); - this.workspaceOperations = new WorkspaceOperationsImpl(this); this.sqlPools = new SqlPoolsImpl(this); - this.bigDataPools = new BigDataPoolsImpl(this); - this.integrationRuntimes = new IntegrationRuntimesImpl(this); - this.library = new LibraryImpl(this); - this.workspaceGitRepoManagement = new WorkspaceGitRepoManagementImpl(this); + this.sqlScriptOperations = new SqlScriptOperationsImpl(this); + this.triggerOperations = new TriggerOperationsImpl(this); + this.triggerRunOperations = new TriggerRunOperationsImpl(this); + this.workspaceOperations = new WorkspaceOperationsImpl(this); } - linkedServiceOperations: LinkedServiceOperations; + kqlScripts: KqlScripts; + kqlScriptOperations: KqlScriptOperations; + sparkConfigurationOperations: SparkConfigurationOperations; + bigDataPools: BigDataPools; + dataFlowOperations: DataFlowOperations; + dataFlowDebugSession: DataFlowDebugSession; datasetOperations: DatasetOperations; + workspaceGitRepoManagement: WorkspaceGitRepoManagement; + integrationRuntimes: IntegrationRuntimes; + library: Library; + linkedServiceOperations: LinkedServiceOperations; + notebookOperations: NotebookOperations; + notebookOperationResult: NotebookOperationResult; pipelineOperations: PipelineOperations; pipelineRunOperations: PipelineRunOperations; + sparkJobDefinitionOperations: SparkJobDefinitionOperations; + sqlPools: SqlPools; + sqlScriptOperations: SqlScriptOperations; triggerOperations: TriggerOperations; triggerRunOperations: TriggerRunOperations; - dataFlowOperations: DataFlowOperations; - dataFlowDebugSession: DataFlowDebugSession; - sqlScriptOperations: SqlScriptOperations; - sparkJobDefinitionOperations: SparkJobDefinitionOperations; - notebookOperations: NotebookOperations; workspaceOperations: WorkspaceOperations; - sqlPools: SqlPools; - bigDataPools: BigDataPools; - integrationRuntimes: IntegrationRuntimes; - library: Library; - workspaceGitRepoManagement: WorkspaceGitRepoManagement; } diff --git a/sdk/synapse/synapse-artifacts/src/artifactsClientContext.ts b/sdk/synapse/synapse-artifacts/src/artifactsClientContext.ts index 0aac2b616cd5..95b50e39c523 100644 --- a/sdk/synapse/synapse-artifacts/src/artifactsClientContext.ts +++ b/sdk/synapse/synapse-artifacts/src/artifactsClientContext.ts @@ -12,7 +12,6 @@ import { ArtifactsClientOptionalParams } from "./models"; export class ArtifactsClientContext extends coreClient.ServiceClient { endpoint: string; - apiVersion: string; /** * Initializes a new instance of the ArtifactsClientContext class. 
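Since every `list*ByWorkspace` method on the groups wired above returns a `PagedAsyncIterableIterator`, consumption is uniform across resource types. A sketch, assuming a client constructed as in the earlier examples:

```ts
import type { ArtifactsClient } from "@azure/synapse-artifacts";
declare const client: ArtifactsClient; // constructed as in the sketches above

// Iterate element by element...
for await (const script of client.sqlScriptOperations.listSqlScriptsByWorkspace()) {
  console.log(script.name);
}

// ...or page by page; the iterator follows each nextLink behind the scenes.
for await (const page of client.triggerOperations.listTriggersByWorkspace().byPage()) {
  console.log(`received a page of ${page.length} triggers`);
}
```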
@@ -62,8 +61,5 @@ export class ArtifactsClientContext extends coreClient.ServiceClient { super(optionsWithDefaults); // Parameter assignments this.endpoint = endpoint; - - // Assigning values to Constant parameters - this.apiVersion = options.apiVersion || "2019-06-01-preview"; } } diff --git a/sdk/synapse/synapse-artifacts/src/models/index.ts b/sdk/synapse/synapse-artifacts/src/models/index.ts index 2d8f3c8ee07e..d3aaaba7d935 100644 --- a/sdk/synapse/synapse-artifacts/src/models/index.ts +++ b/sdk/synapse/synapse-artifacts/src/models/index.ts @@ -8,6 +8,108 @@ import * as coreClient from "@azure/core-client"; +export type DataFlowUnion = DataFlow | MappingDataFlow; +export type IntegrationRuntimeUnion = + | IntegrationRuntime + | ManagedIntegrationRuntime + | SelfHostedIntegrationRuntime; +export type DatasetUnion = + | Dataset + | AmazonS3Dataset + | AvroDataset + | ExcelDataset + | ParquetDataset + | DelimitedTextDataset + | JsonDataset + | XmlDataset + | OrcDataset + | BinaryDataset + | AzureBlobDataset + | AzureTableDataset + | AzureSqlTableDataset + | AzureSqlMITableDataset + | AzureSqlDWTableDataset + | CassandraTableDataset + | CustomDataset + | CosmosDbSqlApiCollectionDataset + | DocumentDbCollectionDataset + | DynamicsEntityDataset + | DynamicsCrmEntityDataset + | CommonDataServiceForAppsEntityDataset + | AzureDataLakeStoreDataset + | AzureBlobFSDataset + | Office365Dataset + | FileShareDataset + | MongoDbCollectionDataset + | MongoDbAtlasCollectionDataset + | MongoDbV2CollectionDataset + | CosmosDbMongoDbApiCollectionDataset + | ODataResourceDataset + | OracleTableDataset + | AmazonRdsForOracleTableDataset + | TeradataTableDataset + | AzureMySqlTableDataset + | AmazonRedshiftTableDataset + | Db2TableDataset + | RelationalTableDataset + | InformixTableDataset + | OdbcTableDataset + | MySqlTableDataset + | PostgreSqlTableDataset + | MicrosoftAccessTableDataset + | SalesforceObjectDataset + | SalesforceServiceCloudObjectDataset + | SybaseTableDataset + | SapBwCubeDataset + | SapCloudForCustomerResourceDataset + | SapEccResourceDataset + | SapHanaTableDataset + | SapOpenHubTableDataset + | SqlServerTableDataset + | AmazonRdsForSqlServerTableDataset + | RestResourceDataset + | SapTableResourceDataset + | WebTableDataset + | AzureSearchIndexDataset + | HttpDataset + | AmazonMWSObjectDataset + | AzurePostgreSqlTableDataset + | ConcurObjectDataset + | CouchbaseTableDataset + | DrillTableDataset + | EloquaObjectDataset + | GoogleBigQueryObjectDataset + | GreenplumTableDataset + | HBaseObjectDataset + | HiveObjectDataset + | HubspotObjectDataset + | ImpalaObjectDataset + | JiraObjectDataset + | MagentoObjectDataset + | MariaDBTableDataset + | AzureMariaDBTableDataset + | MarketoObjectDataset + | PaypalObjectDataset + | PhoenixObjectDataset + | PrestoObjectDataset + | QuickBooksObjectDataset + | ServiceNowObjectDataset + | ShopifyObjectDataset + | SparkObjectDataset + | SquareObjectDataset + | XeroObjectDataset + | ZohoObjectDataset + | NetezzaTableDataset + | VerticaTableDataset + | SalesforceMarketingCloudObjectDataset + | ResponsysObjectDataset + | DynamicsAXResourceDataset + | OracleServiceCloudObjectDataset + | AzureDataExplorerTableDataset + | GoogleAdWordsObjectDataset + | SnowflakeDataset + | SharePointOnlineListResourceDataset + | AzureDatabricksDeltaLakeDataset; export type LinkedServiceUnion = | LinkedService | AzureStorageLinkedService @@ -15,6 +117,7 @@ export type LinkedServiceUnion = | AzureTableStorageLinkedService | AzureSqlDWLinkedService | SqlServerLinkedService + | 
AmazonRdsForSqlServerLinkedService | AzureSqlDatabaseLinkedService | AzureSqlMILinkedService | AzureBatchLinkedService @@ -28,6 +131,7 @@ export type LinkedServiceUnion = | AzureFileStorageLinkedService | GoogleCloudStorageLinkedService | OracleLinkedService + | AmazonRdsForOracleLinkedService | AzureMySqlLinkedService | MySqlLinkedService | PostgreSqlLinkedService @@ -108,101 +212,6 @@ export type LinkedServiceUnion = | AzureFunctionLinkedService | SnowflakeLinkedService | SharePointOnlineListLinkedService; -export type DatasetUnion = - | Dataset - | AmazonS3Dataset - | AvroDataset - | ExcelDataset - | ParquetDataset - | DelimitedTextDataset - | JsonDataset - | XmlDataset - | OrcDataset - | BinaryDataset - | AzureBlobDataset - | AzureTableDataset - | AzureSqlTableDataset - | AzureSqlMITableDataset - | AzureSqlDWTableDataset - | CassandraTableDataset - | CustomDataset - | CosmosDbSqlApiCollectionDataset - | DocumentDbCollectionDataset - | DynamicsEntityDataset - | DynamicsCrmEntityDataset - | CommonDataServiceForAppsEntityDataset - | AzureDataLakeStoreDataset - | AzureBlobFSDataset - | Office365Dataset - | FileShareDataset - | MongoDbCollectionDataset - | MongoDbAtlasCollectionDataset - | MongoDbV2CollectionDataset - | CosmosDbMongoDbApiCollectionDataset - | ODataResourceDataset - | OracleTableDataset - | TeradataTableDataset - | AzureMySqlTableDataset - | AmazonRedshiftTableDataset - | Db2TableDataset - | RelationalTableDataset - | InformixTableDataset - | OdbcTableDataset - | MySqlTableDataset - | PostgreSqlTableDataset - | MicrosoftAccessTableDataset - | SalesforceObjectDataset - | SalesforceServiceCloudObjectDataset - | SybaseTableDataset - | SapBwCubeDataset - | SapCloudForCustomerResourceDataset - | SapEccResourceDataset - | SapHanaTableDataset - | SapOpenHubTableDataset - | SqlServerTableDataset - | RestResourceDataset - | SapTableResourceDataset - | WebTableDataset - | AzureSearchIndexDataset - | HttpDataset - | AmazonMWSObjectDataset - | AzurePostgreSqlTableDataset - | ConcurObjectDataset - | CouchbaseTableDataset - | DrillTableDataset - | EloquaObjectDataset - | GoogleBigQueryObjectDataset - | GreenplumTableDataset - | HBaseObjectDataset - | HiveObjectDataset - | HubspotObjectDataset - | ImpalaObjectDataset - | JiraObjectDataset - | MagentoObjectDataset - | MariaDBTableDataset - | AzureMariaDBTableDataset - | MarketoObjectDataset - | PaypalObjectDataset - | PhoenixObjectDataset - | PrestoObjectDataset - | QuickBooksObjectDataset - | ServiceNowObjectDataset - | ShopifyObjectDataset - | SparkObjectDataset - | SquareObjectDataset - | XeroObjectDataset - | ZohoObjectDataset - | NetezzaTableDataset - | VerticaTableDataset - | SalesforceMarketingCloudObjectDataset - | ResponsysObjectDataset - | DynamicsAXResourceDataset - | OracleServiceCloudObjectDataset - | AzureDataExplorerTableDataset - | GoogleAdWordsObjectDataset - | SnowflakeDataset - | SharePointOnlineListResourceDataset - | AzureDatabricksDeltaLakeDataset; export type ActivityUnion = | Activity | ControlActivityUnion @@ -214,11 +223,6 @@ export type TriggerUnion = | MultiplePipelineTriggerUnion | TumblingWindowTrigger | ChainingTrigger; -export type DataFlowUnion = DataFlow | MappingDataFlow; -export type IntegrationRuntimeUnion = - | IntegrationRuntime - | ManagedIntegrationRuntime - | SelfHostedIntegrationRuntime; export type SecretBaseUnion = | SecretBase | SecureString @@ -321,6 +325,7 @@ export type CopySourceUnion = | HdfsSource | AzureDataExplorerSource | OracleSource + | AmazonRdsForOracleSource | WebSource | 
MongoDbSource | MongoDbAtlasSource @@ -453,6 +458,7 @@ export type TabularSourceUnion = | SapTableSource | SqlSource | SqlServerSource + | AmazonRdsForSqlServerSource | AzureSqlSource | SqlMISource | SqlDWSource @@ -498,163 +504,136 @@ export type TriggerDependencyReferenceUnion = | TriggerDependencyReference | TumblingWindowTriggerDependencyReference; -/** A list of linked service resources. */ -export interface LinkedServiceListResponse { - /** List of linked services. */ - value: LinkedServiceResource[]; - /** The link to the next page of results, if any remaining results exist. */ +export interface KqlScriptsResourceCollectionResponse { + value?: KqlScriptResource[]; nextLink?: string; } -/** Common fields that are returned in the response for all Azure Resource Manager resources */ -export interface Resource { +export interface KqlScriptResource { + id?: string; + name?: string; + type?: string; + /** Properties of sql script. */ + properties?: KqlScript; +} + +export interface KqlScript { + content?: KqlScriptContent; +} + +export interface KqlScriptContent { + query?: string; + metadata?: KqlScriptContentMetadata; + currentConnection?: KqlScriptContentCurrentConnection; +} + +export interface KqlScriptContentMetadata { + language?: string; +} + +export interface KqlScriptContentCurrentConnection { + name?: string; + type?: string; +} + +/** Contains details when the response code indicates an error. */ +export interface ErrorContract { + /** The error details. */ + error?: ErrorResponse; +} + +/** Common error response for all Azure Resource Manager APIs to return error details for failed operations. (This also follows the OData error response format.) */ +export interface ErrorResponse { /** - * Fully qualified resource ID for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + * The error code. * NOTE: This property will not be serialized. It can only be populated by the server. */ - readonly id?: string; + readonly code?: string; /** - * The name of the resource + * The error message. * NOTE: This property will not be serialized. It can only be populated by the server. */ - readonly name?: string; + readonly message?: string; /** - * The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" + * The error target. * NOTE: This property will not be serialized. It can only be populated by the server. */ - readonly type?: string; + readonly target?: string; + /** + * The error details. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly details?: ErrorResponse[]; + /** + * The error additional info. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly additionalInfo?: ErrorAdditionalInfo[]; } -/** The Azure Synapse nested object which contains the information and credential which can be used to connect with related store or compute resource. 
*/ -export interface LinkedService { - /** Polymorphic discriminator, which specifies the different types this object can be */ - type: - | "AzureStorage" - | "AzureBlobStorage" - | "AzureTableStorage" - | "AzureSqlDW" - | "SqlServer" - | "AzureSqlDatabase" - | "AzureSqlMI" - | "AzureBatch" - | "AzureKeyVault" - | "CosmosDb" - | "Dynamics" - | "DynamicsCrm" - | "CommonDataServiceForApps" - | "HDInsight" - | "FileServer" - | "AzureFileStorage" - | "GoogleCloudStorage" - | "Oracle" - | "AzureMySql" - | "MySql" - | "PostgreSql" - | "Sybase" - | "Db2" - | "Teradata" - | "AzureML" - | "AzureMLService" - | "Odbc" - | "Informix" - | "MicrosoftAccess" - | "Hdfs" - | "OData" - | "Web" - | "Cassandra" - | "MongoDb" - | "MongoDbAtlas" - | "MongoDbV2" - | "CosmosDbMongoDbApi" - | "AzureDataLakeStore" - | "AzureBlobFS" - | "Office365" - | "Salesforce" - | "SalesforceServiceCloud" - | "SapCloudForCustomer" - | "SapEcc" - | "SapOpenHub" - | "RestService" - | "AmazonS3" - | "AmazonRedshift" - | "CustomDataSource" - | "AzureSearch" - | "HttpServer" - | "FtpServer" - | "Sftp" - | "SapBW" - | "SapHana" - | "AmazonMWS" - | "AzurePostgreSql" - | "Concur" - | "Couchbase" - | "Drill" - | "Eloqua" - | "GoogleBigQuery" - | "Greenplum" - | "HBase" - | "Hive" - | "Hubspot" - | "Impala" - | "Jira" - | "Magento" - | "MariaDB" - | "AzureMariaDB" - | "Marketo" - | "Paypal" - | "Phoenix" - | "Presto" - | "QuickBooks" - | "ServiceNow" - | "Shopify" - | "Spark" - | "Square" - | "Xero" - | "Zoho" - | "Vertica" - | "Netezza" - | "SalesforceMarketingCloud" - | "HDInsightOnDemand" - | "AzureDataLakeAnalytics" - | "AzureDatabricks" - | "AzureDatabricksDeltaLake" - | "Responsys" - | "DynamicsAX" - | "OracleServiceCloud" - | "GoogleAdWords" - | "SapTable" - | "AzureDataExplorer" - | "AzureFunction" - | "Snowflake" - | "SharePointOnlineList"; - /** Describes unknown properties. The value of an unknown property can be of "any" type. */ - [property: string]: any; - /** The integration runtime reference. */ - connectVia?: IntegrationRuntimeReference; - /** Linked service description. */ - description?: string; - /** Parameters for linked service. */ - parameters?: { [propertyName: string]: ParameterSpecification }; - /** List of tags that can be used for describing the linked service. */ - annotations?: any[]; +/** The resource management error additional info. */ +export interface ErrorAdditionalInfo { + /** + * The additional info type. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly type?: string; + /** + * The additional info. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly info?: any; } -/** Integration runtime reference type. */ -export interface IntegrationRuntimeReference { - /** Type of integration runtime. */ - type: IntegrationRuntimeReferenceType; - /** Reference integration runtime name. */ - referenceName: string; - /** Arguments for integration runtime. */ - parameters?: { [propertyName: string]: any }; +/** Request body structure for rename artifact. */ +export interface ArtifactRenameRequest { + /** New name of the artifact. */ + newName?: string; } -/** Definition of a single parameter for an entity. */ -export interface ParameterSpecification { - /** Parameter type. */ - type: ParameterType; - /** Default value of parameter. */ - defaultValue?: any; +/** A list of sparkconfiguration resources. */ +export interface SparkConfigurationListResponse { + /** List of sparkconfigurations. 
   */
+  value: SparkConfigurationResource[];
+  /** The link to the next page of results, if any remaining results exist. */
+  nextLink?: string;
+}
+
+/** Common fields that are returned in the response for all Azure Resource Manager resources */
+export interface Resource {
+  /**
+   * Fully qualified resource ID for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
+   * NOTE: This property will not be serialized. It can only be populated by the server.
+   */
+  readonly id?: string;
+  /**
+   * The name of the resource
+   * NOTE: This property will not be serialized. It can only be populated by the server.
+   */
+  readonly name?: string;
+  /**
+   * The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
+   * NOTE: This property will not be serialized. It can only be populated by the server.
+   */
+  readonly type?: string;
+}
+
+/** SparkConfiguration Artifact information */
+export interface SparkConfiguration {
+  /** Description of the SparkConfiguration. */
+  description?: string;
+  /** SparkConfiguration configs. */
+  configs: { [propertyName: string]: string };
+  /** Annotations for SparkConfiguration. */
+  annotations?: string[];
+  /** Additional notes. */
+  notes?: string;
+  /** The identity that created the resource. */
+  createdBy?: string;
+  /** The timestamp of resource creation. */
+  created?: Date;
+  /** SparkConfiguration configMergeRule. */
+  configMergeRule?: { [propertyName: string]: string };
 }
 
 /** The object that defines the structure of an Azure Synapse error response. */
@@ -669,39 +648,216 @@ export interface CloudError {
   /** Error code. */
   code: string;
   /** Error message. */
   message: string;
   /** Property name/path in request associated with error. */
   target?: string;
   /** Array with additional error details. */
   details?: CloudError[];
 }
 
-/** Request body structure for rename artifact. */
-export interface ArtifactRenameRequest {
-  /** New name of the artifact. */
-  newName?: string;
+/** Collection of Big Data pool information */
+export interface BigDataPoolResourceInfoListResult {
+  /** Link to the next page of results */
+  nextLink?: string;
+  /** List of Big Data pools */
+  value?: BigDataPoolResourceInfo[];
 }
 
-/** A list of dataset resources. */
-export interface DatasetListResponse {
-  /** List of datasets. */
-  value: DatasetResource[];
-  /** The link to the next page of results, if any remaining results exist. */
-  nextLink?: string;
+/** Auto-scaling properties of a Big Data pool powered by Apache Spark */
+export interface AutoScaleProperties {
+  /** The minimum number of nodes the Big Data pool can support. */
+  minNodeCount?: number;
+  /** Whether automatic scaling is enabled for the Big Data pool. */
+  enabled?: boolean;
+  /** The maximum number of nodes the Big Data pool can support. */
+  maxNodeCount?: number;
 }
 
-/** The Azure Data Factory nested object which identifies data within different data stores, such as tables, files, folders, and documents. */
-export interface Dataset {
+/** Auto-pausing properties of a Big Data pool powered by Apache Spark */
+export interface AutoPauseProperties {
+  /** Number of minutes of idle time before the Big Data pool is automatically paused. */
+  delayInMinutes?: number;
+  /** Whether auto-pausing is enabled for the Big Data pool. */
+  enabled?: boolean;
+}
+
+/** Dynamic Executor Allocation Properties */
+export interface DynamicExecutorAllocation {
+  /** Indicates whether Dynamic Executor Allocation is enabled or not.
*/ + enabled?: boolean; +} + +/** Library requirements for a Big Data pool powered by Apache Spark */ +export interface LibraryRequirements { + /** + * The last update time of the library requirements file. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly time?: Date; + /** The library requirements. */ + content?: string; + /** The filename of the library requirements file. */ + filename?: string; +} + +/** Library/package information of a Big Data pool powered by Apache Spark */ +export interface LibraryInfo { + /** Name of the library. */ + name?: string; + /** Storage blob path of library. */ + path?: string; + /** Storage blob container name. */ + containerName?: string; + /** The last update time of the library. */ + uploadedTimestamp?: Date; + /** Type of the library. */ + type?: string; + /** + * Provisioning status of the library/package. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly provisioningStatus?: string; + /** + * Creator Id of the library/package. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly creatorId?: string; +} + +/** Azure Synapse nested object which contains a flow with data movements and transformations. */ +export interface DataFlow { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: - | "AmazonS3Object" - | "Avro" - | "Excel" - | "Parquet" - | "DelimitedText" - | "Json" - | "Xml" - | "Orc" - | "Binary" - | "AzureBlob" - | "AzureTable" - | "AzureSqlTable" - | "AzureSqlMITable" - | "AzureSqlDWTable" - | "CassandraTable" + type: "MappingDataFlow"; + /** The description of the data flow. */ + description?: string; + /** List of tags that can be used for describing the data flow. */ + annotations?: any[]; + /** The folder that this data flow is in. If not specified, Data flow will appear at the root level. */ + folder?: DataFlowFolder; +} + +/** The folder that this data flow is in. If not specified, Data flow will appear at the root level. */ +export interface DataFlowFolder { + /** The name of the folder that this data flow is in. */ + name?: string; +} + +/** The object that defines the structure of an Azure Synapse error response. */ +export interface CloudErrorAutoGenerated { + /** Error code. */ + code: string; + /** Error message. */ + message: string; + /** Property name/path in request associated with error. */ + target?: string; + /** Array with additional error details. */ + details?: CloudErrorAutoGenerated[]; +} + +/** A list of data flow resources. */ +export interface DataFlowListResponse { + /** List of data flows. */ + value: DataFlowResource[]; + /** The link to the next page of results, if any remaining results exist. */ + nextLink?: string; +} + +/** Request body structure for creating data flow debug session. */ +export interface CreateDataFlowDebugSessionRequest { + /** Compute type of the cluster. The value will be overwritten by the same setting in integration runtime if provided. */ + computeType?: string; + /** Core count of the cluster. The value will be overwritten by the same setting in integration runtime if provided. */ + coreCount?: number; + /** Time to live setting of the cluster in minutes. */ + timeToLive?: number; + /** Set to use integration runtime setting for data flow debug session. */ + integrationRuntime?: IntegrationRuntimeDebugResource; +} + +/** Azure Synapse nested debug resource. 
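+ *
+ * As a usage sketch, the debug session request defined above can be populated
+ * like this (all values are illustrative):
+ *
+ *   const request: CreateDataFlowDebugSessionRequest = {
+ *     computeType: "General",
+ *     coreCount: 8,
+ *     timeToLive: 60
+ *   };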
   */
+export interface SubResourceDebugResource {
+  /** The resource name. */
+  name?: string;
+}
+
+/** Azure Synapse nested object which serves as a compute resource for activities. */
+export interface IntegrationRuntime {
+  /** Polymorphic discriminator, which specifies the different types this object can be */
+  type: "Managed" | "SelfHosted";
+  /** Describes unknown properties. The value of an unknown property can be of "any" type. */
+  [property: string]: any;
+  /** Integration runtime description. */
+  description?: string;
+}
+
+/** Response body structure for creating data flow debug session. */
+export interface CreateDataFlowDebugSessionResponse {
+  /** The ID of data flow debug session. */
+  sessionId?: string;
+}
+
+/** A list of active debug sessions. */
+export interface QueryDataFlowDebugSessionsResponse {
+  /** Array with all active debug sessions. */
+  value?: DataFlowDebugSessionInfo[];
+  /** The link to the next page of results, if any remaining results exist. */
+  nextLink?: string;
+}
+
+/** Data flow debug session info. */
+export interface DataFlowDebugSessionInfo {
+  /** Describes unknown properties. The value of an unknown property can be of "any" type. */
+  [property: string]: any;
+  /** The name of the data flow. */
+  dataFlowName?: string;
+  /** Compute type of the cluster. */
+  computeType?: string;
+  /** Core count of the cluster. */
+  coreCount?: number;
+  /** Node count of the cluster. (deprecated property) */
+  nodeCount?: number;
+  /** Attached integration runtime name of data flow debug session. */
+  integrationRuntimeName?: string;
+  /** The ID of data flow debug session. */
+  sessionId?: string;
+  /** Start time of data flow debug session. */
+  startTime?: string;
+  /** Time to live of the data flow debug session, in minutes. */
+  timeToLiveInMinutes?: number;
+  /** Last activity time of data flow debug session. */
+  lastActivityTime?: string;
+}
+
+/** Request body structure for starting data flow debug session. */
+export interface DataFlowDebugPackage {
+  /** Describes unknown properties. The value of an unknown property can be of "any" type. */
+  [property: string]: any;
+  /** The ID of data flow debug session. */
+  sessionId?: string;
+  /** Data flow instance. */
+  dataFlow?: DataFlowDebugResource;
+  /** List of datasets. */
+  datasets?: DatasetDebugResource[];
+  /** List of linked services. */
+  linkedServices?: LinkedServiceDebugResource[];
+  /** Staging info for debug session. */
+  staging?: DataFlowStagingInfo;
+  /** Data flow debug settings. */
+  debugSettings?: DataFlowDebugPackageDebugSettings;
+}
+
+/** The Azure Data Factory nested object which identifies data within different data stores, such as tables, files, folders, and documents.
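+ *
+ * A minimal sketch of one concrete shape (the reference type value, names,
+ * and folder are illustrative):
+ *
+ *   const dataset: Dataset = {
+ *     type: "DelimitedText",
+ *     linkedServiceName: { type: "LinkedServiceReference", referenceName: "myStorage" },
+ *     folder: { name: "raw" }
+ *   };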
*/ +export interface Dataset { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: + | "AmazonS3Object" + | "Avro" + | "Excel" + | "Parquet" + | "DelimitedText" + | "Json" + | "Xml" + | "Orc" + | "Binary" + | "AzureBlob" + | "AzureTable" + | "AzureSqlTable" + | "AzureSqlMITable" + | "AzureSqlDWTable" + | "CassandraTable" | "CustomDataset" | "CosmosDbSqlApiCollection" | "DocumentDbCollection" @@ -718,6 +874,7 @@ export interface Dataset { | "CosmosDbMongoDbApiCollection" | "ODataResource" | "OracleTable" + | "AmazonRdsForOracleTable" | "TeradataTable" | "AzureMySqlTable" | "AmazonRedshiftTable" @@ -737,6 +894,7 @@ export interface Dataset { | "SapHanaTable" | "SapOpenHubTable" | "SqlServerTable" + | "AmazonRdsForSqlServerTable" | "RestResource" | "SapTableResource" | "WebTable" @@ -782,35 +940,534 @@ export interface Dataset { | "AzureDatabricksDeltaLakeDataset"; /** Describes unknown properties. The value of an unknown property can be of "any" type. */ [property: string]: any; - /** Dataset description. */ + /** Dataset description. */ + description?: string; + /** Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. */ + structure?: any; + /** Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. */ + schema?: any; + /** Linked service reference. */ + linkedServiceName: LinkedServiceReference; + /** Parameters for dataset. */ + parameters?: { [propertyName: string]: ParameterSpecification }; + /** List of tags that can be used for describing the Dataset. */ + annotations?: any[]; + /** The folder that this Dataset is in. If not specified, Dataset will appear at the root level. */ + folder?: DatasetFolder; +} + +/** Linked service reference type. */ +export interface LinkedServiceReference { + /** Linked service reference type. */ + type: Type; + /** Reference LinkedService name. */ + referenceName: string; + /** Arguments for LinkedService. */ + parameters?: { [propertyName: string]: any }; +} + +/** Definition of a single parameter for an entity. */ +export interface ParameterSpecification { + /** Parameter type. */ + type: ParameterType; + /** Default value of parameter. */ + defaultValue?: any; +} + +/** The folder that this Dataset is in. If not specified, Dataset will appear at the root level. */ +export interface DatasetFolder { + /** The name of the folder that this Dataset is in. */ + name?: string; +} + +/** The Azure Synapse nested object which contains the information and credential which can be used to connect with related store or compute resource. 
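+ *
+ * A minimal sketch (the connection string is a placeholder; service-specific
+ * settings such as `typeProperties` pass through the index signature below):
+ *
+ *   const linkedService: LinkedService = {
+ *     type: "AzureBlobStorage",
+ *     description: "Primary storage account",
+ *     typeProperties: { connectionString: "<connection-string>" }
+ *   };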
*/ +export interface LinkedService { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: + | "AzureStorage" + | "AzureBlobStorage" + | "AzureTableStorage" + | "AzureSqlDW" + | "SqlServer" + | "AmazonRdsForSqlServer" + | "AzureSqlDatabase" + | "AzureSqlMI" + | "AzureBatch" + | "AzureKeyVault" + | "CosmosDb" + | "Dynamics" + | "DynamicsCrm" + | "CommonDataServiceForApps" + | "HDInsight" + | "FileServer" + | "AzureFileStorage" + | "GoogleCloudStorage" + | "Oracle" + | "AmazonRdsForOracle" + | "AzureMySql" + | "MySql" + | "PostgreSql" + | "Sybase" + | "Db2" + | "Teradata" + | "AzureML" + | "AzureMLService" + | "Odbc" + | "Informix" + | "MicrosoftAccess" + | "Hdfs" + | "OData" + | "Web" + | "Cassandra" + | "MongoDb" + | "MongoDbAtlas" + | "MongoDbV2" + | "CosmosDbMongoDbApi" + | "AzureDataLakeStore" + | "AzureBlobFS" + | "Office365" + | "Salesforce" + | "SalesforceServiceCloud" + | "SapCloudForCustomer" + | "SapEcc" + | "SapOpenHub" + | "RestService" + | "AmazonS3" + | "AmazonRedshift" + | "CustomDataSource" + | "AzureSearch" + | "HttpServer" + | "FtpServer" + | "Sftp" + | "SapBW" + | "SapHana" + | "AmazonMWS" + | "AzurePostgreSql" + | "Concur" + | "Couchbase" + | "Drill" + | "Eloqua" + | "GoogleBigQuery" + | "Greenplum" + | "HBase" + | "Hive" + | "Hubspot" + | "Impala" + | "Jira" + | "Magento" + | "MariaDB" + | "AzureMariaDB" + | "Marketo" + | "Paypal" + | "Phoenix" + | "Presto" + | "QuickBooks" + | "ServiceNow" + | "Shopify" + | "Spark" + | "Square" + | "Xero" + | "Zoho" + | "Vertica" + | "Netezza" + | "SalesforceMarketingCloud" + | "HDInsightOnDemand" + | "AzureDataLakeAnalytics" + | "AzureDatabricks" + | "AzureDatabricksDeltaLake" + | "Responsys" + | "DynamicsAX" + | "OracleServiceCloud" + | "GoogleAdWords" + | "SapTable" + | "AzureDataExplorer" + | "AzureFunction" + | "Snowflake" + | "SharePointOnlineList"; + /** Describes unknown properties. The value of an unknown property can be of "any" type. */ + [property: string]: any; + /** The integration runtime reference. */ + connectVia?: IntegrationRuntimeReference; + /** Linked service description. */ + description?: string; + /** Parameters for linked service. */ + parameters?: { [propertyName: string]: ParameterSpecification }; + /** List of tags that can be used for describing the linked service. */ + annotations?: any[]; +} + +/** Integration runtime reference type. */ +export interface IntegrationRuntimeReference { + /** Type of integration runtime. */ + type: IntegrationRuntimeReferenceType; + /** Reference integration runtime name. */ + referenceName: string; + /** Arguments for integration runtime. */ + parameters?: { [propertyName: string]: any }; +} + +/** Staging info for execute data flow activity. */ +export interface DataFlowStagingInfo { + /** Staging linked service reference. */ + linkedService?: LinkedServiceReference; + /** Folder path for staging blob. */ + folderPath?: string; +} + +/** Data flow debug settings. */ +export interface DataFlowDebugPackageDebugSettings { + /** Source setting for data flow debug. */ + sourceSettings?: DataFlowSourceSetting[]; + /** Data flow parameters. */ + parameters?: { [propertyName: string]: any }; + /** Parameters for dataset. */ + datasetParameters?: any; +} + +/** Definition of data flow source setting for debug. */ +export interface DataFlowSourceSetting { + /** Describes unknown properties. The value of an unknown property can be of "any" type. */ + [property: string]: any; + /** The data flow source name. 
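+   *
+   * For example (illustrative values):
+   *
+   *   const setting: DataFlowSourceSetting = { sourceName: "source1", rowLimit: 100 };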
*/ + sourceName?: string; + /** Defines the row limit of data flow source in debug. */ + rowLimit?: number; +} + +/** Response body structure for starting data flow debug session. */ +export interface AddDataFlowToDebugSessionResponse { + /** The ID of data flow debug job version. */ + jobVersion?: string; +} + +/** Request body structure for deleting data flow debug session. */ +export interface DeleteDataFlowDebugSessionRequest { + /** The ID of data flow debug session. */ + sessionId?: string; + /** The data flow which contains the debug session. */ + dataFlowName?: string; +} + +/** Request body structure for data flow expression preview. */ +export interface DataFlowDebugCommandRequest { + /** The ID of data flow debug session. */ + sessionId?: string; + /** The command type. */ + command?: DataFlowDebugCommandType; + /** The command payload object. */ + commandPayload?: DataFlowDebugCommandPayload; +} + +/** Structure of command payload. */ +export interface DataFlowDebugCommandPayload { + /** The stream name which is used for preview. */ + streamName: string; + /** Row limits for preview response. */ + rowLimits?: number; + /** Array of column names. */ + columns?: string[]; + /** The expression which is used for preview. */ + expression?: string; +} + +/** Response body structure of data flow result for data preview, statistics or expression preview. */ +export interface DataFlowDebugCommandResponse { + /** The run status of data preview, statistics or expression preview. */ + status?: string; + /** The result data of data preview, statistics or expression preview. */ + data?: string; +} + +/** A list of dataset resources. */ +export interface DatasetListResponse { + /** List of datasets. */ + value: DatasetResource[]; + /** The link to the next page of results, if any remaining results exist. */ + nextLink?: string; +} + +export interface GitHubAccessTokenRequest { + /** The GitHub Client Id. */ + gitHubClientId: string; + /** The GitHub Access code. */ + gitHubAccessCode: string; + /** The GitHub access token base URL. */ + gitHubAccessTokenBaseUrl: string; +} + +export interface GitHubAccessTokenResponse { + gitHubAccessToken?: string; +} + +/** A list of integration runtime resources. */ +export interface IntegrationRuntimeListResponse { + /** List of integration runtimes. */ + value: IntegrationRuntimeResource[]; + /** The link to the next page of results, if any remaining results exist. */ + nextLink?: string; +} + +/** A list of Library resources. */ +export interface LibraryListResponse { + /** List of Library. */ + value: LibraryResource[]; + /** The link to the next page of results, if any remaining results exist. */ + nextLink?: string; +} + +/** Library/package properties */ +export interface LibraryResourceProperties { + /** + * Name of the library/package. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly name?: string; + /** + * Location of library/package in storage account. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly path?: string; + /** + * Container name of the library/package. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly containerName?: string; + /** + * The last update time of the library/package. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly uploadedTimestamp?: string; + /** + * Type of the library/package. 
+ * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly type?: string; + /** + * Provisioning status of the library/package. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly provisioningStatus?: string; + /** + * Creator Id of the library/package. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly creatorId?: string; +} + +/** Library resource info */ +export interface LibraryResourceInfo { + /** + * Fully qualified resource ID for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly id?: string; + /** + * record Id of the library/package. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly recordId?: number; + /** + * Provisioning status of the library/package. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly state?: string; + /** + * The creation time of the library/package. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly created?: string; + /** + * The last updated time of the library/package. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly changed?: string; + /** + * The type of the resource. E.g. LibraryArtifact + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly type?: string; + /** + * Name of the library/package. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly name?: string; + /** + * Operation Id of the operation performed on library/package. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly operationId?: string; + /** + * artifact Id of the library/package. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly artifactId?: string; +} + +/** Operation status for the operation */ +export interface OperationResult { + /** + * Operation status + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly status?: string; + /** Error code. */ + code?: string; + /** Error message. */ + message?: string; + /** Property name/path in request associated with error. */ + target?: string; + /** Array with additional error details. */ + details?: CloudErrorAutoGenerated[]; +} + +/** A list of linked service resources. */ +export interface LinkedServiceListResponse { + /** List of linked services. */ + value: LinkedServiceResource[]; + /** The link to the next page of results, if any remaining results exist. */ + nextLink?: string; +} + +/** A list of Notebook resources. */ +export interface NotebookListResponse { + /** List of Notebooks. */ + value: NotebookResource[]; + /** The link to the next page of results, if any remaining results exist. */ + nextLink?: string; +} + +/** Notebook resource type. */ +export interface NotebookResource { + /** + * Fully qualified resource Id for the resource. 
Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly id?: string; + /** The name of the resource */ + name: string; + /** + * The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly type?: string; + /** + * Resource Etag. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly etag?: string; + /** Properties of Notebook. */ + properties: Notebook; +} + +/** Notebook. */ +export interface Notebook { + /** Describes unknown properties. The value of an unknown property can be of "any" type. */ + [property: string]: any; + /** The description of the notebook. */ description?: string; - /** Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. */ - structure?: any; - /** Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. */ - schema?: any; - /** Linked service reference. */ - linkedServiceName: LinkedServiceReference; - /** Parameters for dataset. */ - parameters?: { [propertyName: string]: ParameterSpecification }; - /** List of tags that can be used for describing the Dataset. */ - annotations?: any[]; - /** The folder that this Dataset is in. If not specified, Dataset will appear at the root level. */ - folder?: DatasetFolder; + /** Big data pool reference. */ + bigDataPool?: BigDataPoolReference; + /** Session properties. */ + sessionProperties?: NotebookSessionProperties; + /** Notebook root-level metadata. */ + metadata: NotebookMetadata; + /** Notebook format (major number). Incremented between backwards incompatible changes to the notebook format. */ + nbformat: number; + /** Notebook format (minor number). Incremented for backward compatible changes to the notebook format. */ + nbformatMinor: number; + /** Array of cells of the current notebook. */ + cells: NotebookCell[]; + /** The folder that this notebook is in. If not specified, this notebook will appear at the root level. */ + folder?: NotebookFolder; } -/** Linked service reference type. */ -export interface LinkedServiceReference { - /** Linked service reference type. */ - type: Type; - /** Reference LinkedService name. */ +/** Big data pool reference. */ +export interface BigDataPoolReference { + /** Big data pool reference type. */ + type: BigDataPoolReferenceType; + /** Reference big data pool name. */ referenceName: string; - /** Arguments for LinkedService. */ - parameters?: { [propertyName: string]: any }; } -/** The folder that this Dataset is in. If not specified, Dataset will appear at the root level. */ -export interface DatasetFolder { - /** The name of the folder that this Dataset is in. */ +/** Session properties. */ +export interface NotebookSessionProperties { + /** Amount of memory to use for the driver process. */ + driverMemory: string; + /** Number of cores to use for the driver. */ + driverCores: number; + /** Amount of memory to use per executor process. */ + executorMemory: string; + /** Number of cores to use for each executor. */ + executorCores: number; + /** Number of executors to launch for this session. 
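+   *
+   * A complete example of these session properties (sizes are illustrative):
+   *
+   *   const sessionProperties: NotebookSessionProperties = {
+   *     driverMemory: "28g",
+   *     driverCores: 4,
+   *     executorMemory: "28g",
+   *     executorCores: 4,
+   *     numExecutors: 2
+   *   };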
*/ + numExecutors: number; +} + +/** Notebook root-level metadata. */ +export interface NotebookMetadata { + /** Describes unknown properties. The value of an unknown property can be of "any" type. */ + [property: string]: any; + /** Kernel information. */ + kernelspec?: NotebookKernelSpec; + /** Language info. */ + languageInfo?: NotebookLanguageInfo; +} + +/** Kernel information. */ +export interface NotebookKernelSpec { + /** Describes unknown properties. The value of an unknown property can be of "any" type. */ + [property: string]: any; + /** Name of the kernel specification. */ + name: string; + /** Name to display in UI. */ + displayName: string; +} + +/** Language info. */ +export interface NotebookLanguageInfo { + /** Describes unknown properties. The value of an unknown property can be of "any" type. */ + [property: string]: any; + /** The programming language which this kernel runs. */ + name: string; + /** The codemirror mode to use for code in this language. */ + codemirrorMode?: string; +} + +/** Notebook cell. */ +export interface NotebookCell { + /** Describes unknown properties. The value of an unknown property can be of "any" type. */ + [property: string]: any; + /** String identifying the type of cell. */ + cellType: string; + /** Cell-level metadata. */ + metadata: any; + /** Contents of the cell, represented as an array of lines. */ + source: string[]; + /** Attachments associated with the cell. */ + attachments?: any; + /** Cell-level output items. */ + outputs?: NotebookCellOutputItem[]; +} + +/** An item of the notebook cell execution output. */ +export interface NotebookCellOutputItem { + /** For output_type=stream, determines the name of stream (stdout / stderr). */ + name?: string; + /** Execution sequence number. */ + executionCount?: number; + /** Execution, display, or stream outputs. */ + outputType: CellOutputType; + /** For output_type=stream, the stream's text output, represented as a string or an array of strings. */ + text?: any; + /** Output data. Use MIME type as key, and content as value. */ + data?: any; + /** Metadata for the output item. */ + metadata?: any; +} + +/** The folder that this notebook is in. If not specified, this notebook will appear at the root level. */ +export interface NotebookFolder { + /** The name of the folder that this notebook is in. */ name?: string; } @@ -1006,457 +1663,117 @@ export interface PipelineRun { readonly runEnd?: Date; /** * The duration of a pipeline run. - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly durationInMs?: number; - /** - * The status of a pipeline run. - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly status?: string; - /** - * The message from a pipeline run. - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly message?: string; -} - -/** Provides entity name and id that started the pipeline run. */ -export interface PipelineRunInvokedBy { - /** - * Name of the entity that started the pipeline run. - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly name?: string; - /** - * The ID of the entity that started the run. - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly id?: string; - /** - * The type of the entity that started the run. - * NOTE: This property will not be serialized. It can only be populated by the server. 
- */ - readonly invokedByType?: string; -} - -/** A list activity runs. */ -export interface ActivityRunsQueryResponse { - /** List of activity runs. */ - value: ActivityRun[]; - /** The continuation token for getting the next page of results, if any remaining results exist, null otherwise. */ - continuationToken?: string; -} - -/** Information about an activity run in a pipeline. */ -export interface ActivityRun { - /** Describes unknown properties. The value of an unknown property can be of "any" type. */ - [property: string]: any; - /** - * The name of the pipeline. - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly pipelineName?: string; - /** - * The id of the pipeline run. - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly pipelineRunId?: string; - /** - * The name of the activity. - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly activityName?: string; - /** - * The type of the activity. - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly activityType?: string; - /** - * The id of the activity run. - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly activityRunId?: string; - /** - * The name of the compute linked service. - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly linkedServiceName?: string; - /** - * The status of the activity run. - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly status?: string; - /** - * The start time of the activity run in 'ISO 8601' format. - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly activityRunStart?: Date; - /** - * The end time of the activity run in 'ISO 8601' format. - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly activityRunEnd?: Date; - /** - * The duration of the activity run. - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly durationInMs?: number; - /** - * The input for the activity. - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly input?: any; - /** - * The output for the activity. - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly output?: any; - /** - * The error if any from the activity run. - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly error?: any; -} - -/** A list of trigger resources. */ -export interface TriggerListResponse { - /** List of triggers. */ - value: TriggerResource[]; - /** The link to the next page of results, if any remaining results exist. */ - nextLink?: string; -} - -/** Azure Synapse nested object which contains information about creating pipeline run */ -export interface Trigger { - /** Polymorphic discriminator, which specifies the different types this object can be */ - type: - | "RerunTumblingWindowTrigger" - | "MultiplePipelineTrigger" - | "ScheduleTrigger" - | "BlobTrigger" - | "BlobEventsTrigger" - | "CustomEventsTrigger" - | "TumblingWindowTrigger" - | "ChainingTrigger"; - /** Describes unknown properties. The value of an unknown property can be of "any" type. 
*/ - [property: string]: any; - /** Trigger description. */ - description?: string; + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly durationInMs?: number; /** - * Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. + * The status of a pipeline run. * NOTE: This property will not be serialized. It can only be populated by the server. */ - readonly runtimeState?: TriggerRuntimeState; - /** List of tags that can be used for describing the trigger. */ - annotations?: any[]; + readonly status?: string; + /** + * The message from a pipeline run. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly message?: string; } -/** Defines the response of a trigger subscription operation. */ -export interface TriggerSubscriptionOperationStatus { +/** Provides entity name and id that started the pipeline run. */ +export interface PipelineRunInvokedBy { /** - * Trigger name. + * Name of the entity that started the pipeline run. * NOTE: This property will not be serialized. It can only be populated by the server. */ - readonly triggerName?: string; + readonly name?: string; /** - * Event Subscription Status. + * The ID of the entity that started the run. * NOTE: This property will not be serialized. It can only be populated by the server. */ - readonly status?: EventSubscriptionStatus; + readonly id?: string; + /** + * The type of the entity that started the run. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly invokedByType?: string; } -/** A list of trigger runs. */ -export interface TriggerRunsQueryResponse { - /** List of trigger runs. */ - value: TriggerRun[]; +/** A list activity runs. */ +export interface ActivityRunsQueryResponse { + /** List of activity runs. */ + value: ActivityRun[]; /** The continuation token for getting the next page of results, if any remaining results exist, null otherwise. */ continuationToken?: string; } -/** Trigger runs. */ -export interface TriggerRun { +/** Information about an activity run in a pipeline. */ +export interface ActivityRun { /** Describes unknown properties. The value of an unknown property can be of "any" type. */ [property: string]: any; /** - * Trigger run id. + * The name of the pipeline. * NOTE: This property will not be serialized. It can only be populated by the server. */ - readonly triggerRunId?: string; + readonly pipelineName?: string; /** - * Trigger name. + * The id of the pipeline run. * NOTE: This property will not be serialized. It can only be populated by the server. */ - readonly triggerName?: string; + readonly pipelineRunId?: string; /** - * Trigger type. + * The name of the activity. * NOTE: This property will not be serialized. It can only be populated by the server. */ - readonly triggerType?: string; + readonly activityName?: string; /** - * Trigger run start time. + * The type of the activity. * NOTE: This property will not be serialized. It can only be populated by the server. */ - readonly triggerRunTimestamp?: Date; + readonly activityType?: string; /** - * Trigger run status. + * The id of the activity run. * NOTE: This property will not be serialized. It can only be populated by the server. */ - readonly status?: TriggerRunStatus; + readonly activityRunId?: string; /** - * Trigger error message. + * The name of the compute linked service. * NOTE: This property will not be serialized. It can only be populated by the server. 
*/ - readonly message?: string; + readonly linkedServiceName?: string; /** - * List of property name and value related to trigger run. Name, value pair depends on type of trigger. + * The status of the activity run. * NOTE: This property will not be serialized. It can only be populated by the server. */ - readonly properties?: { [propertyName: string]: string }; + readonly status?: string; /** - * List of pipeline name and run Id triggered by the trigger run. + * The start time of the activity run in 'ISO 8601' format. * NOTE: This property will not be serialized. It can only be populated by the server. */ - readonly triggeredPipelines?: { [propertyName: string]: string }; -} - -/** Azure Synapse nested object which contains a flow with data movements and transformations. */ -export interface DataFlow { - /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "MappingDataFlow"; - /** The description of the data flow. */ - description?: string; - /** List of tags that can be used for describing the data flow. */ - annotations?: any[]; - /** The folder that this data flow is in. If not specified, Data flow will appear at the root level. */ - folder?: DataFlowFolder; -} - -/** The folder that this data flow is in. If not specified, Data flow will appear at the root level. */ -export interface DataFlowFolder { - /** The name of the folder that this data flow is in. */ - name?: string; -} - -/** A list of data flow resources. */ -export interface DataFlowListResponse { - /** List of data flows. */ - value: DataFlowResource[]; - /** The link to the next page of results, if any remaining results exist. */ - nextLink?: string; -} - -/** Request body structure for creating data flow debug session. */ -export interface CreateDataFlowDebugSessionRequest { - /** The name of the data flow. */ - dataFlowName?: string; - /** The ID of existing Databricks cluster. */ - existingClusterId?: string; - /** Timeout setting for Databricks cluster. */ - clusterTimeout?: number; - /** The name of new Databricks cluster. */ - newClusterName?: string; - /** The type of new Databricks cluster. */ - newClusterNodeType?: string; - /** Data bricks linked service. */ - dataBricksLinkedService?: LinkedServiceResource; -} - -/** Response body structure for creating data flow debug session. */ -export interface CreateDataFlowDebugSessionResponse { - /** The ID of data flow debug session. */ - sessionId?: string; -} - -/** A list of active debug sessions. */ -export interface QueryDataFlowDebugSessionsResponse { - /** Array with all active debug sessions. */ - value?: DataFlowDebugSessionInfo[]; - /** The link to the next page of results, if any remaining results exist. */ - nextLink?: string; -} - -/** Data flow debug session info. */ -export interface DataFlowDebugSessionInfo { - /** Describes unknown properties. The value of an unknown property can be of "any" type. */ - [property: string]: any; - /** The name of the data flow. */ - dataFlowName?: string; - /** Compute type of the cluster. */ - computeType?: string; - /** Core count of the cluster. */ - coreCount?: number; - /** Node count of the cluster. (deprecated property) */ - nodeCount?: number; - /** Attached integration runtime name of data flow debug session. */ - integrationRuntimeName?: string; - /** The ID of data flow debug session. */ - sessionId?: string; - /** Start time of data flow debug session. */ - startTime?: string; - /** Compute type of the cluster. 
*/ - timeToLiveInMinutes?: number; - /** Last activity time of data flow debug session. */ - lastActivityTime?: string; -} - -/** Request body structure for starting data flow debug session. */ -export interface DataFlowDebugPackage { - /** Describes unknown properties. The value of an unknown property can be of "any" type. */ - [property: string]: any; - /** The ID of data flow debug session. */ - sessionId?: string; - /** Data flow instance. */ - dataFlow?: DataFlowDebugResource; - /** List of datasets. */ - datasets?: DatasetDebugResource[]; - /** List of linked services. */ - linkedServices?: LinkedServiceDebugResource[]; - /** Staging info for debug session. */ - staging?: DataFlowStagingInfo; - /** Data flow debug settings. */ - debugSettings?: DataFlowDebugPackageDebugSettings; -} - -/** Azure Synapse nested debug resource. */ -export interface SubResourceDebugResource { - /** The resource name. */ - name?: string; -} - -/** Staging info for execute data flow activity. */ -export interface DataFlowStagingInfo { - /** Staging linked service reference. */ - linkedService?: LinkedServiceReference; - /** Folder path for staging blob. */ - folderPath?: string; -} - -/** Data flow debug settings. */ -export interface DataFlowDebugPackageDebugSettings { - /** Source setting for data flow debug. */ - sourceSettings?: DataFlowSourceSetting[]; - /** Data flow parameters. */ - parameters?: { [propertyName: string]: any }; - /** Parameters for dataset. */ - datasetParameters?: any; -} - -/** Definition of data flow source setting for debug. */ -export interface DataFlowSourceSetting { - /** Describes unknown properties. The value of an unknown property can be of "any" type. */ - [property: string]: any; - /** The data flow source name. */ - sourceName?: string; - /** Defines the row limit of data flow source in debug. */ - rowLimit?: number; -} - -/** Response body structure for starting data flow debug session. */ -export interface AddDataFlowToDebugSessionResponse { - /** The ID of data flow debug job version. */ - jobVersion?: string; -} - -/** Request body structure for deleting data flow debug session. */ -export interface DeleteDataFlowDebugSessionRequest { - /** The ID of data flow debug session. */ - sessionId?: string; - /** The data flow which contains the debug session. */ - dataFlowName?: string; -} - -/** Request body structure for data flow expression preview. */ -export interface DataFlowDebugCommandRequest { - /** The ID of data flow debug session. */ - sessionId: string; - /** The data flow which contains the debug session. */ - dataFlowName?: string; - /** The command name. */ - commandName?: string; - /** The command payload object. */ - commandPayload: any; -} - -/** Response body structure of data flow result for data preview, statistics or expression preview. */ -export interface DataFlowDebugCommandResponse { - /** The run status of data preview, statistics or expression preview. */ - status?: string; - /** The result data of data preview, statistics or expression preview. */ - data?: string; -} - -/** A list of sql scripts resources. */ -export interface SqlScriptsListResponse { - /** List of sql scripts. */ - value: SqlScriptResource[]; - /** The link to the next page of results, if any remaining results exist. */ - nextLink?: string; -} - -/** Sql Script resource type. */ -export interface SqlScriptResource { + readonly activityRunStart?: Date; /** - * Fully qualified resource Id for the resource. 
Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + * The end time of the activity run in 'ISO 8601' format. * NOTE: This property will not be serialized. It can only be populated by the server. */ - readonly id?: string; - /** The name of the resource */ - name: string; + readonly activityRunEnd?: Date; + /** + * The duration of the activity run. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly durationInMs?: number; + /** + * The input for the activity. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly input?: any; /** - * The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts. + * The output for the activity. * NOTE: This property will not be serialized. It can only be populated by the server. */ - readonly type?: string; + readonly output?: any; /** - * Resource Etag. + * The error if any from the activity run. * NOTE: This property will not be serialized. It can only be populated by the server. */ - readonly etag?: string; - /** Properties of sql script. */ - properties: SqlScript; -} - -/** SQL script. */ -export interface SqlScript { - /** Describes unknown properties. The value of an unknown property can be of "any" type. */ - [property: string]: any; - /** The description of the SQL script. */ - description?: string; - /** The type of the SQL script. */ - type?: SqlScriptType; - /** The content of the SQL script. */ - content: SqlScriptContent; -} - -/** The content of the SQL script. */ -export interface SqlScriptContent { - /** Describes unknown properties. The value of an unknown property can be of "any" type. */ - [property: string]: any; - /** SQL query to execute. */ - query: string; - /** The connection used to execute the SQL script. */ - currentConnection: SqlConnection; - /** The metadata of the SQL script. */ - metadata?: SqlScriptMetadata; -} - -/** The connection used to execute the SQL script. */ -export interface SqlConnection { - /** Describes unknown properties. The value of an unknown property can be of "any" type. */ - [property: string]: any; - /** The type of the connection. */ - type: SqlConnectionType; - /** The identifier of the connection. */ - name: string; -} - -/** The metadata of the SQL script. */ -export interface SqlScriptMetadata { - /** Describes unknown properties. The value of an unknown property can be of "any" type. */ - [property: string]: any; - /** The language of the SQL script. */ - language?: string; + readonly error?: any; } /** A list of spark job definitions resources. */ @@ -1481,14 +1798,8 @@ export interface SparkJobDefinition { language?: string; /** The properties of the Spark job. */ jobProperties: SparkJobProperties; -} - -/** Big data pool reference. */ -export interface BigDataPoolReference { - /** Big data pool reference type. */ - type: BigDataPoolReferenceType; - /** Reference big data pool name. */ - referenceName: string; + /** The folder that this Spark job definition is in. If not specified, this Spark job definition will appear at the root level. */ + folder?: SparkJobDefinitionFolder; } /** The properties of the Spark job. */ @@ -1523,6 +1834,12 @@ export interface SparkJobProperties { numExecutors: number; } +/** The folder that this Spark job definition is in. If not specified, this Spark job definition will appear at the root level. 
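+ *
+ * For example (the folder name is illustrative):
+ *
+ *   const folder: SparkJobDefinitionFolder = { name: "batch-jobs" };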
*/ +export interface SparkJobDefinitionFolder { + /** The name of the folder that this Spark job definition is in. */ + name?: string; +} + export interface SparkBatchJob { livyInfo?: SparkBatchJobState; /** The batch name. */ @@ -1556,7 +1873,7 @@ export interface SparkBatchJob { /** The detailed application info. */ appInfo?: { [propertyName: string]: string }; /** The batch state */ - state?: string; + state?: LivyStates; /** The log lines. */ logLines?: string[]; } @@ -1622,16 +1939,34 @@ export interface SparkServiceError { source?: SparkErrorSource; } -/** A list of Notebook resources. */ -export interface NotebookListResponse { - /** List of Notebooks. */ - value: NotebookResource[]; +/** List of SQL pools */ +export interface SqlPoolInfoListResult { + /** Link to the next page of results */ + nextLink?: string; + /** List of SQL pools */ + value?: SqlPool[]; +} + +/** SQL pool SKU */ +export interface Sku { + /** The service tier */ + tier?: string; + /** The SKU name */ + name?: string; + /** If the SKU supports scale out/in then the capacity integer should be included. If scale out/in is not possible for the resource this may be omitted. */ + capacity?: number; +} + +/** A list of sql scripts resources. */ +export interface SqlScriptsListResponse { + /** List of sql scripts. */ + value: SqlScriptResource[]; /** The link to the next page of results, if any remaining results exist. */ nextLink?: string; } -/** Notebook resource type. */ -export interface NotebookResource { +/** Sql Script resource type. */ +export interface SqlScriptResource { /** * Fully qualified resource Id for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} * NOTE: This property will not be serialized. It can only be populated by the server. @@ -1649,104 +1984,165 @@ export interface NotebookResource { * NOTE: This property will not be serialized. It can only be populated by the server. */ readonly etag?: string; - /** Properties of Notebook. */ - properties: Notebook; + /** Properties of sql script. */ + properties: SqlScript; } -/** Notebook. */ -export interface Notebook { +/** SQL script. */ +export interface SqlScript { /** Describes unknown properties. The value of an unknown property can be of "any" type. */ [property: string]: any; - /** The description of the notebook. */ + /** The description of the SQL script. */ description?: string; - /** Big data pool reference. */ - bigDataPool?: BigDataPoolReference; - /** Session properties. */ - sessionProperties?: NotebookSessionProperties; - /** Notebook root-level metadata. */ - metadata: NotebookMetadata; - /** Notebook format (major number). Incremented between backwards incompatible changes to the notebook format. */ - nbformat: number; - /** Notebook format (minor number). Incremented for backward compatible changes to the notebook format. */ - nbformatMinor: number; - /** Array of cells of the current notebook. */ - cells: NotebookCell[]; -} - -/** Session properties. */ -export interface NotebookSessionProperties { - /** Amount of memory to use for the driver process. */ - driverMemory: string; - /** Number of cores to use for the driver. */ - driverCores: number; - /** Amount of memory to use per executor process. */ - executorMemory: string; - /** Number of cores to use for each executor. */ - executorCores: number; - /** Number of executors to launch for this session. */ - numExecutors: number; + /** The type of the SQL script. 
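+   *
+   * A minimal SqlScript sketch (the query, connection, and folder values are
+   * illustrative):
+   *
+   *   const script: SqlScript = {
+   *     content: {
+   *       query: "SELECT TOP 10 * FROM sys.tables",
+   *       currentConnection: { type: "SqlPool", name: "pool1", poolName: "pool1" }
+   *     },
+   *     folder: { name: "admin" }
+   *   };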
*/ + type?: SqlScriptType; + /** The content of the SQL script. */ + content: SqlScriptContent; + /** The folder that this SQL script is in. If not specified, this SQL script will appear at the root level. */ + folder?: SqlScriptFolder; } -/** Notebook root-level metadata. */ -export interface NotebookMetadata { +/** The content of the SQL script. */ +export interface SqlScriptContent { /** Describes unknown properties. The value of an unknown property can be of "any" type. */ [property: string]: any; - /** Kernel information. */ - kernelspec?: NotebookKernelSpec; - /** Language info. */ - languageInfo?: NotebookLanguageInfo; + /** SQL query to execute. */ + query: string; + /** The connection used to execute the SQL script. */ + currentConnection?: SqlConnection; + /** Limit of results, '-1' for no limit. */ + resultLimit?: number; + /** The metadata of the SQL script. */ + metadata?: SqlScriptMetadata; } -/** Kernel information. */ -export interface NotebookKernelSpec { +/** The connection used to execute the SQL script. */ +export interface SqlConnection { /** Describes unknown properties. The value of an unknown property can be of "any" type. */ [property: string]: any; - /** Name of the kernel specification. */ - name: string; - /** Name to display in UI. */ - displayName: string; + /** The type of the connection. */ + type?: SqlConnectionType; + /** The identifier of the connection. */ + name?: string; + /** The associated SQL pool name (supported by SQL pool v3) */ + poolName?: string; + /** The associated database name (supported by SQL pool v3) */ + databaseName?: string; } -/** Language info. */ -export interface NotebookLanguageInfo { +/** The metadata of the SQL script. */ +export interface SqlScriptMetadata { /** Describes unknown properties. The value of an unknown property can be of "any" type. */ [property: string]: any; - /** The programming language which this kernel runs. */ - name: string; - /** The codemirror mode to use for code in this language. */ - codemirrorMode?: string; + /** The language of the SQL script. */ + language?: string; } -/** Notebook cell. */ -export interface NotebookCell { +/** The folder that this SQL script is in. If not specified, this SQL script will appear at the root level. */ +export interface SqlScriptFolder { + /** The name of the folder that this SQL script is in. */ + name?: string; +} + +/** A list of trigger resources. */ +export interface TriggerListResponse { + /** List of triggers. */ + value: TriggerResource[]; + /** The link to the next page of results, if any remaining results exist. */ + nextLink?: string; +} + +/** Azure Synapse nested object which contains information about creating pipeline run */ +export interface Trigger { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: + | "RerunTumblingWindowTrigger" + | "MultiplePipelineTrigger" + | "ScheduleTrigger" + | "BlobTrigger" + | "BlobEventsTrigger" + | "CustomEventsTrigger" + | "TumblingWindowTrigger" + | "ChainingTrigger"; /** Describes unknown properties. The value of an unknown property can be of "any" type. */ [property: string]: any; - /** String identifying the type of cell. */ - cellType: string; - /** Cell-level metadata. */ - metadata: any; - /** Contents of the cell, represented as an array of lines. */ - source: string[]; - /** Attachments associated with the cell. */ - attachments?: any; - /** Cell-level output items. */ - outputs?: NotebookCellOutputItem[]; + /** Trigger description. 
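+   *
+   * A sketch of one concrete trigger payload (the recurrence settings are
+   * illustrative and pass through the index signature below):
+   *
+   *   const trigger: Trigger = {
+   *     type: "ScheduleTrigger",
+   *     description: "Hourly run",
+   *     typeProperties: { recurrence: { frequency: "Hour", interval: 1 } }
+   *   };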
*/ + description?: string; + /** + * Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly runtimeState?: TriggerRuntimeState; + /** List of tags that can be used for describing the trigger. */ + annotations?: any[]; } -/** An item of the notebook cell execution output. */ -export interface NotebookCellOutputItem { - /** For output_type=stream, determines the name of stream (stdout / stderr). */ - name?: string; - /** Execution sequence number. */ - executionCount?: number; - /** Execution, display, or stream outputs. */ - outputType: CellOutputType; - /** For output_type=stream, the stream's text output, represented as a string or an array of strings. */ - text?: any; - /** Output data. Use MIME type as key, and content as value. */ - data?: any; - /** Metadata for the output item. */ - metadata?: any; +/** Defines the response of a trigger subscription operation. */ +export interface TriggerSubscriptionOperationStatus { + /** + * Trigger name. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly triggerName?: string; + /** + * Event Subscription Status. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly status?: EventSubscriptionStatus; +} + +/** A list of trigger runs. */ +export interface TriggerRunsQueryResponse { + /** List of trigger runs. */ + value: TriggerRun[]; + /** The continuation token for getting the next page of results, if any remaining results exist, null otherwise. */ + continuationToken?: string; +} + +/** Trigger runs. */ +export interface TriggerRun { + /** Describes unknown properties. The value of an unknown property can be of "any" type. */ + [property: string]: any; + /** + * Trigger run id. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly triggerRunId?: string; + /** + * Trigger name. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly triggerName?: string; + /** + * Trigger type. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly triggerType?: string; + /** + * Trigger run start time. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly triggerRunTimestamp?: Date; + /** + * Trigger run status. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly status?: TriggerRunStatus; + /** + * Trigger error message. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly message?: string; + /** + * List of property name and value related to trigger run. Name, value pair depends on type of trigger. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly properties?: { [propertyName: string]: string }; + /** + * List of pipeline name and run Id triggered by the trigger run. + * NOTE: This property will not be serialized. It can only be populated by the server. 
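+   *
+   * For example, given a returned `run: TriggerRun` (variable name assumed):
+   *
+   *   for (const [pipeline, runId] of Object.entries(run.triggeredPipelines ?? {})) {
+   *     console.log(`${pipeline} -> ${runId}`);
+   *   }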
+ */ + readonly triggeredPipelines?: { [propertyName: string]: string }; } /** Details of the data lake storage account associated with the workspace */ @@ -1858,312 +2254,27 @@ export interface GitHubClientSecret { /** Bring your own app client secret name in AKV */ byoaSecretName?: string; } - -/** Purview Configuration */ -export interface PurviewConfiguration { - /** Purview Resource ID */ - purviewResourceId?: string; -} - -/** The workspace managed identity */ -export interface ManagedIdentity { - /** - * The principal ID of the workspace managed identity - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly principalId?: string; - /** - * The tenant ID of the workspace managed identity - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly tenantId?: string; - /** The type of managed identity for the workspace */ - type?: ResourceIdentityType; -} - -/** Contains details when the response code indicates an error. */ -export interface ErrorContract { - /** The error details. */ - error?: ErrorResponse; -} - -/** Common error response for all Azure Resource Manager APIs to return error details for failed operations. (This also follows the OData error response format.) */ -export interface ErrorResponse { - /** - * The error code. - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly code?: string; - /** - * The error message. - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly message?: string; - /** - * The error target. - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly target?: string; - /** - * The error details. - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly details?: ErrorResponse[]; - /** - * The error additional info. - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly additionalInfo?: ErrorAdditionalInfo[]; -} - -/** The resource management error additional info. */ -export interface ErrorAdditionalInfo { - /** - * The additional info type. - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly type?: string; - /** - * The additional info. - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly info?: any; -} - -/** List of SQL pools */ -export interface SqlPoolInfoListResult { - /** Link to the next page of results */ - nextLink?: string; - /** List of SQL pools */ - value?: SqlPool[]; -} - -/** SQL pool SKU */ -export interface Sku { - /** The service tier */ - tier?: string; - /** The SKU name */ - name?: string; - /** If the SKU supports scale out/in then the capacity integer should be included. If scale out/in is not possible for the resource this may be omitted. */ - capacity?: number; -} - -/** Collection of Big Data pool information */ -export interface BigDataPoolResourceInfoListResult { - /** Link to the next page of results */ - nextLink?: string; - /** List of Big Data pools */ - value?: BigDataPoolResourceInfo[]; -} - -/** Auto-scaling properties of a Big Data pool powered by Apache Spark */ -export interface AutoScaleProperties { - /** The minimum number of nodes the Big Data pool can support. */ - minNodeCount?: number; - /** Whether automatic scaling is enabled for the Big Data pool. 
*/ - enabled?: boolean; - /** The maximum number of nodes the Big Data pool can support. */ - maxNodeCount?: number; -} - -/** Auto-pausing properties of a Big Data pool powered by Apache Spark */ -export interface AutoPauseProperties { - /** Number of minutes of idle time before the Big Data pool is automatically paused. */ - delayInMinutes?: number; - /** Whether auto-pausing is enabled for the Big Data pool. */ - enabled?: boolean; -} - -/** Dynamic Executor Allocation Properties */ -export interface DynamicExecutorAllocation { - /** Indicates whether Dynamic Executor Allocation is enabled or not. */ - enabled?: boolean; -} - -/** Library requirements for a Big Data pool powered by Apache Spark */ -export interface LibraryRequirements { - /** - * The last update time of the library requirements file. - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly time?: Date; - /** The library requirements. */ - content?: string; - /** The filename of the library requirements file. */ - filename?: string; -} - -/** Library/package information of a Big Data pool powered by Apache Spark */ -export interface LibraryInfo { - /** Name of the library. */ - name?: string; - /** Storage blob path of library. */ - path?: string; - /** Storage blob container name. */ - containerName?: string; - /** - * The last update time of the library. - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly uploadedTimestamp?: Date; - /** Type of the library. */ - type?: string; - /** - * Provisioning status of the library/package. - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly provisioningStatus?: string; - /** - * Creator Id of the library/package. - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly creatorId?: string; -} - -/** A list of integration runtime resources. */ -export interface IntegrationRuntimeListResponse { - /** List of integration runtimes. */ - value: IntegrationRuntimeResource[]; - /** The link to the next page of results, if any remaining results exist. */ - nextLink?: string; -} - -/** Azure Synapse nested object which serves as a compute resource for activities. */ -export interface IntegrationRuntime { - /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Managed" | "SelfHosted"; - /** Describes unknown properties. The value of an unknown property can be of "any" type. */ - [property: string]: any; - /** Integration runtime description. */ - description?: string; -} - -/** A list of Library resources. */ -export interface LibraryListResponse { - /** List of Library. */ - value: LibraryResource[]; - /** The link to the next page of results, if any remaining results exist. */ - nextLink?: string; -} - -/** Library/package properties */ -export interface LibraryResourceProperties { - /** - * Name of the library/package. - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly name?: string; - /** - * Location of library/package in storage account. - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly path?: string; - /** - * Container name of the library/package. - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly containerName?: string; - /** - * The last update time of the library/package. 
- * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly uploadedTimestamp?: string; - /** - * Type of the library/package. - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly type?: string; - /** - * Provisioning status of the library/package. - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly provisioningStatus?: string; - /** - * Creator Id of the library/package. - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly creatorId?: string; + +/** Purview Configuration */ +export interface PurviewConfiguration { + /** Purview Resource ID */ + purviewResourceId?: string; } -/** Library resource info */ -export interface LibraryResourceInfo { - /** - * Fully qualified resource ID for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly id?: string; - /** - * record Id of the library/package. - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly recordId?: number; - /** - * Provisioning status of the library/package. - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly state?: string; - /** - * The creation time of the library/package. - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly created?: string; - /** - * The last updated time of the library/package. - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly changed?: string; - /** - * The type of the resource. E.g. LibraryArtifact - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly type?: string; - /** - * Name of the library/package. - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly name?: string; - /** - * Operation Id of the operation performed on library/package. - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly operationId?: string; +/** The workspace managed identity */ +export interface ManagedIdentity { /** - * artifact Id of the library/package. + * The principal ID of the workspace managed identity * NOTE: This property will not be serialized. It can only be populated by the server. */ - readonly artifactId?: string; -} - -/** Operation status for the operation */ -export interface OperationResult { + readonly principalId?: string; /** - * Operation status + * The tenant ID of the workspace managed identity * NOTE: This property will not be serialized. It can only be populated by the server. */ - readonly status?: string; - /** Error code. */ - code?: string; - /** Error message. */ - message?: string; - /** Property name/path in request associated with error. */ - target?: string; - /** Array with additional error details. */ - details?: CloudError[]; -} - -export interface GitHubAccessTokenRequest { - /** The GitHub Client Id. */ - gitHubClientId: string; - /** GitHub bring your own app client secret information. */ - gitHubClientSecret?: GitHubClientSecret; - /** The GitHub Access code. */ - gitHubAccessCode: string; - /** The GitHub access token base URL. 
*/ - gitHubAccessTokenBaseUrl: string; -} - -export interface GitHubAccessTokenResponse { - gitHubAccessToken?: string; + readonly tenantId?: string; + /** The type of managed identity for the workspace */ + type?: ResourceIdentityType; } /** Azure Synapse expression definition. */ @@ -2180,82 +2291,6 @@ export interface SecretBase { type: "SecureString" | "AzureKeyVaultSecret"; } -/** Request body structure for starting data flow debug session. */ -export interface StartDataFlowDebugSessionRequest { - /** The ID of data flow debug session. */ - sessionId?: string; - /** Data flow instance. */ - dataFlow?: DataFlowResource; - /** List of datasets. */ - datasets?: DatasetResource[]; - /** List of linked services. */ - linkedServices?: LinkedServiceResource[]; - /** Staging info for debug session. */ - staging?: any; - /** Data flow debug settings. */ - debugSettings?: any; - /** The type of new Databricks cluster. */ - incrementalDebug?: boolean; -} - -/** Response body structure for starting data flow debug session. */ -export interface StartDataFlowDebugSessionResponse { - /** The ID of data flow debug job version. */ - jobVersion?: string; -} - -/** Request body structure for data flow preview data. */ -export interface DataFlowDebugPreviewDataRequest { - /** The ID of data flow debug session. */ - sessionId?: string; - /** The data flow which contains the debug session. */ - dataFlowName?: string; - /** The output stream name. */ - streamName?: string; - /** The row limit for preview request. */ - rowLimits?: number; -} - -/** Request body structure for data flow statistics. */ -export interface DataFlowDebugStatisticsRequest { - /** The ID of data flow debug session. */ - sessionId?: string; - /** The data flow which contains the debug session. */ - dataFlowName?: string; - /** The output stream name. */ - streamName?: string; - /** List of column names. */ - columns?: string[]; -} - -/** Request body structure for data flow expression preview. */ -export interface EvaluateDataFlowExpressionRequest { - /** The ID of data flow debug session. */ - sessionId?: string; - /** The data flow which contains the debug session. */ - dataFlowName?: string; - /** The output stream name. */ - streamName?: string; - /** The row limit for preview request. */ - rowLimits?: number; - /** The expression for preview. */ - expression?: string; -} - -/** Response body structure of data flow query for data preview, statistics or expression preview. */ -export interface DataFlowDebugQueryResponse { - /** The run ID of data flow debug session. */ - runId?: string; -} - -/** Response body structure of data flow result for data preview, statistics or expression preview. */ -export interface DataFlowDebugResultResponse { - /** The run status of data preview, statistics or expression preview. */ - status?: string; - /** The result data of data preview, statistics or expression preview. */ - data?: string; -} - /** Defines the response of a provision trigger dependency operation. */ export interface TriggerDependencyProvisioningStatus { /** Trigger name. */ @@ -2413,6 +2448,82 @@ export interface SqlPoolReference { referenceName: string; } +/** Request body structure for starting data flow debug session. */ +export interface StartDataFlowDebugSessionRequest { + /** The ID of data flow debug session. */ + sessionId?: string; + /** Data flow instance. */ + dataFlow?: DataFlowResource; + /** List of datasets. */ + datasets?: DatasetResource[]; + /** List of linked services. 
   */
+  linkedServices?: LinkedServiceResource[];
+  /** Staging info for debug session. */
+  staging?: any;
+  /** Data flow debug settings. */
+  debugSettings?: any;
+  /** Indicates whether incremental debug is enabled for the session. */
+  incrementalDebug?: boolean;
+}
+
+/** Response body structure for starting data flow debug session. */
+export interface StartDataFlowDebugSessionResponse {
+  /** The ID of data flow debug job version. */
+  jobVersion?: string;
+}
+
+/** Request body structure for data flow preview data. */
+export interface DataFlowDebugPreviewDataRequest {
+  /** The ID of data flow debug session. */
+  sessionId?: string;
+  /** The data flow which contains the debug session. */
+  dataFlowName?: string;
+  /** The output stream name. */
+  streamName?: string;
+  /** The row limit for preview request. */
+  rowLimits?: number;
+}
+
+/** Request body structure for data flow statistics. */
+export interface DataFlowDebugStatisticsRequest {
+  /** The ID of data flow debug session. */
+  sessionId?: string;
+  /** The data flow which contains the debug session. */
+  dataFlowName?: string;
+  /** The output stream name. */
+  streamName?: string;
+  /** List of column names. */
+  columns?: string[];
+}
+
+/** Request body structure for data flow expression preview. */
+export interface EvaluateDataFlowExpressionRequest {
+  /** The ID of data flow debug session. */
+  sessionId?: string;
+  /** The data flow which contains the debug session. */
+  dataFlowName?: string;
+  /** The output stream name. */
+  streamName?: string;
+  /** The row limit for preview request. */
+  rowLimits?: number;
+  /** The expression for preview. */
+  expression?: string;
+}
+
+/** Response body structure of data flow query for data preview, statistics or expression preview. */
+export interface DataFlowDebugQueryResponse {
+  /** The run ID of data flow debug session. */
+  runId?: string;
+}
+
+/** Response body structure of data flow result for data preview, statistics or expression preview. */
+export interface DataFlowDebugResultResponse {
+  /** The run status of data preview, statistics or expression preview. */
+  status?: string;
+  /** The result data of data preview, statistics or expression preview. */
+  data?: string;
+}
+
 /** A data flow transformation. */
 export interface Transformation {
   /** Transformation name. */
@@ -2502,7 +2613,7 @@ export interface ScriptAction {
   /** The URI for the script action. */
   uri: string;
   /** The node types on which the script action should be executed. */
-  roles: HdiNodeTypes;
+  roles: any;
   /** The parameters for the script action. */
   parameters?: string;
 }
@@ -2645,6 +2756,7 @@ export interface CopySource {
     | "RestSource"
     | "SqlSource"
     | "SqlServerSource"
+    | "AmazonRdsForSqlServerSource"
     | "AzureSqlSource"
     | "SqlMISource"
     | "SqlDWSource"
@@ -2653,6 +2765,7 @@
     | "AzureMySqlSource"
     | "AzureDataExplorerSource"
     | "OracleSource"
+    | "AmazonRdsForOracleSource"
     | "TeradataSource"
     | "WebSource"
     | "CassandraSource"
@@ -2712,14 +2825,6 @@
   maxConcurrentConnections?: any;
 }
 
-/** Specify the column name and value of additional columns. */
-export interface AdditionalColumns {
-  /** Additional column name. Type: string (or Expression with resultType string). */
-  name?: any;
-  /** Additional column value. Type: string (or Expression with resultType string). */
-  value?: any;
-}
-
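// ---------------------------------------------------------------------------
// Editorial sketch (not part of this patch): minimal request payloads for the
// debug-session shapes defined above, assuming the models are re-exported
// from the package root. The session ID, data flow name, stream name, and
// expression are all hypothetical.
import type {
  StartDataFlowDebugSessionRequest,
  EvaluateDataFlowExpressionRequest
} from "@azure/synapse-artifacts";

const startRequest: StartDataFlowDebugSessionRequest = {
  // DataFlowResource requires a `properties` payload; a bare MappingDataFlow
  // discriminator is the smallest valid value.
  dataFlow: { properties: { type: "MappingDataFlow" } },
  datasets: [],
  linkedServices: [],
  incrementalDebug: false
};

const expressionRequest: EvaluateDataFlowExpressionRequest = {
  sessionId: "00000000-0000-0000-0000-000000000000",
  dataFlowName: "exampleDataFlow",
  streamName: "sink1",
  rowLimits: 100,
  expression: "toString(columns())"
};
// ---------------------------------------------------------------------------

 /** A copy activity sink.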
 */
 export interface CopySink {
   /** Polymorphic discriminator, which specifies the different types this object can be */
@@ -2877,13 +2982,25 @@ export interface SqlPartitionSettings {
   partitionColumnName?: any;
   /** The maximum value of the partition column for partition range splitting. This value is used to decide the partition stride, not for filtering the rows in table. All rows in the table or query result will be partitioned and copied. Type: string (or Expression with resultType string). */
   partitionUpperBound?: any;
-  /** The minimum value of the partition column for partition range splitting. This value is used to decide the partition stride, not for filtering the rows in table. All rows in the table or query result will be partitioned and copied. Type: string (or Expression with resultType string). */
+  /** The minimum value of the partition column for partition range splitting. This value is used to decide the partition stride, not for filtering the rows in table. All rows in the table or query result will be partitioned and copied. Type: string (or Expression with resultType string). */
+  partitionLowerBound?: any;
+}
+
+/** The settings that will be leveraged for Oracle source partitioning. */
+export interface OraclePartitionSettings {
+  /** Names of the physical partitions of Oracle table. */
+  partitionNames?: any;
+  /** The name of the column in integer type that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */
+  partitionColumnName?: any;
+  /** The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */
+  partitionUpperBound?: any;
+  /** The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */
   partitionLowerBound?: any;
 }
 
-/** The settings that will be leveraged for Oracle source partitioning. */
-export interface OraclePartitionSettings {
-  /** Names of the physical partitions of Oracle table. */
+/** The settings that will be leveraged for AmazonRdsForOracle source partitioning. */
+export interface AmazonRdsForOraclePartitionSettings {
+  /** Names of the physical partitions of AmazonRdsForOracle table. */
   partitionNames?: any;
   /** The name of the column in integer type that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). */
   partitionColumnName?: any;
@@ -2981,6 +3098,14 @@ export interface DWCopyCommandDefaultValue {
   defaultValue?: any;
 }
 
+/** Specify the column name and value of additional columns. */
+export interface AdditionalColumns {
+  /** Additional column name. Type: string (or Expression with resultType string). */
+  name?: any;
+  /** Additional column value. Type: string (or Expression with resultType string). */
+  value?: any;
+}
+
 /** A copy activity translator. */
 export interface CopyTranslator {
   /** Polymorphic discriminator, which specifies the different types this object can be */
@@ -3230,6 +3355,8 @@ export interface IntegrationRuntimeDataFlowProperties {
   coreCount?: number;
   /** Time to live (in minutes) setting of the cluster which will execute data flow job. */
   timeToLive?: number;
+  /** If set to false, the cluster will not be recycled and will be reused for the next data flow activity run until the TTL (time to live) is reached. Default is true. */
+  cleanup?: boolean;
 }
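
// ---------------------------------------------------------------------------
// Editorial sketch (not part of this patch): using the new
// AmazonRdsForOraclePartitionSettings and the new `cleanup` flag on
// IntegrationRuntimeDataFlowProperties, assuming both models are re-exported
// from the package root. Column names and bounds are hypothetical.
import type {
  AmazonRdsForOraclePartitionSettings,
  IntegrationRuntimeDataFlowProperties
} from "@azure/synapse-artifacts";

const partitionSettings: AmazonRdsForOraclePartitionSettings = {
  partitionColumnName: "ID", // integer column used for range partitioning
  partitionLowerBound: "1",
  partitionUpperBound: "100000"
};

const dataFlowProperties: IntegrationRuntimeDataFlowProperties = {
  computeType: "General",
  coreCount: 8,
  timeToLive: 10,
  cleanup: false // keep the cluster alive for the next run until TTL expires
};
// ---------------------------------------------------------------------------

 /** VNet properties for managed integration runtime.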
*/ @@ -3342,2956 +3469,3069 @@ export type TrackedResource = Resource & { /** The resource model definition for a Azure Resource Manager proxy resource. It will not have tags and a location */ export type ProxyResource = Resource & {}; -/** The storage account linked service. */ -export type AzureStorageLinkedService = LinkedService & { - /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "AzureStorage"; - /** The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference. */ - connectionString?: any; - /** The Azure key vault secret reference of accountKey in connection string. */ - accountKey?: AzureKeyVaultSecretReference; - /** SAS URI of the Azure Storage resource. It is mutually exclusive with connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. */ - sasUri?: any; - /** The Azure key vault secret reference of sasToken in sas uri. */ - sasToken?: AzureKeyVaultSecretReference; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: string; -}; - -/** The azure blob storage linked service. */ -export type AzureBlobStorageLinkedService = LinkedService & { - /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "AzureBlobStorage"; - /** The connection string. It is mutually exclusive with sasUri, serviceEndpoint property. Type: string, SecureString or AzureKeyVaultSecretReference. */ - connectionString?: any; - /** The Azure key vault secret reference of accountKey in connection string. */ - accountKey?: AzureKeyVaultSecretReference; - /** SAS URI of the Azure Blob Storage resource. It is mutually exclusive with connectionString, serviceEndpoint property. Type: string, SecureString or AzureKeyVaultSecretReference. */ - sasUri?: any; - /** The Azure key vault secret reference of sasToken in sas uri. */ - sasToken?: AzureKeyVaultSecretReference; - /** Blob service endpoint of the Azure Blob Storage resource. It is mutually exclusive with connectionString, sasUri property. */ - serviceEndpoint?: string; - /** The ID of the service principal used to authenticate against Azure SQL Data Warehouse. Type: string (or Expression with resultType string). */ - servicePrincipalId?: any; - /** The key of the service principal used to authenticate against Azure SQL Data Warehouse. */ - servicePrincipalKey?: SecretBaseUnion; - /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */ - tenant?: any; - /** Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */ - azureCloudType?: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: string; -}; - -/** The azure table storage linked service. */ -export type AzureTableStorageLinkedService = LinkedService & { +/** Mapping data flow. 
*/ +export type MappingDataFlow = DataFlow & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "AzureTableStorage"; - /** The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference. */ - connectionString?: any; - /** The Azure key vault secret reference of accountKey in connection string. */ - accountKey?: AzureKeyVaultSecretReference; - /** SAS URI of the Azure Storage resource. It is mutually exclusive with connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. */ - sasUri?: any; - /** The Azure key vault secret reference of sasToken in sas uri. */ - sasToken?: AzureKeyVaultSecretReference; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: string; + type: "MappingDataFlow"; + /** List of sources in data flow. */ + sources?: DataFlowSource[]; + /** List of sinks in data flow. */ + sinks?: DataFlowSink[]; + /** List of transformations in data flow. */ + transformations?: Transformation[]; + /** DataFlow script. */ + script?: string; }; -/** Azure SQL Data Warehouse linked service. */ -export type AzureSqlDWLinkedService = LinkedService & { - /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "AzureSqlDW"; - /** The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. */ - connectionString: any; - /** The Azure key vault secret reference of password in connection string. */ - password?: AzureKeyVaultSecretReference; - /** The ID of the service principal used to authenticate against Azure SQL Data Warehouse. Type: string (or Expression with resultType string). */ - servicePrincipalId?: any; - /** The key of the service principal used to authenticate against Azure SQL Data Warehouse. */ - servicePrincipalKey?: SecretBaseUnion; - /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */ - tenant?: any; - /** Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */ - azureCloudType?: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; +/** Integration runtime debug resource. */ +export type IntegrationRuntimeDebugResource = SubResourceDebugResource & { + /** Integration runtime properties. */ + properties: IntegrationRuntimeUnion; }; -/** SQL Server linked service. */ -export type SqlServerLinkedService = LinkedService & { - /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "SqlServer"; - /** The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ - connectionString: any; - /** The on-premises Windows authentication user name. Type: string (or Expression with resultType string). */ - userName?: any; - /** The on-premises Windows authentication password. */ - password?: SecretBaseUnion; - /** The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; +/** Data flow debug resource. */ +export type DataFlowDebugResource = SubResourceDebugResource & { + /** Data flow properties. */ + properties: DataFlowUnion; }; -/** Microsoft Azure SQL Database linked service. */ -export type AzureSqlDatabaseLinkedService = LinkedService & { - /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "AzureSqlDatabase"; - /** The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ - connectionString: any; - /** The Azure key vault secret reference of password in connection string. */ - password?: AzureKeyVaultSecretReference; - /** The ID of the service principal used to authenticate against Azure SQL Database. Type: string (or Expression with resultType string). */ - servicePrincipalId?: any; - /** The key of the service principal used to authenticate against Azure SQL Database. */ - servicePrincipalKey?: SecretBaseUnion; - /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */ - tenant?: any; - /** Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */ - azureCloudType?: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; +/** Dataset debug resource. */ +export type DatasetDebugResource = SubResourceDebugResource & { + /** Dataset properties. */ + properties: DatasetUnion; }; -/** Azure SQL Managed Instance linked service. */ -export type AzureSqlMILinkedService = LinkedService & { - /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "AzureSqlMI"; - /** The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ - connectionString: any; - /** The Azure key vault secret reference of password in connection string. */ - password?: AzureKeyVaultSecretReference; - /** The ID of the service principal used to authenticate against Azure SQL Managed Instance. Type: string (or Expression with resultType string). */ - servicePrincipalId?: any; - /** The key of the service principal used to authenticate against Azure SQL Managed Instance. */ - servicePrincipalKey?: SecretBaseUnion; - /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */ - tenant?: any; - /** Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */ - azureCloudType?: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; +/** Linked service debug resource. */ +export type LinkedServiceDebugResource = SubResourceDebugResource & { + /** Properties of linked service. */ + properties: LinkedServiceUnion; }; -/** Azure Batch linked service. 
*/ -export type AzureBatchLinkedService = LinkedService & { +/** Managed integration runtime, including managed elastic and managed dedicated integration runtimes. */ +export type ManagedIntegrationRuntime = IntegrationRuntime & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "AzureBatch"; - /** The Azure Batch account name. Type: string (or Expression with resultType string). */ - accountName: any; - /** The Azure Batch account access key. */ - accessKey?: SecretBaseUnion; - /** The Azure Batch URI. Type: string (or Expression with resultType string). */ - batchUri: any; - /** The Azure Batch pool name. Type: string (or Expression with resultType string). */ - poolName: any; - /** The Azure Storage linked service reference. */ - linkedServiceName: LinkedServiceReference; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "Managed"; + /** + * Integration runtime state, only valid for managed dedicated integration runtime. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly state?: IntegrationRuntimeState; + /** Managed Virtual Network reference. */ + managedVirtualNetwork?: ManagedVirtualNetworkReference; + /** The compute resource for managed integration runtime. */ + computeProperties?: IntegrationRuntimeComputeProperties; + /** SSIS properties for managed integration runtime. */ + ssisProperties?: IntegrationRuntimeSsisProperties; }; -/** Azure Key Vault linked service. */ -export type AzureKeyVaultLinkedService = LinkedService & { +/** Self-hosted integration runtime. */ +export type SelfHostedIntegrationRuntime = IntegrationRuntime & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "AzureKeyVault"; - /** The base URL of the Azure Key Vault. e.g. https://myakv.vault.azure.net Type: string (or Expression with resultType string). */ - baseUrl: any; + type: "SelfHosted"; + /** Linked integration runtime type from data factory */ + linkedInfo?: LinkedIntegrationRuntimeTypeUnion; }; -/** Microsoft Azure Cosmos Database (CosmosDB) linked service. */ -export type CosmosDbLinkedService = LinkedService & { +/** A single Amazon Simple Storage Service (S3) object or a set of S3 objects. */ +export type AmazonS3Dataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "CosmosDb"; - /** The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ - connectionString?: any; - /** The endpoint of the Azure CosmosDB account. Type: string (or Expression with resultType string) */ - accountEndpoint?: any; - /** The name of the database. Type: string (or Expression with resultType string) */ - database?: any; - /** The account key of the Azure CosmosDB account. Type: SecureString or AzureKeyVaultSecretReference. */ - accountKey?: SecretBaseUnion; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "AmazonS3Object"; + /** The name of the Amazon S3 bucket. Type: string (or Expression with resultType string). */ + bucketName: any; + /** The key of the Amazon S3 object. Type: string (or Expression with resultType string). 
*/ + key?: any; + /** The prefix filter for the S3 object name. Type: string (or Expression with resultType string). */ + prefix?: any; + /** The version for the S3 object. Type: string (or Expression with resultType string). */ + version?: any; + /** The start of S3 object's modified datetime. Type: string (or Expression with resultType string). */ + modifiedDatetimeStart?: any; + /** The end of S3 object's modified datetime. Type: string (or Expression with resultType string). */ + modifiedDatetimeEnd?: any; + /** The format of files. */ + format?: DatasetStorageFormatUnion; + /** The data compression method used for the Amazon S3 object. */ + compression?: DatasetCompressionUnion; }; -/** Dynamics linked service. */ -export type DynamicsLinkedService = LinkedService & { - /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Dynamics"; - /** The deployment type of the Dynamics instance. 'Online' for Dynamics Online and 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or Expression with resultType string). */ - deploymentType: DynamicsDeploymentType; - /** The host name of the on-premises Dynamics server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). */ - hostName?: any; - /** The port of on-premises Dynamics server. The property is required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. */ - port?: any; - /** The URL to the Microsoft Dynamics server. The property is required for on-line and not allowed for on-prem. Type: string (or Expression with resultType string). */ - serviceUri?: any; - /** The organization name of the Dynamics instance. The property is required for on-prem and required for online when there are more than one Dynamics instances associated with the user. Type: string (or Expression with resultType string). */ - organizationName?: any; - /** The authentication type to connect to Dynamics server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or Expression with resultType string). */ - authenticationType: DynamicsAuthenticationType; - /** User name to access the Dynamics instance. Type: string (or Expression with resultType string). */ - username?: any; - /** Password to access the Dynamics instance. */ - password?: SecretBaseUnion; - /** The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). */ - servicePrincipalId?: any; - /** The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). */ - servicePrincipalCredentialType?: DynamicsServicePrincipalCredentialType; - /** The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. */ - servicePrincipalCredential?: SecretBaseUnion; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. 
Type: string (or Expression with resultType string). */ - encryptedCredential?: any; +/** Avro dataset. */ +export type AvroDataset = Dataset & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "Avro"; + /** The location of the avro storage. */ + location?: DatasetLocationUnion; + /** A string from AvroCompressionCodecEnum or an expression */ + avroCompressionCodec?: any; + avroCompressionLevel?: number; }; -/** Dynamics CRM linked service. */ -export type DynamicsCrmLinkedService = LinkedService & { +/** Excel dataset. */ +export type ExcelDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "DynamicsCrm"; - /** The deployment type of the Dynamics CRM instance. 'Online' for Dynamics CRM Online and 'OnPremisesWithIfd' for Dynamics CRM on-premises with Ifd. Type: string (or Expression with resultType string). */ - deploymentType: DynamicsDeploymentType; - /** The host name of the on-premises Dynamics CRM server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). */ - hostName?: any; - /** The port of on-premises Dynamics CRM server. The property is required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. */ - port?: any; - /** The URL to the Microsoft Dynamics CRM server. The property is required for on-line and not allowed for on-prem. Type: string (or Expression with resultType string). */ - serviceUri?: any; - /** The organization name of the Dynamics CRM instance. The property is required for on-prem and required for online when there are more than one Dynamics CRM instances associated with the user. Type: string (or Expression with resultType string). */ - organizationName?: any; - /** The authentication type to connect to Dynamics CRM server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or Expression with resultType string). */ - authenticationType: DynamicsAuthenticationType; - /** User name to access the Dynamics CRM instance. Type: string (or Expression with resultType string). */ - username?: any; - /** Password to access the Dynamics CRM instance. */ - password?: SecretBaseUnion; - /** The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). */ - servicePrincipalId?: any; - /** A string from ServicePrincipalCredentialEnum or an expression */ - servicePrincipalCredentialType?: any; - /** The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. */ - servicePrincipalCredential?: SecretBaseUnion; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "Excel"; + /** The location of the excel storage. */ + location?: DatasetLocationUnion; + /** The sheet of excel file. Type: string (or Expression with resultType string). 
   */
+  sheetName?: any;
+  /** The partial data of one sheet. Type: string (or Expression with resultType string). */
+  range?: any;
+  /** When used as input, treat the first row of data as headers. When used as output, write the headers into the output as the first row of data. The default value is false. Type: boolean (or Expression with resultType boolean). */
+  firstRowAsHeader?: any;
+  /** The data compression method used for the excel dataset. */
+  compression?: DatasetCompressionUnion;
+  /** The null value string. Type: string (or Expression with resultType string). */
+  nullValue?: any;
 };
 
-/** Common Data Service for Apps linked service. */
-export type CommonDataServiceForAppsLinkedService = LinkedService & {
+/** Parquet dataset. */
+export type ParquetDataset = Dataset & {
   /** Polymorphic discriminator, which specifies the different types this object can be */
-  type: "CommonDataServiceForApps";
-  /** The deployment type of the Common Data Service for Apps instance. 'Online' for Common Data Service for Apps Online and 'OnPremisesWithIfd' for Common Data Service for Apps on-premises with Ifd. Type: string (or Expression with resultType string). */
-  deploymentType: DynamicsDeploymentType;
-  /** The host name of the on-premises Common Data Service for Apps server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). */
-  hostName?: any;
-  /** The port of on-premises Common Data Service for Apps server. The property is required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. */
-  port?: any;
-  /** The URL to the Microsoft Common Data Service for Apps server. The property is required for on-line and not allowed for on-prem. Type: string (or Expression with resultType string). */
-  serviceUri?: any;
-  /** The organization name of the Common Data Service for Apps instance. The property is required for on-prem and required for online when there are more than one Common Data Service for Apps instances associated with the user. Type: string (or Expression with resultType string). */
-  organizationName?: any;
-  /** The authentication type to connect to Common Data Service for Apps server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario. 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or Expression with resultType string). */
-  authenticationType: DynamicsAuthenticationType;
-  /** User name to access the Common Data Service for Apps instance. Type: string (or Expression with resultType string). */
-  username?: any;
-  /** Password to access the Common Data Service for Apps instance. */
-  password?: SecretBaseUnion;
-  /** The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). */
-  servicePrincipalId?: any;
-  /** A string from ServicePrincipalCredentialEnum or an expression */
-  servicePrincipalCredentialType?: any;
-  /** The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. */
-  servicePrincipalCredential?: SecretBaseUnion;
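
// ---------------------------------------------------------------------------
// Editorial sketch (not part of this patch): a minimal ExcelDataset built from
// the fields above, assuming the model is re-exported from the package root.
// The linked service, folder, file, and sheet names are hypothetical.
import type { ExcelDataset } from "@azure/synapse-artifacts";

const excelDataset: ExcelDataset = {
  type: "Excel",
  // `linkedServiceName` is required on the Dataset base type.
  linkedServiceName: { type: "LinkedServiceReference", referenceName: "exampleStorage" },
  location: { type: "AzureBlobStorageLocation", folderPath: "input", fileName: "report.xlsx" },
  sheetName: "Sheet1",
  firstRowAsHeader: true
};
// ---------------------------------------------------------------------------

-  /** The encrypted credential used for authentication.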
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
-  encryptedCredential?: any;
+  type: "Parquet";
+  /** The location of the parquet storage. */
+  location?: DatasetLocationUnion;
+  /** A string from ParquetCompressionCodecEnum or an expression */
+  compressionCodec?: any;
 };
 
-/** HDInsight linked service. */
-export type HDInsightLinkedService = LinkedService & {
+/** Delimited text dataset. */
+export type DelimitedTextDataset = Dataset & {
   /** Polymorphic discriminator, which specifies the different types this object can be */
-  type: "HDInsight";
-  /** HDInsight cluster URI. Type: string (or Expression with resultType string). */
-  clusterUri: any;
-  /** HDInsight cluster user name. Type: string (or Expression with resultType string). */
-  userName?: any;
-  /** HDInsight cluster password. */
-  password?: SecretBaseUnion;
-  /** The Azure Storage linked service reference. */
-  linkedServiceName?: LinkedServiceReference;
-  /** A reference to the Azure SQL linked service that points to the HCatalog database. */
-  hcatalogLinkedServiceName?: LinkedServiceReference;
-  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
-  encryptedCredential?: any;
-  /** Specify if the HDInsight is created with ESP (Enterprise Security Package). Type: Boolean. */
-  isEspEnabled?: any;
-  /** Specify the FileSystem if the main storage for the HDInsight is ADLS Gen2. Type: string (or Expression with resultType string). */
-  fileSystem?: any;
+  type: "DelimitedText";
+  /** The location of the delimited text storage. */
+  location?: DatasetLocationUnion;
+  /** The column delimiter. Type: string (or Expression with resultType string). */
+  columnDelimiter?: any;
+  /** The row delimiter. Type: string (or Expression with resultType string). */
+  rowDelimiter?: any;
+  /** The code page name of the preferred encoding. If not specified, the default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). */
+  encodingName?: any;
+  /** The data compressionCodec. Type: string (or Expression with resultType string). */
+  compressionCodec?: any;
+  /** The data compression level used for DelimitedText. */
+  compressionLevel?: any;
+  /** The quote character. Type: string (or Expression with resultType string). */
+  quoteChar?: any;
+  /** The escape character. Type: string (or Expression with resultType string). */
+  escapeChar?: any;
+  /** When used as input, treat the first row of data as headers. When used as output, write the headers into the output as the first row of data. The default value is false. Type: boolean (or Expression with resultType boolean). */
+  firstRowAsHeader?: any;
+  /** The null value string. Type: string (or Expression with resultType string). */
+  nullValue?: any;
 };
 
-/** File system linked service. */
-export type FileServerLinkedService = LinkedService & {
+/** Json dataset. */
+export type JsonDataset = Dataset & {
   /** Polymorphic discriminator, which specifies the different types this object can be */
-  type: "FileServer";
-  /** Host name of the server. Type: string (or Expression with resultType string). */
-  host: any;
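
// ---------------------------------------------------------------------------
// Editorial sketch (not part of this patch): a DelimitedTextDataset with the
// documented defaults spelled out, assuming the model is re-exported from the
// package root. The linked service and file names are hypothetical.
import type { DelimitedTextDataset } from "@azure/synapse-artifacts";

const csvDataset: DelimitedTextDataset = {
  type: "DelimitedText",
  linkedServiceName: { type: "LinkedServiceReference", referenceName: "exampleStorage" },
  location: { type: "AzureBlobFSLocation", folderPath: "raw", fileName: "input.csv" },
  columnDelimiter: ",",
  rowDelimiter: "\n",
  encodingName: "UTF-8", // the documented default when unspecified
  firstRowAsHeader: true,
  nullValue: ""
};
// ---------------------------------------------------------------------------

-  /** User ID to logon the server.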
Type: string (or Expression with resultType string). */
-  userId?: any;
-  /** Password to logon the server. */
-  password?: SecretBaseUnion;
-  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
-  encryptedCredential?: any;
+  type: "Json";
+  /** The location of the json data storage. */
+  location?: DatasetLocationUnion;
+  /** The code page name of the preferred encoding. If not specified, the default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). */
+  encodingName?: any;
+  /** The data compression method used for the json dataset. */
+  compression?: DatasetCompressionUnion;
 };
 
-/** Azure File Storage linked service. */
-export type AzureFileStorageLinkedService = LinkedService & {
+/** Xml dataset. */
+export type XmlDataset = Dataset & {
   /** Polymorphic discriminator, which specifies the different types this object can be */
-  type: "AzureFileStorage";
-  /** Host name of the server. Type: string (or Expression with resultType string). */
-  host: any;
-  /** User ID to logon the server. Type: string (or Expression with resultType string). */
-  userId?: any;
-  /** Password to logon the server. */
-  password?: SecretBaseUnion;
-  /** The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference. */
-  connectionString?: any;
-  /** The Azure key vault secret reference of accountKey in connection string. */
-  accountKey?: AzureKeyVaultSecretReference;
-  /** SAS URI of the Azure File resource. It is mutually exclusive with connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. */
-  sasUri?: any;
-  /** The Azure key vault secret reference of sasToken in sas uri. */
-  sasToken?: AzureKeyVaultSecretReference;
-  /** The azure file share name. It is required when auth with accountKey/sasToken. Type: string (or Expression with resultType string). */
-  fileShare?: any;
-  /** The azure file share snapshot version. Type: string (or Expression with resultType string). */
-  snapshot?: any;
-  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
-  encryptedCredential?: any;
+  type: "Xml";
+  /** The location of the xml data storage. */
+  location?: DatasetLocationUnion;
+  /** The code page name of the preferred encoding. If not specified, the default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). */
+  encodingName?: any;
+  /** The null value string. Type: string (or Expression with resultType string). */
+  nullValue?: any;
+  /** The data compression method used for the xml dataset. */
+  compression?: DatasetCompressionUnion;
 };
 
-/** Linked service for Google Cloud Storage. */
-export type GoogleCloudStorageLinkedService = LinkedService & {
-  /** Polymorphic discriminator, which specifies the different types this object can be */
-  type: "GoogleCloudStorage";
-  /** The access key identifier of the Google Cloud Storage Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). */
-  accessKeyId?: any;
-  /** The secret access key of the Google Cloud Storage Identity and Access Management (IAM) user. */
-  secretAccessKey?: SecretBaseUnion;
-  /** This value specifies the endpoint to access with the Google Cloud Storage Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). */
-  serviceUrl?: any;
-  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
-  encryptedCredential?: any;
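
// ---------------------------------------------------------------------------
// Editorial sketch (not part of this patch): a JsonDataset pointing at gzipped
// files, assuming the model is re-exported from the package root and that
// "GZip" is the intended DatasetCompressionUnion variant. Names are
// hypothetical.
import type { JsonDataset } from "@azure/synapse-artifacts";

const jsonDataset: JsonDataset = {
  type: "Json",
  linkedServiceName: { type: "LinkedServiceReference", referenceName: "exampleStorage" },
  location: { type: "AzureBlobStorageLocation", folderPath: "events" },
  encodingName: "UTF-8",
  compression: { type: "GZip" }
};
// ---------------------------------------------------------------------------

+/** ORC dataset.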
*/ +export type OrcDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "GoogleCloudStorage"; - /** The access key identifier of the Google Cloud Storage Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). */ - accessKeyId?: any; - /** The secret access key of the Google Cloud Storage Identity and Access Management (IAM) user. */ - secretAccessKey?: SecretBaseUnion; - /** This value specifies the endpoint to access with the Google Cloud Storage Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). */ - serviceUrl?: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "Orc"; + /** The location of the ORC data storage. */ + location?: DatasetLocationUnion; + /** The data orcCompressionCodec. Type: string (or Expression with resultType string). */ + orcCompressionCodec?: any; }; -/** Oracle database. */ -export type OracleLinkedService = LinkedService & { +/** Binary dataset. */ +export type BinaryDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Oracle"; - /** The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ - connectionString: any; - /** The Azure key vault secret reference of password in connection string. */ - password?: AzureKeyVaultSecretReference; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "Binary"; + /** The location of the Binary storage. */ + location?: DatasetLocationUnion; + /** The data compression method used for the binary dataset. */ + compression?: DatasetCompressionUnion; }; -/** Azure MySQL database linked service. */ -export type AzureMySqlLinkedService = LinkedService & { +/** The Azure Blob storage. */ +export type AzureBlobDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "AzureMySql"; - /** The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ - connectionString: any; - /** The Azure key vault secret reference of password in connection string. */ - password?: AzureKeyVaultSecretReference; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "AzureBlob"; + /** The path of the Azure Blob storage. Type: string (or Expression with resultType string). */ + folderPath?: any; + /** The root of blob path. Type: string (or Expression with resultType string). */ + tableRootLocation?: any; + /** The name of the Azure Blob. Type: string (or Expression with resultType string). */ + fileName?: any; + /** The start of Azure Blob's modified datetime. Type: string (or Expression with resultType string). */ + modifiedDatetimeStart?: any; + /** The end of Azure Blob's modified datetime. Type: string (or Expression with resultType string). */ + modifiedDatetimeEnd?: any; + /** The format of the Azure Blob storage. 
*/ + format?: DatasetStorageFormatUnion; + /** The data compression method used for the blob storage. */ + compression?: DatasetCompressionUnion; }; -/** Linked service for MySQL data source. */ -export type MySqlLinkedService = LinkedService & { +/** The Azure Table storage dataset. */ +export type AzureTableDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "MySql"; - /** The connection string. */ - connectionString: any; - /** The Azure key vault secret reference of password in connection string. */ - password?: AzureKeyVaultSecretReference; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "AzureTable"; + /** The table name of the Azure Table storage. Type: string (or Expression with resultType string). */ + tableName: any; +}; + +/** The Azure SQL Server database dataset. */ +export type AzureSqlTableDataset = Dataset & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "AzureSqlTable"; + /** This property will be retired. Please consider using schema + table properties instead. */ + tableName?: any; + /** The schema name of the Azure SQL database. Type: string (or Expression with resultType string). */ + schemaTypePropertiesSchema?: any; + /** The table name of the Azure SQL database. Type: string (or Expression with resultType string). */ + table?: any; }; -/** Linked service for PostgreSQL data source. */ -export type PostgreSqlLinkedService = LinkedService & { +/** The Azure SQL Managed Instance dataset. */ +export type AzureSqlMITableDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "PostgreSql"; - /** The connection string. */ - connectionString: any; - /** The Azure key vault secret reference of password in connection string. */ - password?: AzureKeyVaultSecretReference; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "AzureSqlMITable"; + /** This property will be retired. Please consider using schema + table properties instead. */ + tableName?: any; + /** The schema name of the Azure SQL Managed Instance. Type: string (or Expression with resultType string). */ + schemaTypePropertiesSchema?: any; + /** The table name of the Azure SQL Managed Instance dataset. Type: string (or Expression with resultType string). */ + table?: any; }; -/** Linked service for Sybase data source. */ -export type SybaseLinkedService = LinkedService & { +/** The Azure SQL Data Warehouse dataset. */ +export type AzureSqlDWTableDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Sybase"; - /** Server name for connection. Type: string (or Expression with resultType string). */ - server: any; - /** Database name for connection. Type: string (or Expression with resultType string). */ - database: any; - /** Schema name for connection. Type: string (or Expression with resultType string). */ - schema?: any; - /** AuthenticationType to be used for connection. */ - authenticationType?: SybaseAuthenticationType; - /** Username for authentication. Type: string (or Expression with resultType string). 
*/ - username?: any; - /** Password for authentication. */ - password?: SecretBaseUnion; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "AzureSqlDWTable"; + /** This property will be retired. Please consider using schema + table properties instead. */ + tableName?: any; + /** The schema name of the Azure SQL Data Warehouse. Type: string (or Expression with resultType string). */ + schemaTypePropertiesSchema?: any; + /** The table name of the Azure SQL Data Warehouse. Type: string (or Expression with resultType string). */ + table?: any; }; -/** Linked service for DB2 data source. */ -export type Db2LinkedService = LinkedService & { +/** The Cassandra database dataset. */ +export type CassandraTableDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Db2"; - /** The connection string. It is mutually exclusive with server, database, authenticationType, userName, packageCollection and certificateCommonName property. Type: string, SecureString or AzureKeyVaultSecretReference. */ - connectionString?: any; - /** Server name for connection. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). */ - server: any; - /** Database name for connection. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). */ - database: any; - /** AuthenticationType to be used for connection. It is mutually exclusive with connectionString property. */ - authenticationType?: Db2AuthenticationType; - /** Username for authentication. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). */ - username?: any; - /** Password for authentication. */ - password?: SecretBaseUnion; - /** Under where packages are created when querying database. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). */ - packageCollection?: any; - /** Certificate Common Name when TLS is enabled. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). */ - certificateCommonName?: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "CassandraTable"; + /** The table name of the Cassandra database. Type: string (or Expression with resultType string). */ + tableName?: any; + /** The keyspace of the Cassandra database. Type: string (or Expression with resultType string). */ + keyspace?: any; }; -/** Linked service for Teradata data source. */ -export type TeradataLinkedService = LinkedService & { +/** The custom dataset. */ +export type CustomDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Teradata"; - /** Teradata ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ - connectionString?: any; - /** Server name for connection. Type: string (or Expression with resultType string). */ - server?: any; - /** AuthenticationType to be used for connection. 
*/ - authenticationType?: TeradataAuthenticationType; - /** Username for authentication. Type: string (or Expression with resultType string). */ - username?: any; - /** Password for authentication. */ - password?: SecretBaseUnion; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "CustomDataset"; + /** Custom dataset properties. */ + typeProperties?: any; }; -/** Azure ML Studio Web Service linked service. */ -export type AzureMLLinkedService = LinkedService & { +/** Microsoft Azure CosmosDB (SQL API) Collection dataset. */ +export type CosmosDbSqlApiCollectionDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "AzureML"; - /** The Batch Execution REST URL for an Azure ML Studio Web Service endpoint. Type: string (or Expression with resultType string). */ - mlEndpoint: any; - /** The API key for accessing the Azure ML model endpoint. */ - apiKey: SecretBaseUnion; - /** The Update Resource REST URL for an Azure ML Studio Web Service endpoint. Type: string (or Expression with resultType string). */ - updateResourceEndpoint?: any; - /** The ID of the service principal used to authenticate against the ARM-based updateResourceEndpoint of an Azure ML Studio web service. Type: string (or Expression with resultType string). */ - servicePrincipalId?: any; - /** The key of the service principal used to authenticate against the ARM-based updateResourceEndpoint of an Azure ML Studio web service. */ - servicePrincipalKey?: SecretBaseUnion; - /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */ - tenant?: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "CosmosDbSqlApiCollection"; + /** CosmosDB (SQL API) collection name. Type: string (or Expression with resultType string). */ + collectionName: any; }; -/** Azure ML Service linked service. */ -export type AzureMLServiceLinkedService = LinkedService & { +/** Microsoft Azure Document Database Collection dataset. */ +export type DocumentDbCollectionDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "AzureMLService"; - /** Azure ML Service workspace subscription ID. Type: string (or Expression with resultType string). */ - subscriptionId: any; - /** Azure ML Service workspace resource group name. Type: string (or Expression with resultType string). */ - resourceGroupName: any; - /** Azure ML Service workspace name. Type: string (or Expression with resultType string). */ - mlWorkspaceName: any; - /** The ID of the service principal used to authenticate against the endpoint of a published Azure ML Service pipeline. Type: string (or Expression with resultType string). */ - servicePrincipalId?: any; - /** The key of the service principal used to authenticate against the endpoint of a published Azure ML Service pipeline. */ - servicePrincipalKey?: SecretBaseUnion; - /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */ - tenant?: any; - /** The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "DocumentDbCollection"; + /** Document Database collection name. Type: string (or Expression with resultType string). */ + collectionName: any; }; -/** Open Database Connectivity (ODBC) linked service. */ -export type OdbcLinkedService = LinkedService & { +/** The Dynamics entity dataset. */ +export type DynamicsEntityDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Odbc"; - /** The non-access credential portion of the connection string as well as an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. */ - connectionString: any; - /** Type of authentication used to connect to the ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). */ - authenticationType?: any; - /** The access credential portion of the connection string specified in driver-specific property-value format. */ - credential?: SecretBaseUnion; - /** User name for Basic authentication. Type: string (or Expression with resultType string). */ - userName?: any; - /** Password for Basic authentication. */ - password?: SecretBaseUnion; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "DynamicsEntity"; + /** The logical name of the entity. Type: string (or Expression with resultType string). */ + entityName?: any; }; -/** Informix linked service. */ -export type InformixLinkedService = LinkedService & { +/** The Dynamics CRM entity dataset. */ +export type DynamicsCrmEntityDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Informix"; - /** The non-access credential portion of the connection string as well as an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. */ - connectionString: any; - /** Type of authentication used to connect to the Informix as ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). */ - authenticationType?: any; - /** The access credential portion of the connection string specified in driver-specific property-value format. */ - credential?: SecretBaseUnion; - /** User name for Basic authentication. Type: string (or Expression with resultType string). */ - userName?: any; - /** Password for Basic authentication. */ - password?: SecretBaseUnion; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "DynamicsCrmEntity"; + /** The logical name of the entity. Type: string (or Expression with resultType string). */ + entityName?: any; }; -/** Microsoft Access linked service. */ -export type MicrosoftAccessLinkedService = LinkedService & { +/** The Common Data Service for Apps entity dataset. */ +export type CommonDataServiceForAppsEntityDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "MicrosoftAccess"; - /** The non-access credential portion of the connection string as well as an optional encrypted credential. 
Type: string, SecureString or AzureKeyVaultSecretReference. */ - connectionString: any; - /** Type of authentication used to connect to the Microsoft Access as ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). */ - authenticationType?: any; - /** The access credential portion of the connection string specified in driver-specific property-value format. */ - credential?: SecretBaseUnion; - /** User name for Basic authentication. Type: string (or Expression with resultType string). */ - userName?: any; - /** Password for Basic authentication. */ - password?: SecretBaseUnion; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "CommonDataServiceForAppsEntity"; + /** The logical name of the entity. Type: string (or Expression with resultType string). */ + entityName?: any; }; -/** Hadoop Distributed File System (HDFS) linked service. */ -export type HdfsLinkedService = LinkedService & { +/** Azure Data Lake Store dataset. */ +export type AzureDataLakeStoreDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Hdfs"; - /** The URL of the HDFS service endpoint, e.g. http://myhostname:50070/webhdfs/v1 . Type: string (or Expression with resultType string). */ - url: any; - /** Type of authentication used to connect to the HDFS. Possible values are: Anonymous and Windows. Type: string (or Expression with resultType string). */ - authenticationType?: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; - /** User name for Windows authentication. Type: string (or Expression with resultType string). */ - userName?: any; - /** Password for Windows authentication. */ - password?: SecretBaseUnion; + type: "AzureDataLakeStoreFile"; + /** Path to the folder in the Azure Data Lake Store. Type: string (or Expression with resultType string). */ + folderPath?: any; + /** The name of the file in the Azure Data Lake Store. Type: string (or Expression with resultType string). */ + fileName?: any; + /** The format of the Data Lake Store. */ + format?: DatasetStorageFormatUnion; + /** The data compression method used for the item(s) in the Azure Data Lake Store. */ + compression?: DatasetCompressionUnion; }; -/** Open Data Protocol (OData) linked service. */ -export type ODataLinkedService = LinkedService & { - /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "OData"; - /** The URL of the OData service endpoint. Type: string (or Expression with resultType string). */ - url: any; - /** Type of authentication used to connect to the OData service. */ - authenticationType?: ODataAuthenticationType; - /** User name of the OData service. Type: string (or Expression with resultType string). */ - userName?: any; - /** Password of the OData service. */ - password?: SecretBaseUnion; - /** Specify the tenant information (domain name or tenant ID) under which your application resides. Type: string (or Expression with resultType string). */ - tenant?: any; - /** Specify the application id of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). 
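// --- Editorial sketch (illustrative only; not part of the generated patch) ---
// File-path style datasets such as AzureDataLakeStoreDataset above take a folderPath
// and fileName; the format/compression unions are omitted here because their members
// are defined elsewhere in this file. The linkedServiceName shape is assumed.
const adlsExample: AzureDataLakeStoreDataset = {
  type: "AzureDataLakeStoreFile",
  linkedServiceName: { type: "LinkedServiceReference", referenceName: "myDataLakeStore" },
  folderPath: "telemetry/2021/10",
  fileName: "day05.json"
};
// --- end editorial sketch ---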
*/ - servicePrincipalId?: any; - /** Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */ - azureCloudType?: any; - /** Specify the resource you are requesting authorization to use Directory. Type: string (or Expression with resultType string). */ - aadResourceId?: any; - /** Specify the credential type (key or cert) is used for service principal. */ - aadServicePrincipalCredentialType?: ODataAadServicePrincipalCredentialType; - /** Specify the secret of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). */ - servicePrincipalKey?: SecretBaseUnion; - /** Specify the base64 encoded certificate of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). */ - servicePrincipalEmbeddedCert?: SecretBaseUnion; - /** Specify the password of your certificate if your certificate has a password and you are using AadServicePrincipal authentication. Type: string (or Expression with resultType string). */ - servicePrincipalEmbeddedCertPassword?: SecretBaseUnion; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; +/** The Azure Data Lake Storage Gen2 storage. */ +export type AzureBlobFSDataset = Dataset & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "AzureBlobFSFile"; + /** The path of the Azure Data Lake Storage Gen2 storage. Type: string (or Expression with resultType string). */ + folderPath?: any; + /** The name of the Azure Data Lake Storage Gen2. Type: string (or Expression with resultType string). */ + fileName?: any; + /** The format of the Azure Data Lake Storage Gen2 storage. */ + format?: DatasetStorageFormatUnion; + /** The data compression method used for the blob storage. */ + compression?: DatasetCompressionUnion; }; -/** Web linked service. */ -export type WebLinkedService = LinkedService & { +/** The Office365 account. */ +export type Office365Dataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Web"; - /** Web linked service properties. */ - typeProperties: WebLinkedServiceTypePropertiesUnion; + type: "Office365Table"; + /** Name of the dataset to extract from Office 365. Type: string (or Expression with resultType string). */ + tableName: any; + /** A predicate expression that can be used to filter the specific rows to extract from Office 365. Type: string (or Expression with resultType string). */ + predicate?: any; }; -/** Linked service for Cassandra data source. */ -export type CassandraLinkedService = LinkedService & { +/** An on-premises file system dataset. */ +export type FileShareDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Cassandra"; - /** Host name for connection. Type: string (or Expression with resultType string). */ - host: any; - /** AuthenticationType to be used for connection. Type: string (or Expression with resultType string). */ - authenticationType?: any; - /** The port for the connection. Type: integer (or Expression with resultType integer). */ - port?: any; - /** Username for authentication. 
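// --- Editorial sketch (illustrative only; not part of the generated patch) ---
// Office365Dataset above requires a tableName and optionally filters extracted rows
// with a predicate expression; both values shown are hypothetical, and the
// linkedServiceName shape is assumed from the Dataset base type.
const office365Example: Office365Dataset = {
  type: "Office365Table",
  linkedServiceName: { type: "LinkedServiceReference", referenceName: "myOffice365" },
  tableName: "BasicDataSet_v0.Message_v0",
  predicate: "CreatedDateTime ge 2021-01-01T00:00:00Z"
};
// --- end editorial sketch ---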
Type: string (or Expression with resultType string). */ - username?: any; - /** Password for authentication. */ - password?: SecretBaseUnion; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "FileShare"; + /** The path of the on-premises file system. Type: string (or Expression with resultType string). */ + folderPath?: any; + /** The name of the on-premises file system. Type: string (or Expression with resultType string). */ + fileName?: any; + /** The start of file's modified datetime. Type: string (or Expression with resultType string). */ + modifiedDatetimeStart?: any; + /** The end of file's modified datetime. Type: string (or Expression with resultType string). */ + modifiedDatetimeEnd?: any; + /** The format of the files. */ + format?: DatasetStorageFormatUnion; + /** Specify a filter to be used to select a subset of files in the folderPath rather than all files. Type: string (or Expression with resultType string). */ + fileFilter?: any; + /** The data compression method used for the file system. */ + compression?: DatasetCompressionUnion; }; -/** Linked service for MongoDb data source. */ -export type MongoDbLinkedService = LinkedService & { +/** The MongoDB database dataset. */ +export type MongoDbCollectionDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "MongoDb"; - /** The IP address or server name of the MongoDB server. Type: string (or Expression with resultType string). */ - server: any; - /** The authentication type to be used to connect to the MongoDB database. */ - authenticationType?: MongoDbAuthenticationType; - /** The name of the MongoDB database that you want to access. Type: string (or Expression with resultType string). */ - databaseName: any; - /** Username for authentication. Type: string (or Expression with resultType string). */ - username?: any; - /** Password for authentication. */ - password?: SecretBaseUnion; - /** Database to verify the username and password. Type: string (or Expression with resultType string). */ - authSource?: any; - /** The TCP port number that the MongoDB server uses to listen for client connections. The default value is 27017. Type: integer (or Expression with resultType integer), minimum: 0. */ - port?: any; - /** Specifies whether the connections to the server are encrypted using SSL. The default value is false. Type: boolean (or Expression with resultType boolean). */ - enableSsl?: any; - /** Specifies whether to allow self-signed certificates from the server. The default value is false. Type: boolean (or Expression with resultType boolean). */ - allowSelfSignedServerCert?: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "MongoDbCollection"; + /** The table name of the MongoDB database. Type: string (or Expression with resultType string). */ + collectionName: any; }; -/** Linked service for MongoDB Atlas data source. */ -export type MongoDbAtlasLinkedService = LinkedService & { +/** The MongoDB Atlas database dataset. 
*/ +export type MongoDbAtlasCollectionDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "MongoDbAtlas"; - /** The MongoDB Atlas connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. */ - connectionString: any; - /** The name of the MongoDB Atlas database that you want to access. Type: string (or Expression with resultType string). */ - database: any; + type: "MongoDbAtlasCollection"; + /** The collection name of the MongoDB Atlas database. Type: string (or Expression with resultType string). */ + collection: any; }; -/** Linked service for MongoDB data source. */ -export type MongoDbV2LinkedService = LinkedService & { +/** The MongoDB database dataset. */ +export type MongoDbV2CollectionDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "MongoDbV2"; - /** The MongoDB connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. */ - connectionString: any; - /** The name of the MongoDB database that you want to access. Type: string (or Expression with resultType string). */ - database: any; + type: "MongoDbV2Collection"; + /** The collection name of the MongoDB database. Type: string (or Expression with resultType string). */ + collection: any; }; -/** Linked service for CosmosDB (MongoDB API) data source. */ -export type CosmosDbMongoDbApiLinkedService = LinkedService & { +/** The CosmosDB (MongoDB API) database dataset. */ +export type CosmosDbMongoDbApiCollectionDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "CosmosDbMongoDbApi"; - /** The CosmosDB (MongoDB API) connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. */ - connectionString: any; - /** The name of the CosmosDB (MongoDB API) database that you want to access. Type: string (or Expression with resultType string). */ - database: any; + type: "CosmosDbMongoDbApiCollection"; + /** The collection name of the CosmosDB (MongoDB API) database. Type: string (or Expression with resultType string). */ + collection: any; }; -/** Azure Data Lake Store linked service. */ -export type AzureDataLakeStoreLinkedService = LinkedService & { +/** The Open Data Protocol (OData) resource dataset. */ +export type ODataResourceDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "AzureDataLakeStore"; - /** Data Lake Store service URI. Type: string (or Expression with resultType string). */ - dataLakeStoreUri: any; - /** The ID of the application used to authenticate against the Azure Data Lake Store account. Type: string (or Expression with resultType string). */ - servicePrincipalId?: any; - /** The Key of the application used to authenticate against the Azure Data Lake Store account. */ - servicePrincipalKey?: SecretBaseUnion; - /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */ - tenant?: any; - /** Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). 
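// --- Editorial sketch (illustrative only; not part of the generated patch) ---
// The three Mongo-style datasets above share one shape: a required collection name on
// top of the Dataset base. The linkedServiceName shape is assumed.
const atlasExample: MongoDbAtlasCollectionDataset = {
  type: "MongoDbAtlasCollection",
  linkedServiceName: { type: "LinkedServiceReference", referenceName: "myAtlasCluster" },
  collection: "orders"
};
// --- end editorial sketch ---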
*/ - azureCloudType?: any; - /** Data Lake Store account name. Type: string (or Expression with resultType string). */ - accountName?: any; - /** Data Lake Store account subscription ID (if different from Data Factory account). Type: string (or Expression with resultType string). */ - subscriptionId?: any; - /** Data Lake Store account resource group name (if different from Data Factory account). Type: string (or Expression with resultType string). */ - resourceGroupName?: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "ODataResource"; + /** The OData resource path. Type: string (or Expression with resultType string). */ + path?: any; }; -/** Azure Data Lake Storage Gen2 linked service. */ -export type AzureBlobFSLinkedService = LinkedService & { +/** The on-premises Oracle database dataset. */ +export type OracleTableDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "AzureBlobFS"; - /** Endpoint for the Azure Data Lake Storage Gen2 service. Type: string (or Expression with resultType string). */ - url: any; - /** Account key for the Azure Data Lake Storage Gen2 service. Type: string (or Expression with resultType string). */ - accountKey?: any; - /** The ID of the application used to authenticate against the Azure Data Lake Storage Gen2 account. Type: string (or Expression with resultType string). */ - servicePrincipalId?: any; - /** The Key of the application used to authenticate against the Azure Data Lake Storage Gen2 account. */ - servicePrincipalKey?: SecretBaseUnion; - /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */ - tenant?: any; - /** Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */ - azureCloudType?: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "OracleTable"; + /** This property will be retired. Please consider using schema + table properties instead. */ + tableName?: any; + /** The schema name of the on-premises Oracle database. Type: string (or Expression with resultType string). */ + schemaTypePropertiesSchema?: any; + /** The table name of the on-premises Oracle database. Type: string (or Expression with resultType string). */ + table?: any; }; -/** Office365 linked service. */ -export type Office365LinkedService = LinkedService & { +/** The AmazonRdsForOracle database dataset. */ +export type AmazonRdsForOracleTableDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Office365"; - /** Azure tenant ID to which the Office 365 account belongs. Type: string (or Expression with resultType string). */ - office365TenantId: any; - /** Specify the tenant information under which your Azure AD web application resides. Type: string (or Expression with resultType string). */ - servicePrincipalTenantId: any; - /** Specify the application's client ID. Type: string (or Expression with resultType string). 
*/ - servicePrincipalId: any; - /** Specify the application's key. */ - servicePrincipalKey: SecretBaseUnion; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "AmazonRdsForOracleTable"; + /** The schema name of the AmazonRdsForOracle database. Type: string (or Expression with resultType string). */ + schemaTypePropertiesSchema?: any; + /** The table name of the AmazonRdsForOracle database. Type: string (or Expression with resultType string). */ + table?: any; }; -/** Linked service for Salesforce. */ -export type SalesforceLinkedService = LinkedService & { +/** The Teradata database dataset. */ +export type TeradataTableDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Salesforce"; - /** The URL of Salesforce instance. Default is 'https://login.salesforce.com'. To copy data from sandbox, specify 'https://test.salesforce.com'. To copy data from custom domain, specify, for example, 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). */ - environmentUrl?: any; - /** The username for Basic authentication of the Salesforce instance. Type: string (or Expression with resultType string). */ - username?: any; - /** The password for Basic authentication of the Salesforce instance. */ - password?: SecretBaseUnion; - /** The security token is optional to remotely access Salesforce instance. */ - securityToken?: SecretBaseUnion; - /** The Salesforce API version used in ADF. Type: string (or Expression with resultType string). */ - apiVersion?: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "TeradataTable"; + /** The database name of Teradata. Type: string (or Expression with resultType string). */ + database?: any; + /** The table name of Teradata. Type: string (or Expression with resultType string). */ + table?: any; }; -/** Linked service for Salesforce Service Cloud. */ -export type SalesforceServiceCloudLinkedService = LinkedService & { +/** The Azure MySQL database dataset. */ +export type AzureMySqlTableDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "SalesforceServiceCloud"; - /** The URL of Salesforce Service Cloud instance. Default is 'https://login.salesforce.com'. To copy data from sandbox, specify 'https://test.salesforce.com'. To copy data from custom domain, specify, for example, 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). */ - environmentUrl?: any; - /** The username for Basic authentication of the Salesforce instance. Type: string (or Expression with resultType string). */ - username?: any; - /** The password for Basic authentication of the Salesforce instance. */ - password?: SecretBaseUnion; - /** The security token is optional to remotely access Salesforce instance. */ - securityToken?: SecretBaseUnion; - /** The Salesforce API version used in ADF. Type: string (or Expression with resultType string). */ - apiVersion?: any; - /** Extended properties appended to the connection string. Type: string (or Expression with resultType string). 
*/
- extendedProperties?: any;
- /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
- encryptedCredential?: any;
+ type: "AzureMySqlTable";
+ /** The Azure MySQL database table name. Type: string (or Expression with resultType string). */
+ tableName?: any;
+ /** The name of the Azure MySQL database table. Type: string (or Expression with resultType string). */
+ table?: any;
};
-/** Linked service for SAP Cloud for Customer. */
-export type SapCloudForCustomerLinkedService = LinkedService & {
+/** The Amazon Redshift table dataset. */
+export type AmazonRedshiftTableDataset = Dataset & {
+ /** Polymorphic discriminator, which specifies the different types this object can be */
+ type: "AmazonRedshiftTable";
+ /** This property will be retired. Please consider using schema + table properties instead. */
+ tableName?: any;
+ /** The Amazon Redshift table name. Type: string (or Expression with resultType string). */
+ table?: any;
+ /** The Amazon Redshift schema name. Type: string (or Expression with resultType string). */
+ schemaTypePropertiesSchema?: any;
+};
+
+/** The Db2 table dataset. */
+export type Db2TableDataset = Dataset & {
+ /** Polymorphic discriminator, which specifies the different types this object can be */
+ type: "Db2Table";
+ /** This property will be retired. Please consider using schema + table properties instead. */
+ tableName?: any;
+ /** The Db2 schema name. Type: string (or Expression with resultType string). */
+ schemaTypePropertiesSchema?: any;
+ /** The Db2 table name. Type: string (or Expression with resultType string). */
+ table?: any;
+};
+
+/** The relational table dataset. */
+export type RelationalTableDataset = Dataset & {
 /** Polymorphic discriminator, which specifies the different types this object can be */
- type: "SapCloudForCustomer";
- /** The URL of SAP Cloud for Customer OData API. For example, '[https://[tenantname].crm.ondemand.com/sap/c4c/odata/v1]'. Type: string (or Expression with resultType string). */
- url: any;
- /** The username for Basic authentication. Type: string (or Expression with resultType string). */
- username?: any;
- /** The password for Basic authentication. */
- password?: SecretBaseUnion;
- /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Either encryptedCredential or username/password must be provided. Type: string (or Expression with resultType string). */
- encryptedCredential?: any;
+ type: "RelationalTable";
+ /** The relational table name. Type: string (or Expression with resultType string). */
+ tableName?: any;
};
-/** Linked service for SAP ERP Central Component(SAP ECC). */
-export type SapEccLinkedService = LinkedService & {
+/** The Informix table dataset. */
+export type InformixTableDataset = Dataset & {
 /** Polymorphic discriminator, which specifies the different types this object can be */
- type: "SapEcc";
- /** The URL of SAP ECC OData API. For example, '[https://hostname:port/sap/opu/odata/sap/servicename/]'. Type: string (or Expression with resultType string). */
- url: string;
- /** The username for Basic authentication. Type: string (or Expression with resultType string). */
- username?: string;
- /** The password for Basic authentication. */
- password?: SecretBaseUnion;
- /** The encrypted credential used for authentication.
Credentials are encrypted using the integration runtime credential manager. Either encryptedCredential or username/password must be provided. Type: string (or Expression with resultType string). */ - encryptedCredential?: string; + type: "InformixTable"; + /** The Informix table name. Type: string (or Expression with resultType string). */ + tableName?: any; }; -/** SAP Business Warehouse Open Hub Destination Linked Service. */ -export type SapOpenHubLinkedService = LinkedService & { +/** The ODBC table dataset. */ +export type OdbcTableDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "SapOpenHub"; - /** Host name of the SAP BW instance where the open hub destination is located. Type: string (or Expression with resultType string). */ - server: any; - /** System number of the BW system where the open hub destination is located. (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). */ - systemNumber: any; - /** Client ID of the client on the BW system where the open hub destination is located. (Usually a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). */ - clientId: any; - /** Language of the BW system where the open hub destination is located. The default value is EN. Type: string (or Expression with resultType string). */ - language?: any; - /** SystemID of the SAP system where the table is located. Type: string (or Expression with resultType string). */ - systemId?: any; - /** Username to access the SAP BW server where the open hub destination is located. Type: string (or Expression with resultType string). */ - userName?: any; - /** Password to access the SAP BW server where the open hub destination is located. */ - password?: SecretBaseUnion; - /** The hostname of the SAP Message Server. Type: string (or Expression with resultType string). */ - messageServer?: any; - /** The service name or port number of the Message Server. Type: string (or Expression with resultType string). */ - messageServerService?: any; - /** The Logon Group for the SAP System. Type: string (or Expression with resultType string). */ - logonGroup?: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "OdbcTable"; + /** The ODBC table name. Type: string (or Expression with resultType string). */ + tableName?: any; }; -/** Rest Service linked service. */ -export type RestServiceLinkedService = LinkedService & { +/** The MySQL table dataset. */ +export type MySqlTableDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "RestService"; - /** The base URL of the REST service. */ - url: any; - /** Whether to validate server side SSL certificate when connecting to the endpoint.The default value is true. Type: boolean (or Expression with resultType boolean). */ - enableServerCertificateValidation?: any; - /** Type of authentication used to connect to the REST service. */ - authenticationType: RestServiceAuthenticationType; - /** The user name used in Basic authentication type. */ - userName?: any; - /** The password used in Basic authentication type. */ - password?: SecretBaseUnion; - /** The application's client ID used in AadServicePrincipal authentication type. 
*/ - servicePrincipalId?: any; - /** The application's key used in AadServicePrincipal authentication type. */ - servicePrincipalKey?: SecretBaseUnion; - /** The tenant information (domain name or tenant ID) used in AadServicePrincipal authentication type under which your application resides. */ - tenant?: any; - /** Indicates the azure cloud type of the service principle auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */ - azureCloudType?: any; - /** The resource you are requesting authorization to use. */ - aadResourceId?: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "MySqlTable"; + /** The MySQL table name. Type: string (or Expression with resultType string). */ + tableName?: any; }; -/** Linked service for Amazon S3. */ -export type AmazonS3LinkedService = LinkedService & { +/** The PostgreSQL table dataset. */ +export type PostgreSqlTableDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "AmazonS3"; - /** The authentication type of S3. Allowed value: AccessKey (default) or TemporarySecurityCredentials. Type: string (or Expression with resultType string). */ - authenticationType?: any; - /** The access key identifier of the Amazon S3 Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). */ - accessKeyId?: any; - /** The secret access key of the Amazon S3 Identity and Access Management (IAM) user. */ - secretAccessKey?: SecretBaseUnion; - /** This value specifies the endpoint to access with the S3 Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). */ - serviceUrl?: any; - /** The session token for the S3 temporary security credential. */ - sessionToken?: SecretBaseUnion; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "PostgreSqlTable"; + /** This property will be retired. Please consider using schema + table properties instead. */ + tableName?: any; + /** The PostgreSQL table name. Type: string (or Expression with resultType string). */ + table?: any; + /** The PostgreSQL schema name. Type: string (or Expression with resultType string). */ + schemaTypePropertiesSchema?: any; }; -/** Linked service for Amazon Redshift. */ -export type AmazonRedshiftLinkedService = LinkedService & { +/** The Microsoft Access table dataset. */ +export type MicrosoftAccessTableDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "AmazonRedshift"; - /** The name of the Amazon Redshift server. Type: string (or Expression with resultType string). */ - server: any; - /** The username of the Amazon Redshift source. Type: string (or Expression with resultType string). */ - username?: any; - /** The password of the Amazon Redshift source. */ - password?: SecretBaseUnion; - /** The database name of the Amazon Redshift source. Type: string (or Expression with resultType string). 
*/ - database: any; - /** The TCP port number that the Amazon Redshift server uses to listen for client connections. The default value is 5439. Type: integer (or Expression with resultType integer). */ - port?: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "MicrosoftAccessTable"; + /** The Microsoft Access table name. Type: string (or Expression with resultType string). */ + tableName?: any; }; -/** Custom linked service. */ -export type CustomDataSourceLinkedService = LinkedService & { +/** The Salesforce object dataset. */ +export type SalesforceObjectDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "CustomDataSource"; - /** Custom linked service properties. */ - typeProperties: any; + type: "SalesforceObject"; + /** The Salesforce object API name. Type: string (or Expression with resultType string). */ + objectApiName?: any; }; -/** Linked service for Windows Azure Search Service. */ -export type AzureSearchLinkedService = LinkedService & { +/** The Salesforce Service Cloud object dataset. */ +export type SalesforceServiceCloudObjectDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "AzureSearch"; - /** URL for Azure Search service. Type: string (or Expression with resultType string). */ - url: any; - /** Admin Key for Azure Search service */ - key?: SecretBaseUnion; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "SalesforceServiceCloudObject"; + /** The Salesforce Service Cloud object API name. Type: string (or Expression with resultType string). */ + objectApiName?: any; }; -/** Linked service for an HTTP source. */ -export type HttpLinkedService = LinkedService & { +/** The Sybase table dataset. */ +export type SybaseTableDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "HttpServer"; - /** The base URL of the HTTP endpoint, e.g. http://www.microsoft.com. Type: string (or Expression with resultType string). */ - url: any; - /** The authentication type to be used to connect to the HTTP server. */ - authenticationType?: HttpAuthenticationType; - /** User name for Basic, Digest, or Windows authentication. Type: string (or Expression with resultType string). */ - userName?: any; - /** Password for Basic, Digest, Windows, or ClientCertificate with EmbeddedCertData authentication. */ - password?: SecretBaseUnion; - /** Base64 encoded certificate data for ClientCertificate authentication. For on-premises copy with ClientCertificate authentication, either CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression with resultType string). */ - embeddedCertData?: any; - /** Thumbprint of certificate for ClientCertificate authentication. Only valid for on-premises copy. For on-premises copy with ClientCertificate authentication, either CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression with resultType string). */ - certThumbprint?: any; - /** The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; - /** If true, validate the HTTPS server SSL certificate. Default value is true. Type: boolean (or Expression with resultType boolean). */ - enableServerCertificateValidation?: any; + type: "SybaseTable"; + /** The Sybase table name. Type: string (or Expression with resultType string). */ + tableName?: any; }; -/** A FTP server Linked Service. */ -export type FtpServerLinkedService = LinkedService & { +/** The SAP BW cube dataset. */ +export type SapBwCubeDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "FtpServer"; - /** Host name of the FTP server. Type: string (or Expression with resultType string). */ - host: any; - /** The TCP port number that the FTP server uses to listen for client connections. Default value is 21. Type: integer (or Expression with resultType integer), minimum: 0. */ - port?: any; - /** The authentication type to be used to connect to the FTP server. */ - authenticationType?: FtpAuthenticationType; - /** Username to logon the FTP server. Type: string (or Expression with resultType string). */ - userName?: any; - /** Password to logon the FTP server. */ - password?: SecretBaseUnion; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; - /** If true, connect to the FTP server over SSL/TLS channel. Default value is true. Type: boolean (or Expression with resultType boolean). */ - enableSsl?: any; - /** If true, validate the FTP server SSL certificate when connect over SSL/TLS channel. Default value is true. Type: boolean (or Expression with resultType boolean). */ - enableServerCertificateValidation?: any; + type: "SapBwCube"; }; -/** A linked service for an SSH File Transfer Protocol (SFTP) server. */ -export type SftpServerLinkedService = LinkedService & { +/** The path of the SAP Cloud for Customer OData entity. */ +export type SapCloudForCustomerResourceDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Sftp"; - /** The SFTP server host name. Type: string (or Expression with resultType string). */ - host: any; - /** The TCP port number that the SFTP server uses to listen for client connections. Default value is 22. Type: integer (or Expression with resultType integer), minimum: 0. */ - port?: any; - /** The authentication type to be used to connect to the FTP server. */ - authenticationType?: SftpAuthenticationType; - /** The username used to log on to the SFTP server. Type: string (or Expression with resultType string). */ - userName?: any; - /** Password to logon the SFTP server for Basic authentication. */ - password?: SecretBaseUnion; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; - /** The SSH private key file path for SshPublicKey authentication. Only valid for on-premises copy. For on-premises copy with SshPublicKey authentication, either PrivateKeyPath or PrivateKeyContent should be specified. SSH private key should be OpenSSH format. Type: string (or Expression with resultType string). 
*/ - privateKeyPath?: any; - /** Base64 encoded SSH private key content for SshPublicKey authentication. For on-premises copy with SshPublicKey authentication, either PrivateKeyPath or PrivateKeyContent should be specified. SSH private key should be OpenSSH format. */ - privateKeyContent?: SecretBaseUnion; - /** The password to decrypt the SSH private key if the SSH private key is encrypted. */ - passPhrase?: SecretBaseUnion; - /** If true, skip the SSH host key validation. Default value is false. Type: boolean (or Expression with resultType boolean). */ - skipHostKeyValidation?: any; - /** The host key finger-print of the SFTP server. When SkipHostKeyValidation is false, HostKeyFingerprint should be specified. Type: string (or Expression with resultType string). */ - hostKeyFingerprint?: any; + type: "SapCloudForCustomerResource"; + /** The path of the SAP Cloud for Customer OData entity. Type: string (or Expression with resultType string). */ + path: any; }; -/** SAP Business Warehouse Linked Service. */ -export type SapBWLinkedService = LinkedService & { +/** The path of the SAP ECC OData entity. */ +export type SapEccResourceDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "SapBW"; - /** Host name of the SAP BW instance. Type: string (or Expression with resultType string). */ - server: any; - /** System number of the BW system. (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). */ - systemNumber: any; - /** Client ID of the client on the BW system. (Usually a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). */ - clientId: any; - /** Username to access the SAP BW server. Type: string (or Expression with resultType string). */ - userName?: any; - /** Password to access the SAP BW server. */ - password?: SecretBaseUnion; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "SapEccResource"; + /** The path of the SAP ECC OData entity. Type: string (or Expression with resultType string). */ + path: any; +}; + +/** SAP HANA Table properties. */ +export type SapHanaTableDataset = Dataset & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "SapHanaTable"; + /** The schema name of SAP HANA. Type: string (or Expression with resultType string). */ + schemaTypePropertiesSchema?: any; + /** The table name of SAP HANA. Type: string (or Expression with resultType string). */ + table?: any; }; -/** SAP HANA Linked Service. */ -export type SapHanaLinkedService = LinkedService & { +/** Sap Business Warehouse Open Hub Destination Table properties. */ +export type SapOpenHubTableDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "SapHana"; - /** SAP HANA ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ - connectionString?: any; - /** Host name of the SAP HANA server. Type: string (or Expression with resultType string). */ - server: any; - /** The authentication type to be used to connect to the SAP HANA server. */ - authenticationType?: SapHanaAuthenticationType; - /** Username to access the SAP HANA server. Type: string (or Expression with resultType string). 
*/
- userName?: any;
- /** Password to access the SAP HANA server. */
- password?: SecretBaseUnion;
- /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
- encryptedCredential?: any;
+ type: "SapOpenHubTable";
+ /** The name of the Open Hub Destination with destination type as Database Table. Type: string (or Expression with resultType string). */
+ openHubDestinationName: any;
+ /** Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with resultType boolean). */
+ excludeLastRequest?: any;
+ /** The ID of the request for delta loading. Once it is set, only data with requestId larger than the value of this property will be retrieved. The default value is 0. Type: integer (or Expression with resultType integer). */
+ baseRequestId?: any;
};
-/** Amazon Marketplace Web Service linked service. */
-export type AmazonMWSLinkedService = LinkedService & {
+/** The on-premises SQL Server dataset. */
+export type SqlServerTableDataset = Dataset & {
 /** Polymorphic discriminator, which specifies the different types this object can be */
- type: "AmazonMWS";
- /** The endpoint of the Amazon MWS server, (i.e. mws.amazonservices.com) */
- endpoint: any;
- /** The Amazon Marketplace ID you want to retrieve data from. To retrieve data from multiple Marketplace IDs, separate them with a comma (,). (i.e. A2EUQ1WTGCTBG2) */
- marketplaceID: any;
- /** The Amazon seller ID. */
- sellerID: any;
- /** The Amazon MWS authentication token. */
- mwsAuthToken?: SecretBaseUnion;
- /** The access key id used to access data. */
- accessKeyId: any;
- /** The secret key used to access data. */
- secretKey?: SecretBaseUnion;
- /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */
- useEncryptedEndpoints?: any;
- /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */
- useHostVerification?: any;
- /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */
- usePeerVerification?: any;
- /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
- encryptedCredential?: any;
+ type: "SqlServerTable";
+ /** This property will be retired. Please consider using schema + table properties instead. */
+ tableName?: any;
+ /** The schema name of the SQL Server dataset. Type: string (or Expression with resultType string). */
+ schemaTypePropertiesSchema?: any;
+ /** The table name of the SQL Server dataset. Type: string (or Expression with resultType string). */
+ table?: any;
};
-/** Azure PostgreSQL linked service. */
-export type AzurePostgreSqlLinkedService = LinkedService & {
+/** The Amazon RDS for SQL Server dataset. */
+export type AmazonRdsForSqlServerTableDataset = Dataset & {
 /** Polymorphic discriminator, which specifies the different types this object can be */
- type: "AzurePostgreSql";
- /** An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */
- connectionString?: any;
- /** The Azure key vault secret reference of password in connection string. */
- password?: AzureKeyVaultSecretReference;
- /** The encrypted credential used for authentication.
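// --- Editorial sketch (illustrative only; not part of the generated patch) ---
// A hypothetical delta-load configuration for SapOpenHubTableDataset above: only
// requests with an ID greater than baseRequestId are read, and the most recent request
// can be excluded while it may still be running. The linkedServiceName shape is assumed.
const openHubExample: SapOpenHubTableDataset = {
  type: "SapOpenHubTable",
  linkedServiceName: { type: "LinkedServiceReference", referenceName: "mySapBw" },
  openHubDestinationName: "OHD_SALES",
  excludeLastRequest: true,
  baseRequestId: 42
};
// --- end editorial sketch ---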
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "AmazonRdsForSqlServerTable"; + /** The schema name of the SQL Server dataset. Type: string (or Expression with resultType string). */ + schemaTypePropertiesSchema?: any; + /** The table name of the SQL Server dataset. Type: string (or Expression with resultType string). */ + table?: any; }; -/** Concur Service linked service. */ -export type ConcurLinkedService = LinkedService & { +/** A Rest service dataset. */ +export type RestResourceDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Concur"; - /** Properties used to connect to Concur. It is mutually exclusive with any other properties in the linked service. Type: object. */ - connectionProperties?: any; - /** Application client_id supplied by Concur App Management. */ - clientId: any; - /** The user name that you use to access Concur Service. */ - username: any; - /** The password corresponding to the user name that you provided in the username field. */ - password?: SecretBaseUnion; - /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */ - useEncryptedEndpoints?: any; - /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */ - useHostVerification?: any; - /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */ - usePeerVerification?: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "RestResource"; + /** The relative URL to the resource that the RESTful API provides. Type: string (or Expression with resultType string). */ + relativeUrl?: any; + /** The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). */ + requestMethod?: any; + /** The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType string). */ + requestBody?: any; + /** The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). */ + additionalHeaders?: any; + /** The pagination rules to compose next page requests. Type: string (or Expression with resultType string). */ + paginationRules?: any; }; -/** Couchbase server linked service. */ -export type CouchbaseLinkedService = LinkedService & { +/** SAP Table Resource properties. */ +export type SapTableResourceDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Couchbase"; - /** An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ - connectionString?: any; - /** The Azure key vault secret reference of credString in connection string. */ - credString?: AzureKeyVaultSecretReference; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "SapTableResource"; + /** The name of the SAP Table. Type: string (or Expression with resultType string). 
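// --- Editorial sketch (illustrative only; not part of the generated patch) ---
// RestResourceDataset above pairs a relative URL and verb with optional headers and
// pagination rules; the rule object shown is a common pattern, not a documented shape.
// The linkedServiceName reference is assumed from the Dataset base type.
const restExample: RestResourceDataset = {
  type: "RestResource",
  linkedServiceName: { type: "LinkedServiceReference", referenceName: "myRestService" },
  relativeUrl: "/v1/items",
  requestMethod: "GET",
  paginationRules: { AbsoluteUrl: "$.nextLink" } // hypothetical rule value
};
// --- end editorial sketch ---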
*/ + tableName: any; }; -/** Drill server linked service. */ -export type DrillLinkedService = LinkedService & { +/** The dataset points to a HTML table in the web page. */ +export type WebTableDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Drill"; - /** An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ - connectionString?: any; - /** The Azure key vault secret reference of password in connection string. */ - pwd?: AzureKeyVaultSecretReference; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "WebTable"; + /** The zero-based index of the table in the web page. Type: integer (or Expression with resultType integer), minimum: 0. */ + index: any; + /** The relative URL to the web page from the linked service URL. Type: string (or Expression with resultType string). */ + path?: any; }; -/** Eloqua server linked service. */ -export type EloquaLinkedService = LinkedService & { +/** The Azure Search Index. */ +export type AzureSearchIndexDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Eloqua"; - /** The endpoint of the Eloqua server. (i.e. eloqua.example.com) */ - endpoint: any; - /** The site name and user name of your Eloqua account in the form: sitename/username. (i.e. Eloqua/Alice) */ - username: any; - /** The password corresponding to the user name. */ - password?: SecretBaseUnion; - /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */ - useEncryptedEndpoints?: any; - /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */ - useHostVerification?: any; - /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */ - usePeerVerification?: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "AzureSearchIndex"; + /** The name of the Azure Search Index. Type: string (or Expression with resultType string). */ + indexName: any; }; -/** Google BigQuery service linked service. */ -export type GoogleBigQueryLinkedService = LinkedService & { +/** A file in an HTTP web server. */ +export type HttpDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "GoogleBigQuery"; - /** The default BigQuery project to query against. */ - project: any; - /** A comma-separated list of public BigQuery projects to access. */ - additionalProjects?: any; - /** Whether to request access to Google Drive. Allowing Google Drive access enables support for federated tables that combine BigQuery data with data from Google Drive. The default value is false. */ - requestGoogleDriveScope?: any; - /** The OAuth 2.0 authentication mechanism used for authentication. ServiceAuthentication can only be used on self-hosted IR. */ - authenticationType: GoogleBigQueryAuthenticationType; - /** The refresh token obtained from Google for authorizing access to BigQuery for UserAuthentication. 
*/ - refreshToken?: SecretBaseUnion; - /** The client id of the google application used to acquire the refresh token. Type: string (or Expression with resultType string). */ - clientId?: any; - /** The client secret of the google application used to acquire the refresh token. */ - clientSecret?: SecretBaseUnion; - /** The service account email ID that is used for ServiceAuthentication and can only be used on self-hosted IR. */ - email?: any; - /** The full path to the .p12 key file that is used to authenticate the service account email address and can only be used on self-hosted IR. */ - keyFilePath?: any; - /** The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. */ - trustedCertPath?: any; - /** Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. */ - useSystemTrustStore?: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "HttpFile"; + /** The relative URL, based on the URL in the HttpLinkedService, that refers to an HTTP file. Type: string (or Expression with resultType string). */ + relativeUrl?: any; + /** The HTTP method for the HTTP request. Type: string (or Expression with resultType string). */ + requestMethod?: any; + /** The body for the HTTP request. Type: string (or Expression with resultType string). */ + requestBody?: any; + /** + * The headers for the HTTP Request. e.g. request-header-name-1:request-header-value-1 + * ... + * request-header-name-n:request-header-value-n Type: string (or Expression with resultType string). + */ + additionalHeaders?: any; + /** The format of files. */ + format?: DatasetStorageFormatUnion; + /** The data compression method used on files. */ + compression?: DatasetCompressionUnion; }; -/** Greenplum Database linked service. */ -export type GreenplumLinkedService = LinkedService & { +/** Amazon Marketplace Web Service dataset. */ +export type AmazonMWSObjectDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Greenplum"; - /** An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ - connectionString?: any; - /** The Azure key vault secret reference of password in connection string. */ - pwd?: AzureKeyVaultSecretReference; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "AmazonMWSObject"; + /** The table name. Type: string (or Expression with resultType string). */ + tableName?: any; };
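The additionalHeaders convention on HttpDataset above is easy to misread: it is one string carrying name:value pairs, one per line. A hedged sketch follows; the import path, linked service name, and header values are illustrative assumptions.

import type { HttpDataset } from "@azure/synapse-artifacts";

// A sketch only; "ExampleHttpLinkedService" is a placeholder.
const httpFile: HttpDataset = {
  type: "HttpFile",
  linkedServiceName: { type: "LinkedServiceReference", referenceName: "ExampleHttpLinkedService" },
  relativeUrl: "exports/latest.csv",
  requestMethod: "GET",
  // One request-header-name:request-header-value pair per line, all in a single string.
  additionalHeaders: "Accept: text/csv\nx-example-trace: 1"
};

-/** HBase server linked service. */ -export type HBaseLinkedService = LinkedService & { +/** Azure PostgreSQL dataset. */ +export type AzurePostgreSqlTableDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "HBase"; - /** The IP address or host name of the HBase server. (i.e. 192.168.222.160) */ - host: any; - /** The TCP port that the HBase instance uses to listen for client connections. The default value is 9090. */ - port?: any; - /** The partial URL corresponding to the HBase server. 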
(i.e. /gateway/sandbox/hbase/version) */ - httpPath?: any; - /** The authentication mechanism to use to connect to the HBase server. */ - authenticationType: HBaseAuthenticationType; - /** The user name used to connect to the HBase instance. */ - username?: any; - /** The password corresponding to the user name. */ - password?: SecretBaseUnion; - /** Specifies whether the connections to the server are encrypted using SSL. The default value is false. */ - enableSsl?: any; - /** The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. */ - trustedCertPath?: any; - /** Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. */ - allowHostNameCNMismatch?: any; - /** Specifies whether to allow self-signed certificates from the server. The default value is false. */ - allowSelfSignedServerCert?: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "AzurePostgreSqlTable"; + /** The table name of the Azure PostgreSQL database which includes both schema and table. Type: string (or Expression with resultType string). */ + tableName?: any; + /** The table name of the Azure PostgreSQL database. Type: string (or Expression with resultType string). */ + table?: any; + /** The schema name of the Azure PostgreSQL database. Type: string (or Expression with resultType string). */ + schemaTypePropertiesSchema?: any; }; -/** Hive Server linked service. */ -export type HiveLinkedService = LinkedService & { +/** Concur Service dataset. */ +export type ConcurObjectDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Hive"; - /** IP address or host name of the Hive server, separated by ';' for multiple hosts (only when serviceDiscoveryMode is enable). */ - host: any; - /** The TCP port that the Hive server uses to listen for client connections. */ - port?: any; - /** The type of Hive server. */ - serverType?: HiveServerType; - /** The transport protocol to use in the Thrift layer. */ - thriftTransportProtocol?: HiveThriftTransportProtocol; - /** The authentication method used to access the Hive server. */ - authenticationType: HiveAuthenticationType; - /** true to indicate using the ZooKeeper service, false not. */ - serviceDiscoveryMode?: any; - /** The namespace on ZooKeeper under which Hive Server 2 nodes are added. */ - zooKeeperNameSpace?: any; - /** Specifies whether the driver uses native HiveQL queries,or converts them into an equivalent form in HiveQL. */ - useNativeQuery?: any; - /** The user name that you use to access Hive Server. */ - username?: any; - /** The password corresponding to the user name that you provided in the Username field */ - password?: SecretBaseUnion; - /** The partial URL corresponding to the Hive server. */ - httpPath?: any; - /** Specifies whether the connections to the server are encrypted using SSL. The default value is false. */ - enableSsl?: any; - /** The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. 
The default value is the cacerts.pem file installed with the IR. */ - trustedCertPath?: any; - /** Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. */ - useSystemTrustStore?: any; - /** Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. */ - allowHostNameCNMismatch?: any; - /** Specifies whether to allow self-signed certificates from the server. The default value is false. */ - allowSelfSignedServerCert?: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "ConcurObject"; + /** The table name. Type: string (or Expression with resultType string). */ + tableName?: any; }; -/** Hubspot Service linked service. */ -export type HubspotLinkedService = LinkedService & { +/** Couchbase server dataset. */ +export type CouchbaseTableDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Hubspot"; - /** The client ID associated with your Hubspot application. */ - clientId: any; - /** The client secret associated with your Hubspot application. */ - clientSecret?: SecretBaseUnion; - /** The access token obtained when initially authenticating your OAuth integration. */ - accessToken?: SecretBaseUnion; - /** The refresh token obtained when initially authenticating your OAuth integration. */ - refreshToken?: SecretBaseUnion; - /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */ - useEncryptedEndpoints?: any; - /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */ - useHostVerification?: any; - /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */ - usePeerVerification?: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "CouchbaseTable"; + /** The table name. Type: string (or Expression with resultType string). */ + tableName?: any; }; -/** Impala server linked service. */ -export type ImpalaLinkedService = LinkedService & { +/** Drill server dataset. */ +export type DrillTableDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Impala"; - /** The IP address or host name of the Impala server. (i.e. 192.168.222.160) */ - host: any; - /** The TCP port that the Impala server uses to listen for client connections. The default value is 21050. */ - port?: any; - /** The authentication type to use. */ - authenticationType: ImpalaAuthenticationType; - /** The user name used to access the Impala server. The default value is anonymous when using SASLUsername. */ - username?: any; - /** The password corresponding to the user name when using UsernameAndPassword. */ - password?: SecretBaseUnion; - /** Specifies whether the connections to the server are encrypted using SSL. The default value is false. 
*/ - enableSsl?: any; - /** The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. */ - trustedCertPath?: any; - /** Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. */ - useSystemTrustStore?: any; - /** Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. */ - allowHostNameCNMismatch?: any; - /** Specifies whether to allow self-signed certificates from the server. The default value is false. */ - allowSelfSignedServerCert?: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "DrillTable"; + /** This property will be retired. Please consider using schema + table properties instead. */ + tableName?: any; + /** The table name of the Drill. Type: string (or Expression with resultType string). */ + table?: any; + /** The schema name of the Drill. Type: string (or Expression with resultType string). */ + schemaTypePropertiesSchema?: any; }; -/** Jira Service linked service. */ -export type JiraLinkedService = LinkedService & { +/** Eloqua server dataset. */ +export type EloquaObjectDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Jira"; - /** The IP address or host name of the Jira service. (e.g. jira.example.com) */ - host: any; - /** The TCP port that the Jira server uses to listen for client connections. The default value is 443 if connecting through HTTPS, or 8080 if connecting through HTTP. */ - port?: any; - /** The user name that you use to access Jira Service. */ - username: any; - /** The password corresponding to the user name that you provided in the username field. */ - password?: SecretBaseUnion; - /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */ - useEncryptedEndpoints?: any; - /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */ - useHostVerification?: any; - /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */ - usePeerVerification?: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "EloquaObject"; + /** The table name. Type: string (or Expression with resultType string). */ + tableName?: any; }; -/** Magento server linked service. */ -export type MagentoLinkedService = LinkedService & { +/** Google BigQuery service dataset. */ +export type GoogleBigQueryObjectDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Magento"; - /** The URL of the Magento instance. (i.e. 192.168.222.110/magento3) */ - host: any; - /** The access token from Magento. */ - accessToken?: SecretBaseUnion; - /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. 
*/ - useEncryptedEndpoints?: any; - /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */ - useHostVerification?: any; - /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */ - usePeerVerification?: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "GoogleBigQueryObject"; + /** This property will be retired. Please consider using database + table properties instead. */ + tableName?: any; + /** The table name of the Google BigQuery. Type: string (or Expression with resultType string). */ + table?: any; + /** The database name of the Google BigQuery. Type: string (or Expression with resultType string). */ + dataset?: any; }; -/** MariaDB server linked service. */ -export type MariaDBLinkedService = LinkedService & { +/** Greenplum Database dataset. */ +export type GreenplumTableDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "MariaDB"; - /** An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ - connectionString?: any; - /** The Azure key vault secret reference of password in connection string. */ - pwd?: AzureKeyVaultSecretReference; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "GreenplumTable"; + /** This property will be retired. Please consider using schema + table properties instead. */ + tableName?: any; + /** The table name of Greenplum. Type: string (or Expression with resultType string). */ + table?: any; + /** The schema name of Greenplum. Type: string (or Expression with resultType string). */ + schemaTypePropertiesSchema?: any; }; -/** Azure Database for MariaDB linked service. */ -export type AzureMariaDBLinkedService = LinkedService & { +/** HBase server dataset. */ +export type HBaseObjectDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "AzureMariaDB"; - /** An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ - connectionString?: any; - /** The Azure key vault secret reference of password in connection string. */ - pwd?: AzureKeyVaultSecretReference; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "HBaseObject"; + /** The table name. Type: string (or Expression with resultType string). */ + tableName?: any; }; -/** Marketo server linked service. */ -export type MarketoLinkedService = LinkedService & { +/** Hive Server dataset. */ +export type HiveObjectDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Marketo"; - /** The endpoint of the Marketo server. (i.e. 123-ABC-321.mktorest.com) */ - endpoint: any; - /** The client Id of your Marketo service. */ - clientId: any; - /** The client secret of your Marketo service. 
*/ - clientSecret?: SecretBaseUnion; - /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */ - useEncryptedEndpoints?: any; - /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */ - useHostVerification?: any; - /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */ - usePeerVerification?: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "HiveObject"; + /** This property will be retired. Please consider using schema + table properties instead. */ + tableName?: any; + /** The table name of the Hive. Type: string (or Expression with resultType string). */ + table?: any; + /** The schema name of the Hive. Type: string (or Expression with resultType string). */ + schemaTypePropertiesSchema?: any; }; -/** Paypal Service linked service. */ -export type PaypalLinkedService = LinkedService & { +/** Hubspot Service dataset. */ +export type HubspotObjectDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Paypal"; - /** The URL of the PayPal instance. (i.e. api.sandbox.paypal.com) */ - host: any; - /** The client ID associated with your PayPal application. */ - clientId: any; - /** The client secret associated with your PayPal application. */ - clientSecret?: SecretBaseUnion; - /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */ - useEncryptedEndpoints?: any; - /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */ - useHostVerification?: any; - /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */ - usePeerVerification?: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "HubspotObject"; + /** The table name. Type: string (or Expression with resultType string). */ + tableName?: any; +}; + +/** Impala server dataset. */ +export type ImpalaObjectDataset = Dataset & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "ImpalaObject"; + /** This property will be retired. Please consider using schema + table properties instead. */ + tableName?: any; + /** The table name of the Impala. Type: string (or Expression with resultType string). */ + table?: any; + /** The schema name of the Impala. Type: string (or Expression with resultType string). */ + schemaTypePropertiesSchema?: any; +}; + +/** Jira Service dataset. */ +export type JiraObjectDataset = Dataset & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "JiraObject"; + /** The table name. Type: string (or Expression with resultType string). */ + tableName?: any; +}; + +/** Magento server dataset. */ +export type MagentoObjectDataset = Dataset & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "MagentoObject"; + /** The table name. 
Type: string (or Expression with resultType string). */ + tableName?: any; }; -/** Phoenix server linked service. */ -export type PhoenixLinkedService = LinkedService & { +/** MariaDB server dataset. */ +export type MariaDBTableDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Phoenix"; - /** The IP address or host name of the Phoenix server. (i.e. 192.168.222.160) */ - host: any; - /** The TCP port that the Phoenix server uses to listen for client connections. The default value is 8765. */ - port?: any; - /** The partial URL corresponding to the Phoenix server. (i.e. /gateway/sandbox/phoenix/version). The default value is hbasephoenix if using WindowsAzureHDInsightService. */ - httpPath?: any; - /** The authentication mechanism used to connect to the Phoenix server. */ - authenticationType: PhoenixAuthenticationType; - /** The user name used to connect to the Phoenix server. */ - username?: any; - /** The password corresponding to the user name. */ - password?: SecretBaseUnion; - /** Specifies whether the connections to the server are encrypted using SSL. The default value is false. */ - enableSsl?: any; - /** The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. */ - trustedCertPath?: any; - /** Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. */ - useSystemTrustStore?: any; - /** Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. */ - allowHostNameCNMismatch?: any; - /** Specifies whether to allow self-signed certificates from the server. The default value is false. */ - allowSelfSignedServerCert?: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "MariaDBTable"; + /** The table name. Type: string (or Expression with resultType string). */ + tableName?: any; }; -/** Presto server linked service. */ -export type PrestoLinkedService = LinkedService & { +/** Azure Database for MariaDB dataset. */ +export type AzureMariaDBTableDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Presto"; - /** The IP address or host name of the Presto server. (i.e. 192.168.222.160) */ - host: any; - /** The version of the Presto server. (i.e. 0.148-t) */ - serverVersion: any; - /** The catalog context for all request against the server. */ - catalog: any; - /** The TCP port that the Presto server uses to listen for client connections. The default value is 8080. */ - port?: any; - /** The authentication mechanism used to connect to the Presto server. */ - authenticationType: PrestoAuthenticationType; - /** The user name used to connect to the Presto server. */ - username?: any; - /** The password corresponding to the user name. */ - password?: SecretBaseUnion; - /** Specifies whether the connections to the server are encrypted using SSL. The default value is false. */ - enableSsl?: any; - /** The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. 
This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. */ - trustedCertPath?: any; - /** Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. */ - useSystemTrustStore?: any; - /** Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. */ - allowHostNameCNMismatch?: any; - /** Specifies whether to allow self-signed certificates from the server. The default value is false. */ - allowSelfSignedServerCert?: any; - /** The local time zone used by the connection. Valid values for this option are specified in the IANA Time Zone Database. The default value is the system time zone. */ - timeZoneID?: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "AzureMariaDBTable"; + /** The table name. Type: string (or Expression with resultType string). */ + tableName?: any; }; -/** QuickBooks server linked service. */ -export type QuickBooksLinkedService = LinkedService & { +/** Marketo server dataset. */ +export type MarketoObjectDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "QuickBooks"; - /** Properties used to connect to QuickBooks. It is mutually exclusive with any other properties in the linked service. Type: object. */ - connectionProperties?: any; - /** The endpoint of the QuickBooks server. (i.e. quickbooks.api.intuit.com) */ - endpoint: any; - /** The company ID of the QuickBooks company to authorize. */ - companyId: any; - /** The consumer key for OAuth 1.0 authentication. */ - consumerKey: any; - /** The consumer secret for OAuth 1.0 authentication. */ - consumerSecret: SecretBaseUnion; - /** The access token for OAuth 1.0 authentication. */ - accessToken: SecretBaseUnion; - /** The access token secret for OAuth 1.0 authentication. */ - accessTokenSecret: SecretBaseUnion; - /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */ - useEncryptedEndpoints?: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "MarketoObject"; + /** The table name. Type: string (or Expression with resultType string). */ + tableName?: any; }; -/** ServiceNow server linked service. */ -export type ServiceNowLinkedService = LinkedService & { +/** Paypal Service dataset. */ +export type PaypalObjectDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "ServiceNow"; - /** The endpoint of the ServiceNow server. (i.e. .service-now.com) */ - endpoint: any; - /** The authentication type to use. */ - authenticationType: ServiceNowAuthenticationType; - /** The user name used to connect to the ServiceNow server for Basic and OAuth2 authentication. */ - username?: any; - /** The password corresponding to the user name for Basic and OAuth2 authentication. */ - password?: SecretBaseUnion; - /** The client id for OAuth2 authentication. */ - clientId?: any; - /** The client secret for OAuth2 authentication. 
*/ - clientSecret?: SecretBaseUnion; - /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */ - useEncryptedEndpoints?: any; - /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */ - useHostVerification?: any; - /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */ - usePeerVerification?: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "PaypalObject"; + /** The table name. Type: string (or Expression with resultType string). */ + tableName?: any; }; -/** Shopify Service linked service. */ -export type ShopifyLinkedService = LinkedService & { +/** Phoenix server dataset. */ +export type PhoenixObjectDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Shopify"; - /** The endpoint of the Shopify server. (i.e. mystore.myshopify.com) */ - host: any; - /** The API access token that can be used to access Shopify’s data. The token won't expire if it is offline mode. */ - accessToken?: SecretBaseUnion; - /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */ - useEncryptedEndpoints?: any; - /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */ - useHostVerification?: any; - /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */ - usePeerVerification?: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "PhoenixObject"; + /** This property will be retired. Please consider using schema + table properties instead. */ + tableName?: any; + /** The table name of the Phoenix. Type: string (or Expression with resultType string). */ + table?: any; + /** The schema name of the Phoenix. Type: string (or Expression with resultType string). */ + schemaTypePropertiesSchema?: any; }; -/** Spark Server linked service. */ -export type SparkLinkedService = LinkedService & { +/** Presto server dataset. */ +export type PrestoObjectDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Spark"; - /** IP address or host name of the Spark server */ - host: any; - /** The TCP port that the Spark server uses to listen for client connections. */ - port: any; - /** The type of Spark server. */ - serverType?: SparkServerType; - /** The transport protocol to use in the Thrift layer. */ - thriftTransportProtocol?: SparkThriftTransportProtocol; - /** The authentication method used to access the Spark server. */ - authenticationType: SparkAuthenticationType; - /** The user name that you use to access Spark Server. */ - username?: any; - /** The password corresponding to the user name that you provided in the Username field */ - password?: SecretBaseUnion; - /** The partial URL corresponding to the Spark server. 
*/ - httpPath?: any; - /** Specifies whether the connections to the server are encrypted using SSL. The default value is false. */ - enableSsl?: any; - /** The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. */ - trustedCertPath?: any; - /** Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. */ - useSystemTrustStore?: any; - /** Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. */ - allowHostNameCNMismatch?: any; - /** Specifies whether to allow self-signed certificates from the server. The default value is false. */ - allowSelfSignedServerCert?: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "PrestoObject"; + /** This property will be retired. Please consider using schema + table properties instead. */ + tableName?: any; + /** The table name of the Presto. Type: string (or Expression with resultType string). */ + table?: any; + /** The schema name of the Presto. Type: string (or Expression with resultType string). */ + schemaTypePropertiesSchema?: any; }; -/** Square Service linked service. */ -export type SquareLinkedService = LinkedService & { +/** QuickBooks server dataset. */ +export type QuickBooksObjectDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Square"; - /** Properties used to connect to Square. It is mutually exclusive with any other properties in the linked service. Type: object. */ - connectionProperties?: any; - /** The URL of the Square instance. (i.e. mystore.mysquare.com) */ - host: any; - /** The client ID associated with your Square application. */ - clientId: any; - /** The client secret associated with your Square application. */ - clientSecret?: SecretBaseUnion; - /** The redirect URL assigned in the Square application dashboard. (i.e. http://localhost:2500) */ - redirectUri: any; - /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */ - useEncryptedEndpoints?: any; - /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */ - useHostVerification?: any; - /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */ - usePeerVerification?: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "QuickBooksObject"; + /** The table name. Type: string (or Expression with resultType string). */ + tableName?: any; }; -/** Xero Service linked service. */ -export type XeroLinkedService = LinkedService & { +/** ServiceNow server dataset. */ +export type ServiceNowObjectDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Xero"; - /** Properties used to connect to Xero. 
It is mutually exclusive with any other properties in the linked service. Type: object. */ - connectionProperties?: any; - /** The endpoint of the Xero server. (i.e. api.xero.com) */ - host: any; - /** The consumer key associated with the Xero application. */ - consumerKey?: SecretBaseUnion; - /** - * The private key from the .pem file that was generated for your Xero private application. You must include all the text from the .pem file, including the Unix line endings( - * ). - */ - privateKey?: SecretBaseUnion; - /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */ - useEncryptedEndpoints?: any; - /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */ - useHostVerification?: any; - /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */ - usePeerVerification?: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "ServiceNowObject"; + /** The table name. Type: string (or Expression with resultType string). */ + tableName?: any; +}; + +/** Shopify Service dataset. */ +export type ShopifyObjectDataset = Dataset & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "ShopifyObject"; + /** The table name. Type: string (or Expression with resultType string). */ + tableName?: any; +}; + +/** Spark Server dataset. */ +export type SparkObjectDataset = Dataset & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "SparkObject"; + /** This property will be retired. Please consider using schema + table properties instead. */ + tableName?: any; + /** The table name of the Spark. Type: string (or Expression with resultType string). */ + table?: any; + /** The schema name of the Spark. Type: string (or Expression with resultType string). */ + schemaTypePropertiesSchema?: any; }; -/** Zoho server linked service. */ -export type ZohoLinkedService = LinkedService & { +/** Square Service dataset. */ +export type SquareObjectDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Zoho"; - /** Properties used to connect to Zoho. It is mutually exclusive with any other properties in the linked service. Type: object. */ - connectionProperties?: any; - /** The endpoint of the Zoho server. (i.e. crm.zoho.com/crm/private) */ - endpoint: any; - /** The access token for Zoho authentication. */ - accessToken?: SecretBaseUnion; - /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */ - useEncryptedEndpoints?: any; - /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */ - useHostVerification?: any; - /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */ - usePeerVerification?: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "SquareObject"; + /** The table name. 
Type: string (or Expression with resultType string). */ + tableName?: any; }; -/** Vertica linked service. */ -export type VerticaLinkedService = LinkedService & { +/** Xero Service dataset. */ +export type XeroObjectDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Vertica"; - /** An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ - connectionString?: any; - /** The Azure key vault secret reference of password in connection string. */ - pwd?: AzureKeyVaultSecretReference; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "XeroObject"; + /** The table name. Type: string (or Expression with resultType string). */ + tableName?: any; }; -/** Netezza linked service. */ -export type NetezzaLinkedService = LinkedService & { +/** Zoho server dataset. */ +export type ZohoObjectDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Netezza"; - /** An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ - connectionString?: any; - /** The Azure key vault secret reference of password in connection string. */ - pwd?: AzureKeyVaultSecretReference; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "ZohoObject"; + /** The table name. Type: string (or Expression with resultType string). */ + tableName?: any; }; -/** Salesforce Marketing Cloud linked service. */ -export type SalesforceMarketingCloudLinkedService = LinkedService & { +/** Netezza dataset. */ +export type NetezzaTableDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "SalesforceMarketingCloud"; - /** Properties used to connect to Salesforce Marketing Cloud. It is mutually exclusive with any other properties in the linked service. Type: object. */ - connectionProperties?: any; - /** The client ID associated with the Salesforce Marketing Cloud application. Type: string (or Expression with resultType string). */ - clientId: any; - /** The client secret associated with the Salesforce Marketing Cloud application. Type: string (or Expression with resultType string). */ - clientSecret?: SecretBaseUnion; - /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). */ - useEncryptedEndpoints?: any; - /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). */ - useHostVerification?: any; - /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). */ - usePeerVerification?: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "NetezzaTable"; + /** This property will be retired. 
Please consider using schema + table properties instead. */ + tableName?: any; + /** The table name of the Netezza. Type: string (or Expression with resultType string). */ + table?: any; + /** The schema name of the Netezza. Type: string (or Expression with resultType string). */ + schemaTypePropertiesSchema?: any; }; -/** HDInsight ondemand linked service. */ -export type HDInsightOnDemandLinkedService = LinkedService & { +/** Vertica dataset. */ +export type VerticaTableDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "HDInsightOnDemand"; - /** Number of worker/data nodes in the cluster. Suggestion value: 4. Type: string (or Expression with resultType string). */ - clusterSize: any; - /** The allowed idle time for the on-demand HDInsight cluster. Specifies how long the on-demand HDInsight cluster stays alive after completion of an activity run if there are no other active jobs in the cluster. The minimum value is 5 mins. Type: string (or Expression with resultType string). */ - timeToLive: any; - /** Version of the HDInsight cluster.  Type: string (or Expression with resultType string). */ - version: any; - /** Azure Storage linked service to be used by the on-demand cluster for storing and processing data. */ - linkedServiceName: LinkedServiceReference; - /** The customer’s subscription to host the cluster. Type: string (or Expression with resultType string). */ - hostSubscriptionId: any; - /** The service principal id for the hostSubscriptionId. Type: string (or Expression with resultType string). */ - servicePrincipalId?: any; - /** The key for the service principal id. */ - servicePrincipalKey?: SecretBaseUnion; - /** The Tenant id/name to which the service principal belongs. Type: string (or Expression with resultType string). */ - tenant: any; - /** The resource group where the cluster belongs. Type: string (or Expression with resultType string). */ - clusterResourceGroup: any; - /** The prefix of cluster name, postfix will be distinct with timestamp. Type: string (or Expression with resultType string). */ - clusterNamePrefix?: any; - /** The username to access the cluster. Type: string (or Expression with resultType string). */ - clusterUserName?: any; - /** The password to access the cluster. */ - clusterPassword?: SecretBaseUnion; - /** The username to SSH remotely connect to cluster’s node (for Linux). Type: string (or Expression with resultType string). */ - clusterSshUserName?: any; - /** The password to SSH remotely connect cluster’s node (for Linux). */ - clusterSshPassword?: SecretBaseUnion; - /** Specifies additional storage accounts for the HDInsight linked service so that the Data Factory service can register them on your behalf. */ - additionalLinkedServiceNames?: LinkedServiceReference[]; - /** The name of Azure SQL linked service that point to the HCatalog database. The on-demand HDInsight cluster is created by using the Azure SQL database as the metastore. */ - hcatalogLinkedServiceName?: LinkedServiceReference; - /** The cluster type. Type: string (or Expression with resultType string). */ - clusterType?: any; - /** The version of spark if the cluster type is 'spark'. Type: string (or Expression with resultType string). */ - sparkVersion?: any; - /** Specifies the core configuration parameters (as in core-site.xml) for the HDInsight cluster to be created. */ - coreConfiguration?: any; - /** Specifies the HBase configuration parameters (hbase-site.xml) for the HDInsight cluster. 
*/ - hBaseConfiguration?: any; - /** Specifies the HDFS configuration parameters (hdfs-site.xml) for the HDInsight cluster. */ - hdfsConfiguration?: any; - /** Specifies the hive configuration parameters (hive-site.xml) for the HDInsight cluster. */ - hiveConfiguration?: any; - /** Specifies the MapReduce configuration parameters (mapred-site.xml) for the HDInsight cluster. */ - mapReduceConfiguration?: any; - /** Specifies the Oozie configuration parameters (oozie-site.xml) for the HDInsight cluster. */ - oozieConfiguration?: any; - /** Specifies the Storm configuration parameters (storm-site.xml) for the HDInsight cluster. */ - stormConfiguration?: any; - /** Specifies the Yarn configuration parameters (yarn-site.xml) for the HDInsight cluster. */ - yarnConfiguration?: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; - /** Specifies the size of the head node for the HDInsight cluster. */ - headNodeSize?: any; - /** Specifies the size of the data node for the HDInsight cluster. */ - dataNodeSize?: any; - /** Specifies the size of the Zoo Keeper node for the HDInsight cluster. */ - zookeeperNodeSize?: any; - /** Custom script actions to run on HDI ondemand cluster once it's up. Please refer to https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen-us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions. */ - scriptActions?: ScriptAction[]; - /** The ARM resource ID for the vNet to which the cluster should be joined after creation. Type: string (or Expression with resultType string). */ - virtualNetworkId?: any; - /** The ARM resource ID for the subnet in the vNet. If virtualNetworkId was specified, then this property is required. Type: string (or Expression with resultType string). */ - subnetName?: any; + type: "VerticaTable"; + /** This property will be retired. Please consider using schema + table properties instead. */ + tableName?: any; + /** The table name of the Vertica. Type: string (or Expression with resultType string). */ + table?: any; + /** The schema name of the Vertica. Type: string (or Expression with resultType string). */ + schemaTypePropertiesSchema?: any; }; -/** Azure Data Lake Analytics linked service. */ -export type AzureDataLakeAnalyticsLinkedService = LinkedService & { +/** Salesforce Marketing Cloud dataset. */ +export type SalesforceMarketingCloudObjectDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "AzureDataLakeAnalytics"; - /** The Azure Data Lake Analytics account name. Type: string (or Expression with resultType string). */ - accountName: any; - /** The ID of the application used to authenticate against the Azure Data Lake Analytics account. Type: string (or Expression with resultType string). */ - servicePrincipalId?: any; - /** The Key of the application used to authenticate against the Azure Data Lake Analytics account. */ - servicePrincipalKey?: SecretBaseUnion; - /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */ - tenant: any; - /** Data Lake Analytics account subscription ID (if different from Data Factory account). Type: string (or Expression with resultType string). 
*/ - subscriptionId?: any; - /** Data Lake Analytics account resource group name (if different from Data Factory account). Type: string (or Expression with resultType string). */ - resourceGroupName?: any; - /** Azure Data Lake Analytics URI Type: string (or Expression with resultType string). */ - dataLakeAnalyticsUri?: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "SalesforceMarketingCloudObject"; + /** The table name. Type: string (or Expression with resultType string). */ + tableName?: any; }; -/** Azure Databricks linked service. */ -export type AzureDatabricksLinkedService = LinkedService & { +/** Responsys dataset. */ +export type ResponsysObjectDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "AzureDatabricks"; - /** .azuredatabricks.net, domain name of your Databricks deployment. Type: string (or Expression with resultType string). */ - domain: any; - /** Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression with resultType string). */ - accessToken?: SecretBaseUnion; - /** Required to specify MSI, if using Workspace resource id for databricks REST API. Type: string (or Expression with resultType string). */ - authentication?: any; - /** Workspace resource id for databricks REST API. Type: string (or Expression with resultType string). */ - workspaceResourceId?: any; - /** The id of an existing interactive cluster that will be used for all runs of this activity. Type: string (or Expression with resultType string). */ - existingClusterId?: any; - /** The id of an existing instance pool that will be used for all runs of this activity. Type: string (or Expression with resultType string). */ - instancePoolId?: any; - /** If not using an existing interactive cluster, this specifies the Spark version of a new job cluster or instance pool nodes created for each run of this activity. Required if instancePoolId is specified. Type: string (or Expression with resultType string). */ - newClusterVersion?: any; - /** If not using an existing interactive cluster, this specifies the number of worker nodes to use for the new job cluster or instance pool. For new job clusters, this a string-formatted Int32, like '1' means numOfWorker is 1 or '1:10' means auto-scale from 1 (min) to 10 (max). For instance pools, this is a string-formatted Int32, and can only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is specified. Type: string (or Expression with resultType string). */ - newClusterNumOfWorker?: any; - /** The node type of the new job cluster. This property is required if newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is specified, this property is ignored. Type: string (or Expression with resultType string). */ - newClusterNodeType?: any; - /** A set of optional, user-specified Spark configuration key-value pairs. */ - newClusterSparkConf?: { [propertyName: string]: any }; - /** A set of optional, user-specified Spark environment variables key-value pairs. */ - newClusterSparkEnvVars?: { [propertyName: string]: any }; - /** Additional tags for cluster resources. This property is ignored in instance pool configurations. 
*/ - newClusterCustomTags?: { [propertyName: string]: any }; - /** Specify a location to deliver Spark driver, worker, and event logs. Type: string (or Expression with resultType string). */ - newClusterLogDestination?: any; - /** The driver node type for the new job cluster. This property is ignored in instance pool configurations. Type: string (or Expression with resultType string). */ - newClusterDriverNodeType?: any; - /** User-defined initialization scripts for the new cluster. Type: array of strings (or Expression with resultType array of strings). */ - newClusterInitScripts?: any; - /** Enable the elastic disk on the new cluster. This property is now ignored, and takes the default elastic disk behavior in Databricks (elastic disks are always enabled). Type: boolean (or Expression with resultType boolean). */ - newClusterEnableElasticDisk?: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; - /** The policy id for limiting the ability to configure clusters based on a user defined set of rules. Type: string (or Expression with resultType string). */ - policyId?: any; + type: "ResponsysObject"; + /** The table name. Type: string (or Expression with resultType string). */ + tableName?: any; }; -/** Azure Databricks Delta Lake linked service. */ -export type AzureDatabricksDeltaLakeLinkedService = LinkedService & { +/** The path of the Dynamics AX OData entity. */ +export type DynamicsAXResourceDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "AzureDatabricksDeltaLake"; - /** .azuredatabricks.net, domain name of your Databricks deployment. Type: string (or Expression with resultType string). */ - domain: any; - /** Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string, SecureString or AzureKeyVaultSecretReference. */ - accessToken: SecretBaseUnion; - /** The id of an existing interactive cluster that will be used for all runs of this job. Type: string (or Expression with resultType string). */ - clusterId?: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "DynamicsAXResource"; + /** The path of the Dynamics AX OData entity. Type: string (or Expression with resultType string). */ + path: any; }; +/** Oracle Service Cloud dataset. */ +export type OracleServiceCloudObjectDataset = Dataset & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "OracleServiceCloudObject"; + /** The table name. Type: string (or Expression with resultType string). */ + tableName?: any; +}; + +/** The Azure Data Explorer (Kusto) dataset. */ +export type AzureDataExplorerTableDataset = Dataset & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "AzureDataExplorerTable"; + /** The table name of the Azure Data Explorer database. Type: string (or Expression with resultType string). */ + table?: any; };
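Because each dataset variant carries a literal type discriminator, a plain switch narrows it without casts. A sketch over three of the types added here; the union alias is local to the example, not something exported by the generated models.

import type { AzureDataExplorerTableDataset, DynamicsAXResourceDataset, OracleServiceCloudObjectDataset } from "@azure/synapse-artifacts";

// Local union for illustration; the generated union covers far more variants.
type SampleDataset = AzureDataExplorerTableDataset | DynamicsAXResourceDataset | OracleServiceCloudObjectDataset;

// The literal `type` property lets TypeScript narrow each case to its variant.
function describeDataset(dataset: SampleDataset): string {
  switch (dataset.type) {
    case "AzureDataExplorerTable":
      return `Kusto table: ${dataset.table}`;
    case "DynamicsAXResource":
      return `Dynamics AX entity path: ${dataset.path}`;
    case "OracleServiceCloudObject":
      return `Oracle Service Cloud table: ${dataset.tableName}`;
  }
}

-/** Responsys linked service. */ -export type ResponsysLinkedService = LinkedService & { +/** Google AdWords service dataset. 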
*/ +export type GoogleAdWordsObjectDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Responsys"; - /** The endpoint of the Responsys server. */ - endpoint: any; - /** The client ID associated with the Responsys application. Type: string (or Expression with resultType string). */ - clientId: any; - /** The client secret associated with the Responsys application. Type: string (or Expression with resultType string). */ - clientSecret?: SecretBaseUnion; - /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). */ - useEncryptedEndpoints?: any; - /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). */ - useHostVerification?: any; - /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). */ - usePeerVerification?: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "GoogleAdWordsObject"; + /** The table name. Type: string (or Expression with resultType string). */ + tableName?: any; }; -/** Dynamics AX linked service. */ -export type DynamicsAXLinkedService = LinkedService & { +/** The Snowflake dataset. */ +export type SnowflakeDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "DynamicsAX"; - /** The Dynamics AX (or Dynamics 365 Finance and Operations) instance OData endpoint. */ - url: any; - /** Specify the application's client ID. Type: string (or Expression with resultType string). */ - servicePrincipalId: any; - /** Specify the application's key. Mark this field as a SecureString to store it securely in Data Factory, or reference a secret stored in Azure Key Vault. Type: string (or Expression with resultType string). */ - servicePrincipalKey: SecretBaseUnion; - /** Specify the tenant information (domain name or tenant ID) under which your application resides. Retrieve it by hovering the mouse in the top-right corner of the Azure portal. Type: string (or Expression with resultType string). */ - tenant: any; - /** Specify the resource you are requesting authorization. Type: string (or Expression with resultType string). */ - aadResourceId: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "SnowflakeTable"; + /** The schema name of the Snowflake database. Type: string (or Expression with resultType string). */ + schemaTypePropertiesSchema?: any; + /** The table name of the Snowflake database. Type: string (or Expression with resultType string). */ + table?: any; }; -/** Oracle Service Cloud linked service. */ -export type OracleServiceCloudLinkedService = LinkedService & { +/** The SharePoint Online list resource dataset. 
*/ +export type SharePointOnlineListResourceDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "OracleServiceCloud"; - /** The URL of the Oracle Service Cloud instance. */ - host: any; - /** The user name that you use to access Oracle Service Cloud server. */ - username: any; - /** The password corresponding to the user name that you provided in the username key. */ - password: SecretBaseUnion; - /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). */ - useEncryptedEndpoints?: any; - /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). */ - useHostVerification?: any; - /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). */ - usePeerVerification?: any; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "SharePointOnlineListResource"; + /** The name of the SharePoint Online list. Type: string (or Expression with resultType string). */ + listName?: any; }; -/** Google AdWords service linked service. */ -export type GoogleAdWordsLinkedService = LinkedService & { +/** Azure Databricks Delta Lake dataset. */ +export type AzureDatabricksDeltaLakeDataset = Dataset & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "GoogleAdWords"; - /** The Client customer ID of the AdWords account that you want to fetch report data for. */ - clientCustomerID: any; - /** The developer token associated with the manager account that you use to grant access to the AdWords API. */ - developerToken: SecretBaseUnion; - /** The OAuth 2.0 authentication mechanism used for authentication. ServiceAuthentication can only be used on self-hosted IR. */ - authenticationType: GoogleAdWordsAuthenticationType; - /** The refresh token obtained from Google for authorizing access to AdWords for UserAuthentication. */ - refreshToken?: SecretBaseUnion; - /** The client id of the google application used to acquire the refresh token. Type: string (or Expression with resultType string). */ - clientId?: any; - /** The client secret of the google application used to acquire the refresh token. */ - clientSecret?: SecretBaseUnion; - /** The service account email ID that is used for ServiceAuthentication and can only be used on self-hosted IR. */ - email?: any; - /** The full path to the .p12 key file that is used to authenticate the service account email address and can only be used on self-hosted IR. */ - keyFilePath?: any; - /** The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. */ - trustedCertPath?: any; - /** Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. */ - useSystemTrustStore?: any; - /** The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + type: "AzureDatabricksDeltaLakeDataset"; + /** The name of delta table. Type: string (or Expression with resultType string). */ + table?: any; + /** The database name of delta table. Type: string (or Expression with resultType string). */ + database?: any; }; -/** SAP Table Linked Service. */ -export type SapTableLinkedService = LinkedService & { +/** The storage account linked service. */ +export type AzureStorageLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "SapTable"; - /** Host name of the SAP instance where the table is located. Type: string (or Expression with resultType string). */ - server?: any; - /** System number of the SAP system where the table is located. (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). */ - systemNumber?: any; - /** Client ID of the client on the SAP system where the table is located. (Usually a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). */ - clientId?: any; - /** Language of the SAP system where the table is located. The default value is EN. Type: string (or Expression with resultType string). */ - language?: any; - /** SystemID of the SAP system where the table is located. Type: string (or Expression with resultType string). */ - systemId?: any; - /** Username to access the SAP server where the table is located. Type: string (or Expression with resultType string). */ - userName?: any; - /** Password to access the SAP server where the table is located. */ - password?: SecretBaseUnion; - /** The hostname of the SAP Message Server. Type: string (or Expression with resultType string). */ - messageServer?: any; - /** The service name or port number of the Message Server. Type: string (or Expression with resultType string). */ - messageServerService?: any; - /** SNC activation indicator to access the SAP server where the table is located. Must be either 0 (off) or 1 (on). Type: string (or Expression with resultType string). */ - sncMode?: any; - /** Initiator's SNC name to access the SAP server where the table is located. Type: string (or Expression with resultType string). */ - sncMyName?: any; - /** Communication partner's SNC name to access the SAP server where the table is located. Type: string (or Expression with resultType string). */ - sncPartnerName?: any; - /** External security product's library to access the SAP server where the table is located. Type: string (or Expression with resultType string). */ - sncLibraryPath?: any; - /** SNC Quality of Protection. Allowed value include: 1, 2, 3, 8, 9. Type: string (or Expression with resultType string). */ - sncQop?: any; - /** The Logon Group for the SAP System. Type: string (or Expression with resultType string). */ - logonGroup?: any; + type: "AzureStorage"; + /** The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference. */ + connectionString?: any; + /** The Azure key vault secret reference of accountKey in connection string. */ + accountKey?: AzureKeyVaultSecretReference; + /** SAS URI of the Azure Storage resource. It is mutually exclusive with connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. 
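
For the new Delta Lake dataset shape above, a construction sketch; `myDeltaLakeLinkedService` is a placeholder name, and `linkedServiceName` is the reference every `Dataset` requires:

import type { AzureDatabricksDeltaLakeDataset } from "@azure/synapse-artifacts";

// Minimal dataset literal; the `type` literal selects the variant.
const deltaDataset: AzureDatabricksDeltaLakeDataset = {
  type: "AzureDatabricksDeltaLakeDataset",
  linkedServiceName: {
    type: "LinkedServiceReference",
    referenceName: "myDeltaLakeLinkedService", // placeholder
  },
  database: "analytics",
  table: "events",
};
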
*/ + sasUri?: any; + /** The Azure key vault secret reference of sasToken in sas uri. */ + sasToken?: AzureKeyVaultSecretReference; /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + encryptedCredential?: string; }; -/** Azure Data Explorer (Kusto) linked service. */ -export type AzureDataExplorerLinkedService = LinkedService & { +/** The Azure Blob Storage linked service. */ +export type AzureBlobStorageLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "AzureDataExplorer"; - /** The endpoint of Azure Data Explorer (the engine's endpoint). URL will be in the format https://..kusto.windows.net. Type: string (or Expression with resultType string) */ - endpoint: any; - /** The ID of the service principal used to authenticate against Azure Data Explorer. Type: string (or Expression with resultType string). */ - servicePrincipalId: any; - /** The key of the service principal used to authenticate against Kusto. */ - servicePrincipalKey: SecretBaseUnion; - /** Database name for connection. Type: string (or Expression with resultType string). */ - database: any; + type: "AzureBlobStorage"; + /** The connection string. It is mutually exclusive with sasUri, serviceEndpoint property. Type: string, SecureString or AzureKeyVaultSecretReference. */ + connectionString?: any; + /** The Azure key vault secret reference of accountKey in connection string. */ + accountKey?: AzureKeyVaultSecretReference; + /** SAS URI of the Azure Blob Storage resource. It is mutually exclusive with connectionString, serviceEndpoint property. Type: string, SecureString or AzureKeyVaultSecretReference. */ + sasUri?: any; + /** The Azure key vault secret reference of sasToken in sas uri. */ + sasToken?: AzureKeyVaultSecretReference; + /** Blob service endpoint of the Azure Blob Storage resource. It is mutually exclusive with connectionString, sasUri property. */ + serviceEndpoint?: string; + /** The ID of the service principal used to authenticate against Azure Blob Storage. Type: string (or Expression with resultType string). */ + servicePrincipalId?: any; + /** The key of the service principal used to authenticate against Azure Blob Storage. */ + servicePrincipalKey?: SecretBaseUnion; /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */ - tenant: any; + tenant?: any; + /** Indicates the Azure cloud type of the service principal auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */ + azureCloudType?: any; + /** Specify the kind of your storage account. Allowed values are: Storage (general purpose v1), StorageV2 (general purpose v2), BlobStorage, or BlockBlobStorage. Type: string (or Expression with resultType string). */ + accountKind?: string; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: string; }; -/** Azure Function linked service. */ -export type AzureFunctionLinkedService = LinkedService & { +/** The Azure Table Storage linked service. 
*/ +export type AzureTableStorageLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "AzureFunction"; - /** The endpoint of the Azure Function App. URL will be in the format https://.azurewebsites.net. */ - functionAppUrl: any; - /** Function or Host key for Azure Function App. */ - functionKey?: SecretBaseUnion; + type: "AzureTableStorage"; + /** The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference. */ + connectionString?: any; + /** The Azure key vault secret reference of accountKey in connection string. */ + accountKey?: AzureKeyVaultSecretReference; + /** SAS URI of the Azure Storage resource. It is mutually exclusive with connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. */ + sasUri?: any; + /** The Azure key vault secret reference of sasToken in sas uri. */ + sasToken?: AzureKeyVaultSecretReference; /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; + encryptedCredential?: string; }; -/** Snowflake linked service. */ -export type SnowflakeLinkedService = LinkedService & { +/** Azure SQL Data Warehouse linked service. */ +export type AzureSqlDWLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Snowflake"; - /** The connection string of snowflake. Type: string, SecureString. */ + type: "AzureSqlDW"; + /** The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ connectionString: any; /** The Azure key vault secret reference of password in connection string. */ password?: AzureKeyVaultSecretReference; + /** The ID of the service principal used to authenticate against Azure SQL Data Warehouse. Type: string (or Expression with resultType string). */ + servicePrincipalId?: any; + /** The key of the service principal used to authenticate against Azure SQL Data Warehouse. */ + servicePrincipalKey?: SecretBaseUnion; + /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */ + tenant?: any; + /** Indicates the Azure cloud type of the service principal auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */ + azureCloudType?: any; /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ encryptedCredential?: any; }; -/** SharePoint Online List linked service. */ -export type SharePointOnlineListLinkedService = LinkedService & { - /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "SharePointOnlineList"; - /** The URL of the SharePoint Online site. For example, https://contoso.sharepoint.com/sites/siteName. Type: string (or Expression with resultType string). */ - siteUrl: any; - /** The tenant ID under which your application resides. You can find it from Azure portal Active Directory overview page. Type: string (or Expression with resultType string). 
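
The regenerated `AzureSqlDWLinkedService` above now carries optional service principal properties. A hedged sketch of using them (all IDs are placeholders; the secret is an inline `SecureString` here, though a Key Vault reference also satisfies `SecretBaseUnion`):

import type { AzureSqlDWLinkedService } from "@azure/synapse-artifacts";

// Service principal authentication against Azure SQL Data Warehouse.
const sqlDw: AzureSqlDWLinkedService = {
  type: "AzureSqlDW",
  connectionString: "Server=tcp:myserver.database.windows.net;Database=mydw",
  servicePrincipalId: "<application-client-id>",
  servicePrincipalKey: { type: "SecureString", value: "<client-secret>" },
  tenant: "<tenant-id>",
};
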
*/ - tenantId: any; - /** The application (client) ID of your application registered in Azure Active Directory. Make sure to grant SharePoint site permission to this application. Type: string (or Expression with resultType string). */ - servicePrincipalId: any; - /** The client secret of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). */ - servicePrincipalKey: SecretBaseUnion; - /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ - encryptedCredential?: any; -}; - -/** A single Amazon Simple Storage Service (S3) object or a set of S3 objects. */ -export type AmazonS3Dataset = Dataset & { +/** SQL Server linked service. */ +export type SqlServerLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "AmazonS3Object"; - /** The name of the Amazon S3 bucket. Type: string (or Expression with resultType string). */ - bucketName: any; - /** The key of the Amazon S3 object. Type: string (or Expression with resultType string). */ - key?: any; - /** The prefix filter for the S3 object name. Type: string (or Expression with resultType string). */ - prefix?: any; - /** The version for the S3 object. Type: string (or Expression with resultType string). */ - version?: any; - /** The start of S3 object's modified datetime. Type: string (or Expression with resultType string). */ - modifiedDatetimeStart?: any; - /** The end of S3 object's modified datetime. Type: string (or Expression with resultType string). */ - modifiedDatetimeEnd?: any; - /** The format of files. */ - format?: DatasetStorageFormatUnion; - /** The data compression method used for the Amazon S3 object. */ - compression?: DatasetCompressionUnion; + type: "SqlServer"; + /** The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ + connectionString: any; + /** The on-premises Windows authentication user name. Type: string (or Expression with resultType string). */ + userName?: any; + /** The on-premises Windows authentication password. */ + password?: SecretBaseUnion; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** Avro dataset. */ -export type AvroDataset = Dataset & { +/** Amazon RDS for SQL Server linked service. */ +export type AmazonRdsForSqlServerLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Avro"; - /** The location of the avro storage. */ - location?: DatasetLocationUnion; - /** A string from AvroCompressionCodecEnum or an expression */ - avroCompressionCodec?: any; - avroCompressionLevel?: number; + type: "AmazonRdsForSqlServer"; + /** The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ + connectionString: any; + /** The on-premises Windows authentication user name. Type: string (or Expression with resultType string). */ + userName?: any; + /** The on-premises Windows authentication password. */ + password?: SecretBaseUnion; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
*/ + encryptedCredential?: any; }; -/** Excel dataset. */ -export type ExcelDataset = Dataset & { +/** Microsoft Azure SQL Database linked service. */ +export type AzureSqlDatabaseLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Excel"; - /** The location of the excel storage. */ - location?: DatasetLocationUnion; - /** The sheet of excel file. Type: string (or Expression with resultType string). */ - sheetName?: any; - /** The partial data of one sheet. Type: string (or Expression with resultType string). */ - range?: any; - /** When used as input, treat the first row of data as headers. When used as output,write the headers into the output as the first row of data. The default value is false. Type: boolean (or Expression with resultType boolean). */ - firstRowAsHeader?: any; - /** The data compression method used for the json dataset. */ - compression?: DatasetCompressionUnion; - /** The null value string. Type: string (or Expression with resultType string). */ - nullValue?: any; + type: "AzureSqlDatabase"; + /** The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ + connectionString: any; + /** The Azure key vault secret reference of password in connection string. */ + password?: AzureKeyVaultSecretReference; + /** The ID of the service principal used to authenticate against Azure SQL Database. Type: string (or Expression with resultType string). */ + servicePrincipalId?: any; + /** The key of the service principal used to authenticate against Azure SQL Database. */ + servicePrincipalKey?: SecretBaseUnion; + /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */ + tenant?: any; + /** Indicates the Azure cloud type of the service principal auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */ + azureCloudType?: any; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** Parquet dataset. */ -export type ParquetDataset = Dataset & { +/** Azure SQL Managed Instance linked service. */ +export type AzureSqlMILinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Parquet"; - /** The location of the parquet storage. */ - location?: DatasetLocationUnion; - /** A string from ParquetCompressionCodecEnum or an expression */ - compressionCodec?: any; + type: "AzureSqlMI"; + /** The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ + connectionString: any; + /** The Azure key vault secret reference of password in connection string. */ + password?: AzureKeyVaultSecretReference; + /** The ID of the service principal used to authenticate against Azure SQL Managed Instance. Type: string (or Expression with resultType string). */ + servicePrincipalId?: any; + /** The key of the service principal used to authenticate against Azure SQL Managed Instance. */ + servicePrincipalKey?: SecretBaseUnion; + /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */ + tenant?: any; + /** Indicates the Azure cloud type of the service principal auth. 
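
The `password` property above is typed as `AzureKeyVaultSecretReference`, so the secret never appears inline. A sketch, with `myKeyVault` standing in for a previously defined Key Vault linked service:

import type { AzureSqlDatabaseLinkedService } from "@azure/synapse-artifacts";

// Resolve the SQL password from Key Vault at execution time.
const azureSqlDb: AzureSqlDatabaseLinkedService = {
  type: "AzureSqlDatabase",
  connectionString:
    "Server=tcp:myserver.database.windows.net;Database=mydb;User ID=sqladmin",
  password: {
    type: "AzureKeyVaultSecret",
    store: { type: "LinkedServiceReference", referenceName: "myKeyVault" },
    secretName: "sql-password",
  },
};
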
Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */ + azureCloudType?: any; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** Delimited text dataset. */ -export type DelimitedTextDataset = Dataset & { +/** Azure Batch linked service. */ +export type AzureBatchLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "DelimitedText"; - /** The location of the delimited text storage. */ - location?: DatasetLocationUnion; - /** The column delimiter. Type: string (or Expression with resultType string). */ - columnDelimiter?: any; - /** The row delimiter. Type: string (or Expression with resultType string). */ - rowDelimiter?: any; - /** The code page name of the preferred encoding. If miss, the default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). */ - encodingName?: any; - compressionCodec?: CompressionCodec; - /** The data compression method used for DelimitedText. */ - compressionLevel?: any; - /** The quote character. Type: string (or Expression with resultType string). */ - quoteChar?: any; - /** The escape character. Type: string (or Expression with resultType string). */ - escapeChar?: any; - /** When used as input, treat the first row of data as headers. When used as output,write the headers into the output as the first row of data. The default value is false. Type: boolean (or Expression with resultType boolean). */ - firstRowAsHeader?: any; - /** The null value string. Type: string (or Expression with resultType string). */ - nullValue?: any; + type: "AzureBatch"; + /** The Azure Batch account name. Type: string (or Expression with resultType string). */ + accountName: any; + /** The Azure Batch account access key. */ + accessKey?: SecretBaseUnion; + /** The Azure Batch URI. Type: string (or Expression with resultType string). */ + batchUri: any; + /** The Azure Batch pool name. Type: string (or Expression with resultType string). */ + poolName: any; + /** The Azure Storage linked service reference. */ + linkedServiceName: LinkedServiceReference; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** Json dataset. */ -export type JsonDataset = Dataset & { +/** Azure Key Vault linked service. */ +export type AzureKeyVaultLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Json"; - /** The location of the json data storage. */ - location?: DatasetLocationUnion; - /** The code page name of the preferred encoding. If not specified, the default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). 
*/ - encodingName?: any; - /** The data compression method used for the json dataset. */ - compression?: DatasetCompressionUnion; + type: "AzureKeyVault"; + /** The base URL of the Azure Key Vault. e.g. https://myakv.vault.azure.net. Type: string (or Expression with resultType string). */ + baseUrl: any; }; -/** Xml dataset. */ -export type XmlDataset = Dataset & { +/** Microsoft Azure Cosmos Database (CosmosDB) linked service. */ +export type CosmosDbLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Xml"; - /** The location of the json data storage. */ - location?: DatasetLocationUnion; - /** The code page name of the preferred encoding. If not specified, the default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). */ - encodingName?: any; - /** The null value string. Type: string (or Expression with resultType string). */ - nullValue?: any; - /** The data compression method used for the json dataset. */ - compression?: DatasetCompressionUnion; + type: "CosmosDb"; + /** The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ + connectionString?: any; + /** The endpoint of the Azure CosmosDB account. Type: string (or Expression with resultType string) */ + accountEndpoint?: any; + /** The name of the database. Type: string (or Expression with resultType string) */ + database?: any; + /** The account key of the Azure CosmosDB account. Type: SecureString or AzureKeyVaultSecretReference. */ + accountKey?: SecretBaseUnion; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** ORC dataset. */ -export type OrcDataset = Dataset & { +/** Dynamics linked service. */ +export type DynamicsLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Orc"; - /** The location of the ORC data storage. */ - location?: DatasetLocationUnion; - orcCompressionCodec?: OrcCompressionCodec; + type: "Dynamics"; + /** The deployment type of the Dynamics instance. 'Online' for Dynamics Online and 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or Expression with resultType string). */ + deploymentType: any; + /** The host name of the on-premises Dynamics server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). */ + hostName?: any; + /** The port of on-premises Dynamics server. The property is required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. */ + port?: any; + /** The URL to the Microsoft Dynamics server. The property is required for online and not allowed for on-prem. Type: string (or Expression with resultType string). */ + serviceUri?: any; + /** The organization name of the Dynamics instance. The property is required for on-prem and required for online when there is more than one Dynamics instance associated with the user. Type: string (or Expression with resultType string). */ + organizationName?: any; + /** The authentication type to connect to Dynamics server. 
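
`CosmosDbLinkedService` above accepts its `accountKey` as a `SecretBaseUnion`, so either an inline `SecureString` or a Key Vault reference works. An illustrative literal (account and secret names are placeholders):

import type { CosmosDbLinkedService } from "@azure/synapse-artifacts";

// CosmosDB linked service pulling its account key from Key Vault.
const cosmosDb: CosmosDbLinkedService = {
  type: "CosmosDb",
  accountEndpoint: "https://myaccount.documents.azure.com:443/",
  database: "mydb",
  accountKey: {
    type: "AzureKeyVaultSecret",
    store: { type: "LinkedServiceReference", referenceName: "myKeyVault" },
    secretName: "cosmos-account-key",
  },
};
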
'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or Expression with resultType string). */ + authenticationType: any; + /** User name to access the Dynamics instance. Type: string (or Expression with resultType string). */ + username?: any; + /** Password to access the Dynamics instance. */ + password?: SecretBaseUnion; + /** The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). */ + servicePrincipalId?: any; + /** The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). */ + servicePrincipalCredentialType?: any; + /** The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. */ + servicePrincipalCredential?: SecretBaseUnion; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** Binary dataset. */ -export type BinaryDataset = Dataset & { +/** Dynamics CRM linked service. */ +export type DynamicsCrmLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Binary"; - /** The location of the Binary storage. */ - location?: DatasetLocationUnion; - /** The data compression method used for the binary dataset. */ - compression?: DatasetCompressionUnion; + type: "DynamicsCrm"; + /** The deployment type of the Dynamics CRM instance. 'Online' for Dynamics CRM Online and 'OnPremisesWithIfd' for Dynamics CRM on-premises with Ifd. Type: string (or Expression with resultType string). */ + deploymentType: any; + /** The host name of the on-premises Dynamics CRM server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). */ + hostName?: any; + /** The port of on-premises Dynamics CRM server. The property is required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. */ + port?: any; + /** The URL to the Microsoft Dynamics CRM server. The property is required for online and not allowed for on-prem. Type: string (or Expression with resultType string). */ + serviceUri?: any; + /** The organization name of the Dynamics CRM instance. The property is required for on-prem and required for online when there is more than one Dynamics CRM instance associated with the user. Type: string (or Expression with resultType string). */ + organizationName?: any; + /** The authentication type to connect to Dynamics CRM server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or Expression with resultType string). */ + authenticationType: any; + /** User name to access the Dynamics CRM instance. Type: string (or Expression with resultType string). 
*/ + username?: any; + /** Password to access the Dynamics CRM instance. */ + password?: SecretBaseUnion; + /** The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). */ + servicePrincipalId?: any; + /** A string from ServicePrincipalCredentialEnum or an expression */ + servicePrincipalCredentialType?: any; + /** The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. */ + servicePrincipalCredential?: SecretBaseUnion; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** The Azure Blob storage. */ -export type AzureBlobDataset = Dataset & { +/** Common Data Service for Apps linked service. */ +export type CommonDataServiceForAppsLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "AzureBlob"; - /** The path of the Azure Blob storage. Type: string (or Expression with resultType string). */ - folderPath?: any; - /** The root of blob path. Type: string (or Expression with resultType string). */ - tableRootLocation?: any; - /** The name of the Azure Blob. Type: string (or Expression with resultType string). */ - fileName?: any; - /** The start of Azure Blob's modified datetime. Type: string (or Expression with resultType string). */ - modifiedDatetimeStart?: any; - /** The end of Azure Blob's modified datetime. Type: string (or Expression with resultType string). */ - modifiedDatetimeEnd?: any; - /** The format of the Azure Blob storage. */ - format?: DatasetStorageFormatUnion; - /** The data compression method used for the blob storage. */ - compression?: DatasetCompressionUnion; + type: "CommonDataServiceForApps"; + /** The deployment type of the Common Data Service for Apps instance. 'Online' for Common Data Service for Apps Online and 'OnPremisesWithIfd' for Common Data Service for Apps on-premises with Ifd. Type: string (or Expression with resultType string). */ + deploymentType: any; + /** The host name of the on-premises Common Data Service for Apps server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). */ + hostName?: any; + /** The port of on-premises Common Data Service for Apps server. The property is required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. */ + port?: any; + /** The URL to the Microsoft Common Data Service for Apps server. The property is required for online and not allowed for on-prem. Type: string (or Expression with resultType string). */ + serviceUri?: any; + /** The organization name of the Common Data Service for Apps instance. The property is required for on-prem and required for online when there is more than one Common Data Service for Apps instance associated with the user. Type: string (or Expression with resultType string). */ + organizationName?: any; + /** The authentication type to connect to Common Data Service for Apps server. 
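
The Dynamics-family services above pair `servicePrincipalCredentialType` with `servicePrincipalCredential`. A sketch of the Server-To-Server (`AADServicePrincipal`) path for an online Dynamics instance; IDs and the secret are placeholders:

import type { DynamicsLinkedService } from "@azure/synapse-artifacts";

// With 'ServicePrincipalKey', the credential may be a SecureString;
// with 'ServicePrincipalCert' it must be a Key Vault reference.
const dynamics: DynamicsLinkedService = {
  type: "Dynamics",
  deploymentType: "Online",
  serviceUri: "https://contoso.crm.dynamics.com",
  authenticationType: "AADServicePrincipal",
  servicePrincipalId: "<application-client-id>",
  servicePrincipalCredentialType: "ServicePrincipalKey",
  servicePrincipalCredential: { type: "SecureString", value: "<client-secret>" },
};
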
'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or Expression with resultType string). */ + authenticationType: any; + /** User name to access the Common Data Service for Apps instance. Type: string (or Expression with resultType string). */ + username?: any; + /** Password to access the Common Data Service for Apps instance. */ + password?: SecretBaseUnion; + /** The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). */ + servicePrincipalId?: any; + /** A string from ServicePrincipalCredentialEnum or an expression */ + servicePrincipalCredentialType?: any; + /** The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. */ + servicePrincipalCredential?: SecretBaseUnion; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; +}; + +/** HDInsight linked service. */ +export type HDInsightLinkedService = LinkedService & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "HDInsight"; + /** HDInsight cluster URI. Type: string (or Expression with resultType string). */ + clusterUri: any; + /** HDInsight cluster user name. Type: string (or Expression with resultType string). */ + userName?: any; + /** HDInsight cluster password. */ + password?: SecretBaseUnion; + /** The Azure Storage linked service reference. */ + linkedServiceName?: LinkedServiceReference; + /** A reference to the Azure SQL linked service that points to the HCatalog database. */ + hcatalogLinkedServiceName?: LinkedServiceReference; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; + /** Specify if the HDInsight cluster is created with ESP (Enterprise Security Package). Type: Boolean. */ + isEspEnabled?: any; + /** Specify the FileSystem if the main storage for the HDInsight is ADLS Gen2. Type: string (or Expression with resultType string). */ + fileSystem?: any; }; -/** The Azure Table storage dataset. */ -export type AzureTableDataset = Dataset & { +/** File system linked service. */ +export type FileServerLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "AzureTable"; - /** The table name of the Azure Table storage. Type: string (or Expression with resultType string). */ - tableName: any; + type: "FileServer"; + /** Host name of the server. Type: string (or Expression with resultType string). */ + host: any; + /** User ID to log on to the server. Type: string (or Expression with resultType string). */ + userId?: any; + /** Password to log on to the server. */ + password?: SecretBaseUnion; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
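
A small sketch for the `HDInsightLinkedService` shape above, with placeholder cluster details:

import type { HDInsightLinkedService } from "@azure/synapse-artifacts";

// Basic-auth HDInsight cluster; `linkedServiceName` could additionally
// reference the cluster's primary storage linked service if needed.
const hdInsight: HDInsightLinkedService = {
  type: "HDInsight",
  clusterUri: "https://mycluster.azurehdinsight.net",
  userName: "admin",
  password: { type: "SecureString", value: "<cluster-password>" },
  isEspEnabled: false,
};
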
*/ + encryptedCredential?: any; }; -/** The Azure SQL Server database dataset. */ -export type AzureSqlTableDataset = Dataset & { +/** Azure File Storage linked service. */ +export type AzureFileStorageLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "AzureSqlTable"; - /** This property will be retired. Please consider using schema + table properties instead. */ - tableName?: any; - /** The schema name of the Azure SQL database. Type: string (or Expression with resultType string). */ - schemaTypePropertiesSchema?: any; - /** The table name of the Azure SQL database. Type: string (or Expression with resultType string). */ - table?: any; + type: "AzureFileStorage"; + /** Host name of the server. Type: string (or Expression with resultType string). */ + host: any; + /** User ID to log on to the server. Type: string (or Expression with resultType string). */ + userId?: any; + /** Password to log on to the server. */ + password?: SecretBaseUnion; + /** The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference. */ + connectionString?: any; + /** The Azure key vault secret reference of accountKey in connection string. */ + accountKey?: AzureKeyVaultSecretReference; + /** SAS URI of the Azure File resource. It is mutually exclusive with connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. */ + sasUri?: any; + /** The Azure key vault secret reference of sasToken in sas uri. */ + sasToken?: AzureKeyVaultSecretReference; + /** The Azure file share name. It is required when authenticating with accountKey/sasToken. Type: string (or Expression with resultType string). */ + fileShare?: any; + /** The Azure file share snapshot version. Type: string (or Expression with resultType string). */ + snapshot?: any; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** The Azure SQL Managed Instance dataset. */ -export type AzureSqlMITableDataset = Dataset & { +/** Linked service for Google Cloud Storage. */ +export type GoogleCloudStorageLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "AzureSqlMITable"; - /** This property will be retired. Please consider using schema + table properties instead. */ - tableName?: any; - /** The schema name of the Azure SQL Managed Instance. Type: string (or Expression with resultType string). */ - schemaTypePropertiesSchema?: any; - /** The table name of the Azure SQL Managed Instance dataset. Type: string (or Expression with resultType string). */ - table?: any; + type: "GoogleCloudStorage"; + /** The access key identifier of the Google Cloud Storage Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). */ + accessKeyId?: any; + /** The secret access key of the Google Cloud Storage Identity and Access Management (IAM) user. */ + secretAccessKey?: SecretBaseUnion; + /** This value specifies the endpoint to access with the Google Cloud Storage Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). */ + serviceUrl?: any; + /** The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** The Azure SQL Data Warehouse dataset. */ -export type AzureSqlDWTableDataset = Dataset & { +/** Oracle database. */ +export type OracleLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "AzureSqlDWTable"; - /** This property will be retired. Please consider using schema + table properties instead. */ - tableName?: any; - /** The schema name of the Azure SQL Data Warehouse. Type: string (or Expression with resultType string). */ - schemaTypePropertiesSchema?: any; - /** The table name of the Azure SQL Data Warehouse. Type: string (or Expression with resultType string). */ - table?: any; + type: "Oracle"; + /** The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ + connectionString: any; + /** The Azure key vault secret reference of password in connection string. */ + password?: AzureKeyVaultSecretReference; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** The Cassandra database dataset. */ -export type CassandraTableDataset = Dataset & { +/** AmazonRdsForOracle database. */ +export type AmazonRdsForOracleLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "CassandraTable"; - /** The table name of the Cassandra database. Type: string (or Expression with resultType string). */ - tableName?: any; - /** The keyspace of the Cassandra database. Type: string (or Expression with resultType string). */ - keyspace?: any; + type: "AmazonRdsForOracle"; + /** The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ + connectionString: any; + /** The Azure key vault secret reference of password in connection string. */ + password?: SecretBaseUnion; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** The custom dataset. */ -export type CustomDataset = Dataset & { +/** Azure MySQL database linked service. */ +export type AzureMySqlLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "CustomDataset"; - /** Custom dataset properties. */ - typeProperties?: any; + type: "AzureMySql"; + /** The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ + connectionString: any; + /** The Azure key vault secret reference of password in connection string. */ + password?: AzureKeyVaultSecretReference; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** Microsoft Azure CosmosDB (SQL API) Collection dataset. */ -export type CosmosDbSqlApiCollectionDataset = Dataset & { +/** Linked service for MySQL data source. 
*/ +export type MySqlLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "CosmosDbSqlApiCollection"; - /** CosmosDB (SQL API) collection name. Type: string (or Expression with resultType string). */ - collectionName: any; + type: "MySql"; + /** The connection string. */ + connectionString: any; + /** The Azure key vault secret reference of password in connection string. */ + password?: AzureKeyVaultSecretReference; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** Microsoft Azure Document Database Collection dataset. */ -export type DocumentDbCollectionDataset = Dataset & { +/** Linked service for PostgreSQL data source. */ +export type PostgreSqlLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "DocumentDbCollection"; - /** Document Database collection name. Type: string (or Expression with resultType string). */ - collectionName: any; + type: "PostgreSql"; + /** The connection string. */ + connectionString: any; + /** The Azure key vault secret reference of password in connection string. */ + password?: AzureKeyVaultSecretReference; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** The Dynamics entity dataset. */ -export type DynamicsEntityDataset = Dataset & { +/** Linked service for Sybase data source. */ +export type SybaseLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "DynamicsEntity"; - /** The logical name of the entity. Type: string (or Expression with resultType string). */ - entityName?: any; + type: "Sybase"; + /** Server name for connection. Type: string (or Expression with resultType string). */ + server: any; + /** Database name for connection. Type: string (or Expression with resultType string). */ + database: any; + /** Schema name for connection. Type: string (or Expression with resultType string). */ + schema?: any; + /** AuthenticationType to be used for connection. */ + authenticationType?: SybaseAuthenticationType; + /** Username for authentication. Type: string (or Expression with resultType string). */ + username?: any; + /** Password for authentication. */ + password?: SecretBaseUnion; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** The Dynamics CRM entity dataset. */ -export type DynamicsCrmEntityDataset = Dataset & { +/** Linked service for DB2 data source. */ +export type Db2LinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "DynamicsCrmEntity"; - /** The logical name of the entity. Type: string (or Expression with resultType string). */ - entityName?: any; + type: "Db2"; + /** The connection string. It is mutually exclusive with server, database, authenticationType, userName, packageCollection and certificateCommonName property. Type: string, SecureString or AzureKeyVaultSecretReference. 
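
For the on-premises relational sources in this stretch, a `SybaseLinkedService` sketch (host and credentials are placeholders; `server` and `database` are the required properties):

import type { SybaseLinkedService } from "@azure/synapse-artifacts";

// Basic authentication against an on-premises Sybase server.
const sybase: SybaseLinkedService = {
  type: "Sybase",
  server: "sybase-host",
  database: "sales",
  authenticationType: "Basic",
  username: "dbuser",
  password: { type: "SecureString", value: "<password>" },
};
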
*/ + connectionString?: any; + /** Server name for connection. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). */ + server: any; + /** Database name for connection. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). */ + database: any; + /** AuthenticationType to be used for connection. It is mutually exclusive with connectionString property. */ + authenticationType?: Db2AuthenticationType; + /** Username for authentication. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). */ + username?: any; + /** Password for authentication. */ + password?: SecretBaseUnion; + /** The collection under which packages are created when querying the database. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). */ + packageCollection?: any; + /** Certificate Common Name when TLS is enabled. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). */ + certificateCommonName?: any; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. It is mutually exclusive with connectionString property. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** The Common Data Service for Apps entity dataset. */ -export type CommonDataServiceForAppsEntityDataset = Dataset & { +/** Linked service for Teradata data source. */ +export type TeradataLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "CommonDataServiceForAppsEntity"; - /** The logical name of the entity. Type: string (or Expression with resultType string). */ - entityName?: any; + type: "Teradata"; + /** Teradata ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ + connectionString?: any; + /** Server name for connection. Type: string (or Expression with resultType string). */ + server?: any; + /** AuthenticationType to be used for connection. */ + authenticationType?: TeradataAuthenticationType; + /** Username for authentication. Type: string (or Expression with resultType string). */ + username?: any; + /** Password for authentication. */ + password?: SecretBaseUnion; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** Azure Data Lake Store dataset. */ -export type AzureDataLakeStoreDataset = Dataset & { +/** Azure ML Studio Web Service linked service. */ +export type AzureMLLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "AzureDataLakeStoreFile"; - /** Path to the folder in the Azure Data Lake Store. Type: string (or Expression with resultType string). */ - folderPath?: any; - /** The name of the file in the Azure Data Lake Store. Type: string (or Expression with resultType string). */ - fileName?: any; - /** The format of the Data Lake Store. */ - format?: DatasetStorageFormatUnion; - /** The data compression method used for the item(s) in the Azure Data Lake Store. */ - compression?: DatasetCompressionUnion; + type: "AzureML"; + /** The Batch Execution REST URL for an Azure ML Studio Web Service endpoint. 
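
`TeradataLinkedService` above allows either `connectionString` or the discrete `server`/authentication properties. A sketch of the latter form, with placeholder values:

import type { TeradataLinkedService } from "@azure/synapse-artifacts";

// Discrete-property form; a Teradata ODBC connectionString could be
// supplied instead of `server`.
const teradata: TeradataLinkedService = {
  type: "Teradata",
  server: "teradata-host",
  authenticationType: "Basic",
  username: "dbuser",
  password: { type: "SecureString", value: "<password>" },
};
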
Type: string (or Expression with resultType string). */ + mlEndpoint: any; + /** The API key for accessing the Azure ML model endpoint. */ + apiKey: SecretBaseUnion; + /** The Update Resource REST URL for an Azure ML Studio Web Service endpoint. Type: string (or Expression with resultType string). */ + updateResourceEndpoint?: any; + /** The ID of the service principal used to authenticate against the ARM-based updateResourceEndpoint of an Azure ML Studio web service. Type: string (or Expression with resultType string). */ + servicePrincipalId?: any; + /** The key of the service principal used to authenticate against the ARM-based updateResourceEndpoint of an Azure ML Studio web service. */ + servicePrincipalKey?: SecretBaseUnion; + /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */ + tenant?: any; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** The Azure Data Lake Storage Gen2 storage. */ -export type AzureBlobFSDataset = Dataset & { +/** Azure ML Service linked service. */ +export type AzureMLServiceLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "AzureBlobFSFile"; - /** The path of the Azure Data Lake Storage Gen2 storage. Type: string (or Expression with resultType string). */ - folderPath?: any; - /** The name of the Azure Data Lake Storage Gen2. Type: string (or Expression with resultType string). */ - fileName?: any; - /** The format of the Azure Data Lake Storage Gen2 storage. */ - format?: DatasetStorageFormatUnion; - /** The data compression method used for the blob storage. */ - compression?: DatasetCompressionUnion; + type: "AzureMLService"; + /** Azure ML Service workspace subscription ID. Type: string (or Expression with resultType string). */ + subscriptionId: any; + /** Azure ML Service workspace resource group name. Type: string (or Expression with resultType string). */ + resourceGroupName: any; + /** Azure ML Service workspace name. Type: string (or Expression with resultType string). */ + mlWorkspaceName: any; + /** The ID of the service principal used to authenticate against the endpoint of a published Azure ML Service pipeline. Type: string (or Expression with resultType string). */ + servicePrincipalId?: any; + /** The key of the service principal used to authenticate against the endpoint of a published Azure ML Service pipeline. */ + servicePrincipalKey?: SecretBaseUnion; + /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */ + tenant?: any; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** The Office365 account. */ -export type Office365Dataset = Dataset & { +/** Open Database Connectivity (ODBC) linked service. */ +export type OdbcLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Office365Table"; - /** Name of the dataset to extract from Office 365. Type: string (or Expression with resultType string). 
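
The ODBC-family linked services added just below split the plain connection string from the secret `credential` portion. An illustrative `OdbcLinkedService` literal (driver and server names are placeholders):

import type { OdbcLinkedService } from "@azure/synapse-artifacts";

// Non-secret connection string inline; the secret portion, if any,
// would go in `credential` as a SecretBaseUnion.
const odbc: OdbcLinkedService = {
  type: "Odbc",
  connectionString: "Driver={Some ODBC Driver};Server=myserver;Database=mydb",
  authenticationType: "Basic",
  userName: "odbcuser",
  password: { type: "SecureString", value: "<password>" },
};
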
*/ - tableName: any; - /** A predicate expression that can be used to filter the specific rows to extract from Office 365. Type: string (or Expression with resultType string). */ - predicate?: any; + type: "Odbc"; + /** The non-access credential portion of the connection string as well as an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. */ + connectionString: any; + /** Type of authentication used to connect to the ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). */ + authenticationType?: any; + /** The access credential portion of the connection string specified in driver-specific property-value format. */ + credential?: SecretBaseUnion; + /** User name for Basic authentication. Type: string (or Expression with resultType string). */ + userName?: any; + /** Password for Basic authentication. */ + password?: SecretBaseUnion; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** An on-premises file system dataset. */ -export type FileShareDataset = Dataset & { +/** Informix linked service. */ +export type InformixLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "FileShare"; - /** The path of the on-premises file system. Type: string (or Expression with resultType string). */ - folderPath?: any; - /** The name of the on-premises file system. Type: string (or Expression with resultType string). */ - fileName?: any; - /** The start of file's modified datetime. Type: string (or Expression with resultType string). */ - modifiedDatetimeStart?: any; - /** The end of file's modified datetime. Type: string (or Expression with resultType string). */ - modifiedDatetimeEnd?: any; - /** The format of the files. */ - format?: DatasetStorageFormatUnion; - /** Specify a filter to be used to select a subset of files in the folderPath rather than all files. Type: string (or Expression with resultType string). */ - fileFilter?: any; - /** The data compression method used for the file system. */ - compression?: DatasetCompressionUnion; + type: "Informix"; + /** The non-access credential portion of the connection string as well as an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. */ + connectionString: any; + /** Type of authentication used to connect to the Informix as ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). */ + authenticationType?: any; + /** The access credential portion of the connection string specified in driver-specific property-value format. */ + credential?: SecretBaseUnion; + /** User name for Basic authentication. Type: string (or Expression with resultType string). */ + userName?: any; + /** Password for Basic authentication. */ + password?: SecretBaseUnion; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** The MongoDB database dataset. */ -export type MongoDbCollectionDataset = Dataset & { +/** Microsoft Access linked service. 
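
// Editor's note: illustrative sketch only, not part of the generated patch. A
// minimal OdbcLinkedService per the type above; the DSN and credentials are
// hypothetical. Per the doc comment, connectionString carries the non-credential
// portion and authenticationType is "Anonymous" or "Basic".
const odbcExample: OdbcLinkedService = {
  type: "Odbc",
  connectionString: "DSN=WarehouseDsn;",
  authenticationType: "Basic",
  userName: "dbuser",
  password: { type: "SecureString", value: "<password>" } // assumed SecureString variant
};
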
*/ +export type MicrosoftAccessLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "MongoDbCollection"; - /** The table name of the MongoDB database. Type: string (or Expression with resultType string). */ - collectionName: any; + type: "MicrosoftAccess"; + /** The non-access credential portion of the connection string as well as an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. */ + connectionString: any; + /** Type of authentication used to connect to the Microsoft Access as ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). */ + authenticationType?: any; + /** The access credential portion of the connection string specified in driver-specific property-value format. */ + credential?: SecretBaseUnion; + /** User name for Basic authentication. Type: string (or Expression with resultType string). */ + userName?: any; + /** Password for Basic authentication. */ + password?: SecretBaseUnion; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** The MongoDB Atlas database dataset. */ -export type MongoDbAtlasCollectionDataset = Dataset & { +/** Hadoop Distributed File System (HDFS) linked service. */ +export type HdfsLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "MongoDbAtlasCollection"; - /** The collection name of the MongoDB Atlas database. Type: string (or Expression with resultType string). */ - collection: any; + type: "Hdfs"; + /** The URL of the HDFS service endpoint, e.g. http://myhostname:50070/webhdfs/v1 . Type: string (or Expression with resultType string). */ + url: any; + /** Type of authentication used to connect to the HDFS. Possible values are: Anonymous and Windows. Type: string (or Expression with resultType string). */ + authenticationType?: any; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; + /** User name for Windows authentication. Type: string (or Expression with resultType string). */ + userName?: any; + /** Password for Windows authentication. */ + password?: SecretBaseUnion; }; -/** The MongoDB database dataset. */ -export type MongoDbV2CollectionDataset = Dataset & { +/** Open Data Protocol (OData) linked service. */ +export type ODataLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "MongoDbV2Collection"; - /** The collection name of the MongoDB database. Type: string (or Expression with resultType string). */ - collection: any; + type: "OData"; + /** The URL of the OData service endpoint. Type: string (or Expression with resultType string). */ + url: any; + /** Type of authentication used to connect to the OData service. */ + authenticationType?: ODataAuthenticationType; + /** User name of the OData service. Type: string (or Expression with resultType string). */ + userName?: any; + /** Password of the OData service. */ + password?: SecretBaseUnion; + /** Specify the tenant information (domain name or tenant ID) under which your application resides. 
Type: string (or Expression with resultType string). */ + tenant?: any; + /** Specify the application id of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). */ + servicePrincipalId?: any; + /** Indicates the azure cloud type of the service principal auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */ + azureCloudType?: any; + /** Specify the resource you are requesting authorization to use. Type: string (or Expression with resultType string). */ + aadResourceId?: any; + /** Specify the credential type (key or cert) that is used for the service principal. */ + aadServicePrincipalCredentialType?: ODataAadServicePrincipalCredentialType; + /** Specify the secret of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). */ + servicePrincipalKey?: SecretBaseUnion; + /** Specify the base64 encoded certificate of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). */ + servicePrincipalEmbeddedCert?: SecretBaseUnion; + /** Specify the password of your certificate if your certificate has a password and you are using AadServicePrincipal authentication. Type: string (or Expression with resultType string). */ + servicePrincipalEmbeddedCertPassword?: SecretBaseUnion; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** The CosmosDB (MongoDB API) database dataset. */ -export type CosmosDbMongoDbApiCollectionDataset = Dataset & { +/** Web linked service. */ +export type WebLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "CosmosDbMongoDbApiCollection"; - /** The collection name of the CosmosDB (MongoDB API) database. Type: string (or Expression with resultType string). */ - collection: any; + type: "Web"; + /** Web linked service properties. */ + typeProperties: WebLinkedServiceTypePropertiesUnion; }; -/** The Open Data Protocol (OData) resource dataset. */ -export type ODataResourceDataset = Dataset & { +/** Linked service for Cassandra data source. */ +export type CassandraLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "ODataResource"; - /** The OData resource path. Type: string (or Expression with resultType string). */ - path?: any; + type: "Cassandra"; + /** Host name for connection. Type: string (or Expression with resultType string). */ + host: any; + /** AuthenticationType to be used for connection. Type: string (or Expression with resultType string). */ + authenticationType?: any; + /** The port for the connection. Type: integer (or Expression with resultType integer). */ + port?: any; + /** Username for authentication. Type: string (or Expression with resultType string). */ + username?: any; + /** Password for authentication. */ + password?: SecretBaseUnion; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** The on-premises Oracle database dataset.
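
// Editor's note: illustrative sketch only, not part of the generated patch. A
// Basic-auth ODataLinkedService built against the type above; the endpoint and
// credentials are hypothetical, and "Basic" is assumed to be a member of
// ODataAuthenticationType.
const odataExample: ODataLinkedService = {
  type: "OData",
  url: "https://odata.contoso.example/service.svc",
  authenticationType: "Basic",
  userName: "reader",
  password: { type: "SecureString", value: "<password>" }
};
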
*/ -export type OracleTableDataset = Dataset & { +/** Linked service for MongoDb data source. */ +export type MongoDbLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "OracleTable"; - /** This property will be retired. Please consider using schema + table properties instead. */ - tableName?: any; - /** The schema name of the on-premises Oracle database. Type: string (or Expression with resultType string). */ - schemaTypePropertiesSchema?: any; - /** The table name of the on-premises Oracle database. Type: string (or Expression with resultType string). */ - table?: any; + type: "MongoDb"; + /** The IP address or server name of the MongoDB server. Type: string (or Expression with resultType string). */ + server: any; + /** The authentication type to be used to connect to the MongoDB database. */ + authenticationType?: MongoDbAuthenticationType; + /** The name of the MongoDB database that you want to access. Type: string (or Expression with resultType string). */ + databaseName: any; + /** Username for authentication. Type: string (or Expression with resultType string). */ + username?: any; + /** Password for authentication. */ + password?: SecretBaseUnion; + /** Database to verify the username and password. Type: string (or Expression with resultType string). */ + authSource?: any; + /** The TCP port number that the MongoDB server uses to listen for client connections. The default value is 27017. Type: integer (or Expression with resultType integer), minimum: 0. */ + port?: any; + /** Specifies whether the connections to the server are encrypted using SSL. The default value is false. Type: boolean (or Expression with resultType boolean). */ + enableSsl?: any; + /** Specifies whether to allow self-signed certificates from the server. The default value is false. Type: boolean (or Expression with resultType boolean). */ + allowSelfSignedServerCert?: any; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** The Teradata database dataset. */ -export type TeradataTableDataset = Dataset & { +/** Linked service for MongoDB Atlas data source. */ +export type MongoDbAtlasLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "TeradataTable"; - /** The database name of Teradata. Type: string (or Expression with resultType string). */ - database?: any; - /** The table name of Teradata. Type: string (or Expression with resultType string). */ - table?: any; + type: "MongoDbAtlas"; + /** The MongoDB Atlas connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ + connectionString: any; + /** The name of the MongoDB Atlas database that you want to access. Type: string (or Expression with resultType string). */ + database: any; }; -/** The Azure MySQL database dataset. */ -export type AzureMySqlTableDataset = Dataset & { +/** Linked service for MongoDB data source. */ +export type MongoDbV2LinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "AzureMySqlTable"; - /** The Azure MySQL database table name. Type: string (or Expression with resultType string). */ - tableName?: any; - /** The name of Azure MySQL database table.
Type: string (or Expression with resultType string). */ - table?: any; + type: "MongoDbV2"; + /** The MongoDB connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ + connectionString: any; + /** The name of the MongoDB database that you want to access. Type: string (or Expression with resultType string). */ + database: any; +}; + +/** Linked service for CosmosDB (MongoDB API) data source. */ +export type CosmosDbMongoDbApiLinkedService = LinkedService & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "CosmosDbMongoDbApi"; + /** The CosmosDB (MongoDB API) connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ + connectionString: any; + /** The name of the CosmosDB (MongoDB API) database that you want to access. Type: string (or Expression with resultType string). */ + database: any; }; -/** The Amazon Redshift table dataset. */ -export type AmazonRedshiftTableDataset = Dataset & { +/** Azure Data Lake Store linked service. */ +export type AzureDataLakeStoreLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "AmazonRedshiftTable"; - /** This property will be retired. Please consider using schema + table properties instead. */ - tableName?: any; - /** The Amazon Redshift table name. Type: string (or Expression with resultType string). */ - table?: any; - /** The Amazon Redshift schema name. Type: string (or Expression with resultType string). */ - schemaTypePropertiesSchema?: any; + type: "AzureDataLakeStore"; + /** Data Lake Store service URI. Type: string (or Expression with resultType string). */ + dataLakeStoreUri: any; + /** The ID of the application used to authenticate against the Azure Data Lake Store account. Type: string (or Expression with resultType string). */ + servicePrincipalId?: any; + /** The Key of the application used to authenticate against the Azure Data Lake Store account. */ + servicePrincipalKey?: SecretBaseUnion; + /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */ + tenant?: any; + /** Indicates the azure cloud type of the service principal auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */ + azureCloudType?: any; + /** Data Lake Store account name. Type: string (or Expression with resultType string). */ + accountName?: any; + /** Data Lake Store account subscription ID (if different from Data Factory account). Type: string (or Expression with resultType string). */ + subscriptionId?: any; + /** Data Lake Store account resource group name (if different from Data Factory account). Type: string (or Expression with resultType string). */ + resourceGroupName?: any; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** The Db2 table dataset. */ -export type Db2TableDataset = Dataset & { +/** Azure Data Lake Storage Gen2 linked service.
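
// Editor's note: illustrative sketch only, not part of the generated patch.
// MongoDbV2LinkedService needs just the connection string and database name;
// both values here are hypothetical.
const mongoDbV2Example: MongoDbV2LinkedService = {
  type: "MongoDbV2",
  connectionString: "mongodb://mongo01.contoso.example:27017",
  database: "telemetry"
};
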
*/ +export type AzureBlobFSLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Db2Table"; - /** This property will be retired. Please consider using schema + table properties instead. */ - tableName?: any; - /** The Db2 schema name. Type: string (or Expression with resultType string). */ - schemaTypePropertiesSchema?: any; - /** The Db2 table name. Type: string (or Expression with resultType string). */ - table?: any; + type: "AzureBlobFS"; + /** Endpoint for the Azure Data Lake Storage Gen2 service. Type: string (or Expression with resultType string). */ + url: any; + /** Account key for the Azure Data Lake Storage Gen2 service. Type: string (or Expression with resultType string). */ + accountKey?: any; + /** The ID of the application used to authenticate against the Azure Data Lake Storage Gen2 account. Type: string (or Expression with resultType string). */ + servicePrincipalId?: any; + /** The Key of the application used to authenticate against the Azure Data Lake Storage Gen2 account. */ + servicePrincipalKey?: SecretBaseUnion; + /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */ + tenant?: any; + /** Indicates the azure cloud type of the service principal auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */ + azureCloudType?: any; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** The relational table dataset. */ -export type RelationalTableDataset = Dataset & { +/** Office365 linked service. */ +export type Office365LinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "RelationalTable"; - /** The relational table name. Type: string (or Expression with resultType string). */ - tableName?: any; + type: "Office365"; + /** Azure tenant ID to which the Office 365 account belongs. Type: string (or Expression with resultType string). */ + office365TenantId: any; + /** Specify the tenant information under which your Azure AD web application resides. Type: string (or Expression with resultType string). */ + servicePrincipalTenantId: any; + /** Specify the application's client ID. Type: string (or Expression with resultType string). */ + servicePrincipalId: any; + /** Specify the application's key. */ + servicePrincipalKey: SecretBaseUnion; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** The Informix table dataset. */ -export type InformixTableDataset = Dataset & { +/** Linked service for Salesforce. */ +export type SalesforceLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "InformixTable"; - /** The Informix table name. Type: string (or Expression with resultType string). */ - tableName?: any; + type: "Salesforce"; + /** The URL of Salesforce instance. Default is 'https://login.salesforce.com'. To copy data from sandbox, specify 'https://test.salesforce.com'.
To copy data from custom domain, specify, for example, 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). */ + environmentUrl?: any; + /** The username for Basic authentication of the Salesforce instance. Type: string (or Expression with resultType string). */ + username?: any; + /** The password for Basic authentication of the Salesforce instance. */ + password?: SecretBaseUnion; + /** The security token is optional to remotely access Salesforce instance. */ + securityToken?: SecretBaseUnion; + /** The Salesforce API version used in ADF. Type: string (or Expression with resultType string). */ + apiVersion?: any; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** The ODBC table dataset. */ -export type OdbcTableDataset = Dataset & { +/** Linked service for Salesforce Service Cloud. */ +export type SalesforceServiceCloudLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "OdbcTable"; - /** The ODBC table name. Type: string (or Expression with resultType string). */ - tableName?: any; + type: "SalesforceServiceCloud"; + /** The URL of Salesforce Service Cloud instance. Default is 'https://login.salesforce.com'. To copy data from sandbox, specify 'https://test.salesforce.com'. To copy data from custom domain, specify, for example, 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). */ + environmentUrl?: any; + /** The username for Basic authentication of the Salesforce instance. Type: string (or Expression with resultType string). */ + username?: any; + /** The password for Basic authentication of the Salesforce instance. */ + password?: SecretBaseUnion; + /** The security token is optional to remotely access Salesforce instance. */ + securityToken?: SecretBaseUnion; + /** The Salesforce API version used in ADF. Type: string (or Expression with resultType string). */ + apiVersion?: any; + /** Extended properties appended to the connection string. Type: string (or Expression with resultType string). */ + extendedProperties?: any; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** The MySQL table dataset. */ -export type MySqlTableDataset = Dataset & { +/** Linked service for SAP Cloud for Customer. */ +export type SapCloudForCustomerLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "MySqlTable"; - /** The MySQL table name. Type: string (or Expression with resultType string). */ - tableName?: any; + type: "SapCloudForCustomer"; + /** The URL of SAP Cloud for Customer OData API. For example, '[https://[tenantname].crm.ondemand.com/sap/c4c/odata/v1]'. Type: string (or Expression with resultType string). */ + url: any; + /** The username for Basic authentication. Type: string (or Expression with resultType string). */ + username?: any; + /** The password for Basic authentication. */ + password?: SecretBaseUnion; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Either encryptedCredential or username/password must be provided. 
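
// Editor's note: illustrative sketch only, not part of the generated patch. A
// SalesforceLinkedService pointed at a sandbox, per the environmentUrl doc
// comment above; the credentials and API version are hypothetical, and
// SecureString is the assumed secret shape.
const salesforceExample: SalesforceLinkedService = {
  type: "Salesforce",
  environmentUrl: "https://test.salesforce.com",
  username: "integration@contoso.example",
  password: { type: "SecureString", value: "<password>" },
  securityToken: { type: "SecureString", value: "<security-token>" },
  apiVersion: "52.0"
};
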
Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** The PostgreSQL table dataset. */ -export type PostgreSqlTableDataset = Dataset & { +/** Linked service for SAP ERP Central Component(SAP ECC). */ +export type SapEccLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "PostgreSqlTable"; - /** This property will be retired. Please consider using schema + table properties instead. */ - tableName?: any; - /** The PostgreSQL table name. Type: string (or Expression with resultType string). */ - table?: any; - /** The PostgreSQL schema name. Type: string (or Expression with resultType string). */ - schemaTypePropertiesSchema?: any; + type: "SapEcc"; + /** The URL of SAP ECC OData API. For example, '[https://hostname:port/sap/opu/odata/sap/servicename/]'. Type: string (or Expression with resultType string). */ + url: string; + /** The username for Basic authentication. Type: string (or Expression with resultType string). */ + username?: string; + /** The password for Basic authentication. */ + password?: SecretBaseUnion; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Either encryptedCredential or username/password must be provided. Type: string (or Expression with resultType string). */ + encryptedCredential?: string; }; -/** The Microsoft Access table dataset. */ -export type MicrosoftAccessTableDataset = Dataset & { +/** SAP Business Warehouse Open Hub Destination Linked Service. */ +export type SapOpenHubLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "MicrosoftAccessTable"; - /** The Microsoft Access table name. Type: string (or Expression with resultType string). */ - tableName?: any; + type: "SapOpenHub"; + /** Host name of the SAP BW instance where the open hub destination is located. Type: string (or Expression with resultType string). */ + server: any; + /** System number of the BW system where the open hub destination is located. (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). */ + systemNumber: any; + /** Client ID of the client on the BW system where the open hub destination is located. (Usually a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). */ + clientId: any; + /** Language of the BW system where the open hub destination is located. The default value is EN. Type: string (or Expression with resultType string). */ + language?: any; + /** SystemID of the SAP system where the table is located. Type: string (or Expression with resultType string). */ + systemId?: any; + /** Username to access the SAP BW server where the open hub destination is located. Type: string (or Expression with resultType string). */ + userName?: any; + /** Password to access the SAP BW server where the open hub destination is located. */ + password?: SecretBaseUnion; + /** The hostname of the SAP Message Server. Type: string (or Expression with resultType string). */ + messageServer?: any; + /** The service name or port number of the Message Server. Type: string (or Expression with resultType string). */ + messageServerService?: any; + /** The Logon Group for the SAP System. Type: string (or Expression with resultType string). */ + logonGroup?: any; + /** The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** The Salesforce object dataset. */ -export type SalesforceObjectDataset = Dataset & { +/** Rest Service linked service. */ +export type RestServiceLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "SalesforceObject"; - /** The Salesforce object API name. Type: string (or Expression with resultType string). */ - objectApiName?: any; + type: "RestService"; + /** The base URL of the REST service. */ + url: any; + /** Whether to validate server side SSL certificate when connecting to the endpoint. The default value is true. Type: boolean (or Expression with resultType boolean). */ + enableServerCertificateValidation?: any; + /** Type of authentication used to connect to the REST service. */ + authenticationType: RestServiceAuthenticationType; + /** The user name used in Basic authentication type. */ + userName?: any; + /** The password used in Basic authentication type. */ + password?: SecretBaseUnion; + /** The application's client ID used in AadServicePrincipal authentication type. */ + servicePrincipalId?: any; + /** The application's key used in AadServicePrincipal authentication type. */ + servicePrincipalKey?: SecretBaseUnion; + /** The tenant information (domain name or tenant ID) used in AadServicePrincipal authentication type under which your application resides. */ + tenant?: any; + /** Indicates the azure cloud type of the service principal auth. Allowed values are AzurePublic, AzureChina, AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). */ + azureCloudType?: any; + /** The resource you are requesting authorization to use. */ + aadResourceId?: any; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** The Salesforce Service Cloud object dataset. */ -export type SalesforceServiceCloudObjectDataset = Dataset & { +/** Linked service for Amazon S3. */ +export type AmazonS3LinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "SalesforceServiceCloudObject"; - /** The Salesforce Service Cloud object API name. Type: string (or Expression with resultType string). */ - objectApiName?: any; + type: "AmazonS3"; + /** The authentication type of S3. Allowed value: AccessKey (default) or TemporarySecurityCredentials. Type: string (or Expression with resultType string). */ + authenticationType?: any; + /** The access key identifier of the Amazon S3 Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). */ + accessKeyId?: any; + /** The secret access key of the Amazon S3 Identity and Access Management (IAM) user. */ + secretAccessKey?: SecretBaseUnion; + /** This value specifies the endpoint to access with the S3 Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). */ + serviceUrl?: any; + /** The session token for the S3 temporary security credential.
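
// Editor's note: illustrative sketch only, not part of the generated patch. A
// Basic-auth RestServiceLinkedService per the type above; the base URL and
// credentials are hypothetical, and "Basic" is assumed to be a member of
// RestServiceAuthenticationType.
const restExample: RestServiceLinkedService = {
  type: "RestService",
  url: "https://api.contoso.example/v1",
  enableServerCertificateValidation: true,
  authenticationType: "Basic",
  userName: "svc-rest",
  password: { type: "SecureString", value: "<password>" }
};
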
*/ + sessionToken?: SecretBaseUnion; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** The Sybase table dataset. */ -export type SybaseTableDataset = Dataset & { +/** Linked service for Amazon Redshift. */ +export type AmazonRedshiftLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "SybaseTable"; - /** The Sybase table name. Type: string (or Expression with resultType string). */ - tableName?: any; + type: "AmazonRedshift"; + /** The name of the Amazon Redshift server. Type: string (or Expression with resultType string). */ + server: any; + /** The username of the Amazon Redshift source. Type: string (or Expression with resultType string). */ + username?: any; + /** The password of the Amazon Redshift source. */ + password?: SecretBaseUnion; + /** The database name of the Amazon Redshift source. Type: string (or Expression with resultType string). */ + database: any; + /** The TCP port number that the Amazon Redshift server uses to listen for client connections. The default value is 5439. Type: integer (or Expression with resultType integer). */ + port?: any; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; +}; + +/** Custom linked service. */ +export type CustomDataSourceLinkedService = LinkedService & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "CustomDataSource"; + /** Custom linked service properties. */ + typeProperties: any; }; -/** The SAP BW cube dataset. */ -export type SapBwCubeDataset = Dataset & { +/** Linked service for Windows Azure Search Service. */ +export type AzureSearchLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "SapBwCube"; + type: "AzureSearch"; + /** URL for Azure Search service. Type: string (or Expression with resultType string). */ + url: any; + /** Admin Key for Azure Search service */ + key?: SecretBaseUnion; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** The path of the SAP Cloud for Customer OData entity. */ -export type SapCloudForCustomerResourceDataset = Dataset & { +/** Linked service for an HTTP source. */ +export type HttpLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "SapCloudForCustomerResource"; - /** The path of the SAP Cloud for Customer OData entity. Type: string (or Expression with resultType string). */ - path: any; + type: "HttpServer"; + /** The base URL of the HTTP endpoint, e.g. http://www.microsoft.com. Type: string (or Expression with resultType string). */ + url: any; + /** The authentication type to be used to connect to the HTTP server. */ + authenticationType?: HttpAuthenticationType; + /** User name for Basic, Digest, or Windows authentication. Type: string (or Expression with resultType string). */ + userName?: any; + /** Password for Basic, Digest, Windows, or ClientCertificate with EmbeddedCertData authentication. 
*/ + password?: SecretBaseUnion; + /** Base64 encoded certificate data for ClientCertificate authentication. For on-premises copy with ClientCertificate authentication, either CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression with resultType string). */ + embeddedCertData?: any; + /** Thumbprint of certificate for ClientCertificate authentication. Only valid for on-premises copy. For on-premises copy with ClientCertificate authentication, either CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression with resultType string). */ + certThumbprint?: any; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; + /** If true, validate the HTTPS server SSL certificate. Default value is true. Type: boolean (or Expression with resultType boolean). */ + enableServerCertificateValidation?: any; }; -/** The path of the SAP ECC OData entity. */ -export type SapEccResourceDataset = Dataset & { +/** An FTP server linked service. */ +export type FtpServerLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "SapEccResource"; - /** The path of the SAP ECC OData entity. Type: string (or Expression with resultType string). */ - path: any; + type: "FtpServer"; + /** Host name of the FTP server. Type: string (or Expression with resultType string). */ + host: any; + /** The TCP port number that the FTP server uses to listen for client connections. Default value is 21. Type: integer (or Expression with resultType integer), minimum: 0. */ + port?: any; + /** The authentication type to be used to connect to the FTP server. */ + authenticationType?: FtpAuthenticationType; + /** Username to logon the FTP server. Type: string (or Expression with resultType string). */ + userName?: any; + /** Password to logon the FTP server. */ + password?: SecretBaseUnion; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; + /** If true, connect to the FTP server over SSL/TLS channel. Default value is true. Type: boolean (or Expression with resultType boolean). */ + enableSsl?: any; + /** If true, validate the FTP server SSL certificate when connect over SSL/TLS channel. Default value is true. Type: boolean (or Expression with resultType boolean). */ + enableServerCertificateValidation?: any; }; -/** SAP HANA Table properties. */ -export type SapHanaTableDataset = Dataset & { +/** A linked service for an SSH File Transfer Protocol (SFTP) server. */ +export type SftpServerLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "SapHanaTable"; - /** The schema name of SAP HANA. Type: string (or Expression with resultType string). */ - schemaTypePropertiesSchema?: any; - /** The table name of SAP HANA. Type: string (or Expression with resultType string). */ - table?: any; + type: "Sftp"; + /** The SFTP server host name. Type: string (or Expression with resultType string). */ + host: any; + /** The TCP port number that the SFTP server uses to listen for client connections. Default value is 22. Type: integer (or Expression with resultType integer), minimum: 0.
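
// Editor's note: illustrative sketch only, not part of the generated patch. An
// FtpServerLinkedService over TLS, per the type above; the host and credentials
// are hypothetical, and "Basic" is assumed to be a member of FtpAuthenticationType.
const ftpExample: FtpServerLinkedService = {
  type: "FtpServer",
  host: "ftp.contoso.example",
  port: 21,
  authenticationType: "Basic",
  userName: "transfer",
  password: { type: "SecureString", value: "<password>" },
  enableSsl: true,
  enableServerCertificateValidation: true
};
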
*/ + port?: any; + /** The authentication type to be used to connect to the SFTP server. */ + authenticationType?: SftpAuthenticationType; + /** The username used to log on to the SFTP server. Type: string (or Expression with resultType string). */ + userName?: any; + /** Password to logon the SFTP server for Basic authentication. */ + password?: SecretBaseUnion; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; + /** The SSH private key file path for SshPublicKey authentication. Only valid for on-premises copy. For on-premises copy with SshPublicKey authentication, either PrivateKeyPath or PrivateKeyContent should be specified. SSH private key should be OpenSSH format. Type: string (or Expression with resultType string). */ + privateKeyPath?: any; + /** Base64 encoded SSH private key content for SshPublicKey authentication. For on-premises copy with SshPublicKey authentication, either PrivateKeyPath or PrivateKeyContent should be specified. SSH private key should be OpenSSH format. */ + privateKeyContent?: SecretBaseUnion; + /** The password to decrypt the SSH private key if the SSH private key is encrypted. */ + passPhrase?: SecretBaseUnion; + /** If true, skip the SSH host key validation. Default value is false. Type: boolean (or Expression with resultType boolean). */ + skipHostKeyValidation?: any; + /** The host key finger-print of the SFTP server. When SkipHostKeyValidation is false, HostKeyFingerprint should be specified. Type: string (or Expression with resultType string). */ + hostKeyFingerprint?: any; }; -/** Sap Business Warehouse Open Hub Destination Table properties. */ -export type SapOpenHubTableDataset = Dataset & { +/** SAP Business Warehouse Linked Service. */ +export type SapBWLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "SapOpenHubTable"; - /** The name of the Open Hub Destination with destination type as Database Table. Type: string (or Expression with resultType string). */ - openHubDestinationName: any; - /** Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with resultType boolean). */ - excludeLastRequest?: any; - /** The ID of request for delta loading. Once it is set, only data with requestId larger than the value of this property will be retrieved. The default value is 0. Type: integer (or Expression with resultType integer ). */ - baseRequestId?: any; + type: "SapBW"; + /** Host name of the SAP BW instance. Type: string (or Expression with resultType string). */ + server: any; + /** System number of the BW system. (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). */ + systemNumber: any; + /** Client ID of the client on the BW system. (Usually a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). */ + clientId: any; + /** Username to access the SAP BW server. Type: string (or Expression with resultType string). */ + userName?: any; + /** Password to access the SAP BW server. */ + password?: SecretBaseUnion; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string).
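
// Editor's note: illustrative sketch only, not part of the generated patch. An
// SftpServerLinkedService using SSH public key authentication, per the type
// above; every value is hypothetical, and "SshPublicKey" is assumed to be a
// member of SftpAuthenticationType.
const sftpExample: SftpServerLinkedService = {
  type: "Sftp",
  host: "sftp.contoso.example",
  port: 22,
  authenticationType: "SshPublicKey",
  userName: "transfer",
  privateKeyContent: { type: "SecureString", value: "<base64 OpenSSH private key>" },
  skipHostKeyValidation: false,
  hostKeyFingerprint: "<host-key-fingerprint>" // expected when validation is on
};
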
*/ + encryptedCredential?: any; }; -/** The on-premises SQL Server dataset. */ -export type SqlServerTableDataset = Dataset & { +/** SAP HANA Linked Service. */ +export type SapHanaLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "SqlServerTable"; - /** This property will be retired. Please consider using schema + table properties instead. */ - tableName?: any; - /** The schema name of the SQL Server dataset. Type: string (or Expression with resultType string). */ - schemaTypePropertiesSchema?: any; - /** The table name of the SQL Server dataset. Type: string (or Expression with resultType string). */ - table?: any; + type: "SapHana"; + /** SAP HANA ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ + connectionString?: any; + /** Host name of the SAP HANA server. Type: string (or Expression with resultType string). */ + server: any; + /** The authentication type to be used to connect to the SAP HANA server. */ + authenticationType?: SapHanaAuthenticationType; + /** Username to access the SAP HANA server. Type: string (or Expression with resultType string). */ + userName?: any; + /** Password to access the SAP HANA server. */ + password?: SecretBaseUnion; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** A Rest service dataset. */ -export type RestResourceDataset = Dataset & { +/** Amazon Marketplace Web Service linked service. */ +export type AmazonMWSLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "RestResource"; - /** The relative URL to the resource that the RESTful API provides. Type: string (or Expression with resultType string). */ - relativeUrl?: any; - /** The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). */ - requestMethod?: any; - /** The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType string). */ - requestBody?: any; - /** The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). */ - additionalHeaders?: any; - /** The pagination rules to compose next page requests. Type: string (or Expression with resultType string). */ - paginationRules?: any; + type: "AmazonMWS"; + /** The endpoint of the Amazon MWS server, (i.e. mws.amazonservices.com) */ + endpoint: any; + /** The Amazon Marketplace ID you want to retrieve data from. To retrieve data from multiple Marketplace IDs, separate them with a comma (,). (i.e. A2EUQ1WTGCTBG2) */ + marketplaceID: any; + /** The Amazon seller ID. */ + sellerID: any; + /** The Amazon MWS authentication token. */ + mwsAuthToken?: SecretBaseUnion; + /** The access key id used to access data. */ + accessKeyId: any; + /** The secret key used to access data. */ + secretKey?: SecretBaseUnion; + /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */ + useEncryptedEndpoints?: any; + /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. 
*/ + useHostVerification?: any; + /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */ + usePeerVerification?: any; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** SAP Table Resource properties. */ -export type SapTableResourceDataset = Dataset & { +/** Azure PostgreSQL linked service. */ +export type AzurePostgreSqlLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "SapTableResource"; - /** The name of the SAP Table. Type: string (or Expression with resultType string). */ - tableName: any; + type: "AzurePostgreSql"; + /** An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ + connectionString?: any; + /** The Azure key vault secret reference of password in connection string. */ + password?: AzureKeyVaultSecretReference; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** The dataset points to a HTML table in the web page. */ -export type WebTableDataset = Dataset & { +/** Concur Service linked service. */ +export type ConcurLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "WebTable"; - /** The zero-based index of the table in the web page. Type: integer (or Expression with resultType integer), minimum: 0. */ - index: any; - /** The relative URL to the web page from the linked service URL. Type: string (or Expression with resultType string). */ - path?: any; + type: "Concur"; + /** Properties used to connect to Concur. It is mutually exclusive with any other properties in the linked service. Type: object. */ + connectionProperties?: any; + /** Application client_id supplied by Concur App Management. */ + clientId: any; + /** The user name that you use to access Concur Service. */ + username: any; + /** The password corresponding to the user name that you provided in the username field. */ + password?: SecretBaseUnion; + /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */ + useEncryptedEndpoints?: any; + /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */ + useHostVerification?: any; + /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */ + usePeerVerification?: any; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** The Azure Search Index. */ -export type AzureSearchIndexDataset = Dataset & { +/** Couchbase server linked service. */ +export type CouchbaseLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "AzureSearchIndex"; - /** The name of the Azure Search Index. Type: string (or Expression with resultType string). */ - indexName: any; + type: "Couchbase"; + /** An ODBC connection string. 
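
// Editor's note: illustrative sketch only, not part of the generated patch. An
// AzurePostgreSqlLinkedService whose password comes from Key Vault; the
// connection string, vault reference name, and secret name are hypothetical, and
// the AzureKeyVaultSecretReference / LinkedServiceReference shapes are assumed
// from this package's models.
const postgresExample: AzurePostgreSqlLinkedService = {
  type: "AzurePostgreSql",
  connectionString: "host=pg.contoso.example;port=5432;database=dw;uid=etl",
  password: {
    type: "AzureKeyVaultSecret",
    store: { type: "LinkedServiceReference", referenceName: "MyKeyVault" },
    secretName: "pg-password"
  }
};
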
Type: string, SecureString or AzureKeyVaultSecretReference. */ + connectionString?: any; + /** The Azure key vault secret reference of credString in connection string. */ + credString?: AzureKeyVaultSecretReference; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** A file in an HTTP web server. */ -export type HttpDataset = Dataset & { - /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "HttpFile"; - /** The relative URL based on the URL in the HttpLinkedService refers to an HTTP file Type: string (or Expression with resultType string). */ - relativeUrl?: any; - /** The HTTP method for the HTTP request. Type: string (or Expression with resultType string). */ - requestMethod?: any; - /** The body for the HTTP request. Type: string (or Expression with resultType string). */ - requestBody?: any; - /** - * The headers for the HTTP Request. e.g. request-header-name-1:request-header-value-1 - * ... - * request-header-name-n:request-header-value-n Type: string (or Expression with resultType string). - */ - additionalHeaders?: any; - /** The format of files. */ - format?: DatasetStorageFormatUnion; - /** The data compression method used on files. */ - compression?: DatasetCompressionUnion; +/** Drill server linked service. */ +export type DrillLinkedService = LinkedService & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "Drill"; + /** An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ + connectionString?: any; + /** The Azure key vault secret reference of password in connection string. */ + pwd?: AzureKeyVaultSecretReference; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** Amazon Marketplace Web Service dataset. */ -export type AmazonMWSObjectDataset = Dataset & { +/** Eloqua server linked service. */ +export type EloquaLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "AmazonMWSObject"; - /** The table name. Type: string (or Expression with resultType string). */ - tableName?: any; + type: "Eloqua"; + /** The endpoint of the Eloqua server. (i.e. eloqua.example.com) */ + endpoint: any; + /** The site name and user name of your Eloqua account in the form: sitename/username. (i.e. Eloqua/Alice) */ + username: any; + /** The password corresponding to the user name. */ + password?: SecretBaseUnion; + /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */ + useEncryptedEndpoints?: any; + /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */ + useHostVerification?: any; + /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */ + usePeerVerification?: any; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** Azure PostgreSQL dataset. 
*/ -export type AzurePostgreSqlTableDataset = Dataset & { +/** Google BigQuery service linked service. */ +export type GoogleBigQueryLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "AzurePostgreSqlTable"; - /** The table name of the Azure PostgreSQL database which includes both schema and table. Type: string (or Expression with resultType string). */ - tableName?: any; - /** The table name of the Azure PostgreSQL database. Type: string (or Expression with resultType string). */ - table?: any; - /** The schema name of the Azure PostgreSQL database. Type: string (or Expression with resultType string). */ - schemaTypePropertiesSchema?: any; + type: "GoogleBigQuery"; + /** The default BigQuery project to query against. */ + project: any; + /** A comma-separated list of public BigQuery projects to access. */ + additionalProjects?: any; + /** Whether to request access to Google Drive. Allowing Google Drive access enables support for federated tables that combine BigQuery data with data from Google Drive. The default value is false. */ + requestGoogleDriveScope?: any; + /** The OAuth 2.0 authentication mechanism used for authentication. ServiceAuthentication can only be used on self-hosted IR. */ + authenticationType: GoogleBigQueryAuthenticationType; + /** The refresh token obtained from Google for authorizing access to BigQuery for UserAuthentication. */ + refreshToken?: SecretBaseUnion; + /** The client id of the google application used to acquire the refresh token. Type: string (or Expression with resultType string). */ + clientId?: any; + /** The client secret of the google application used to acquire the refresh token. */ + clientSecret?: SecretBaseUnion; + /** The service account email ID that is used for ServiceAuthentication and can only be used on self-hosted IR. */ + email?: any; + /** The full path to the .p12 key file that is used to authenticate the service account email address and can only be used on self-hosted IR. */ + keyFilePath?: any; + /** The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. */ + trustedCertPath?: any; + /** Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. */ + useSystemTrustStore?: any; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** Concur Service dataset. */ -export type ConcurObjectDataset = Dataset & { +/** Greenplum Database linked service. */ +export type GreenplumLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "ConcurObject"; - /** The table name. Type: string (or Expression with resultType string). */ - tableName?: any; + type: "Greenplum"; + /** An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ + connectionString?: any; + /** The Azure key vault secret reference of password in connection string. */ + pwd?: AzureKeyVaultSecretReference; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. 
Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** Couchbase server dataset. */ -export type CouchbaseTableDataset = Dataset & { +/** HBase server linked service. */ +export type HBaseLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "CouchbaseTable"; - /** The table name. Type: string (or Expression with resultType string). */ - tableName?: any; + type: "HBase"; + /** The IP address or host name of the HBase server. (i.e. 192.168.222.160) */ + host: any; + /** The TCP port that the HBase instance uses to listen for client connections. The default value is 9090. */ + port?: any; + /** The partial URL corresponding to the HBase server. (i.e. /gateway/sandbox/hbase/version) */ + httpPath?: any; + /** The authentication mechanism to use to connect to the HBase server. */ + authenticationType: HBaseAuthenticationType; + /** The user name used to connect to the HBase instance. */ + username?: any; + /** The password corresponding to the user name. */ + password?: SecretBaseUnion; + /** Specifies whether the connections to the server are encrypted using SSL. The default value is false. */ + enableSsl?: any; + /** The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. */ + trustedCertPath?: any; + /** Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. */ + allowHostNameCNMismatch?: any; + /** Specifies whether to allow self-signed certificates from the server. The default value is false. */ + allowSelfSignedServerCert?: any; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** Drill server dataset. */ -export type DrillTableDataset = Dataset & { +/** Hive Server linked service. */ +export type HiveLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "DrillTable"; - /** This property will be retired. Please consider using schema + table properties instead. */ - tableName?: any; - /** The table name of the Drill. Type: string (or Expression with resultType string). */ - table?: any; - /** The schema name of the Drill. Type: string (or Expression with resultType string). */ - schemaTypePropertiesSchema?: any; + type: "Hive"; + /** IP address or host name of the Hive server, separated by ';' for multiple hosts (only when serviceDiscoveryMode is enable). */ + host: any; + /** The TCP port that the Hive server uses to listen for client connections. */ + port?: any; + /** The type of Hive server. */ + serverType?: HiveServerType; + /** The transport protocol to use in the Thrift layer. */ + thriftTransportProtocol?: HiveThriftTransportProtocol; + /** The authentication method used to access the Hive server. */ + authenticationType: HiveAuthenticationType; + /** true to indicate using the ZooKeeper service, false not. */ + serviceDiscoveryMode?: any; + /** The namespace on ZooKeeper under which Hive Server 2 nodes are added. 
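
// Editor's note: illustrative sketch only, not part of the generated patch. An
// HBaseLinkedService mirroring the doc-comment examples above (host, port 9090,
// gateway path); the credentials are hypothetical and "Basic" is assumed to be a
// member of HBaseAuthenticationType.
const hbaseExample: HBaseLinkedService = {
  type: "HBase",
  host: "192.168.222.160",
  port: 9090,
  httpPath: "/gateway/sandbox/hbase/version",
  authenticationType: "Basic",
  username: "hbase-user",
  password: { type: "SecureString", value: "<password>" },
  enableSsl: true
};
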
*/
+  zooKeeperNameSpace?: any;
+  /** Specifies whether the driver uses native HiveQL queries, or converts them into an equivalent form in HiveQL. */
+  useNativeQuery?: any;
+  /** The user name that you use to access Hive Server. */
+  username?: any;
+  /** The password corresponding to the user name that you provided in the Username field. */
+  password?: SecretBaseUnion;
+  /** The partial URL corresponding to the Hive server. */
+  httpPath?: any;
+  /** Specifies whether the connections to the server are encrypted using SSL. The default value is false. */
+  enableSsl?: any;
+  /** The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. */
+  trustedCertPath?: any;
+  /** Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. */
+  useSystemTrustStore?: any;
+  /** Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. */
+  allowHostNameCNMismatch?: any;
+  /** Specifies whether to allow self-signed certificates from the server. The default value is false. */
+  allowSelfSignedServerCert?: any;
+  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
+  encryptedCredential?: any;
};

-/** Eloqua server dataset. */
-export type EloquaObjectDataset = Dataset & {
+/** Hubspot Service linked service. */
+export type HubspotLinkedService = LinkedService & {
  /** Polymorphic discriminator, which specifies the different types this object can be */
-  type: "EloquaObject";
-  /** The table name. Type: string (or Expression with resultType string). */
-  tableName?: any;
+  type: "Hubspot";
+  /** The client ID associated with your Hubspot application. */
+  clientId: any;
+  /** The client secret associated with your Hubspot application. */
+  clientSecret?: SecretBaseUnion;
+  /** The access token obtained when initially authenticating your OAuth integration. */
+  accessToken?: SecretBaseUnion;
+  /** The refresh token obtained when initially authenticating your OAuth integration. */
+  refreshToken?: SecretBaseUnion;
+  /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */
+  useEncryptedEndpoints?: any;
+  /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */
+  useHostVerification?: any;
+  /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */
+  usePeerVerification?: any;
+  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
+  encryptedCredential?: any;
};

-/** Google BigQuery service dataset. */
-export type GoogleBigQueryObjectDataset = Dataset & {
+/** Impala server linked service. */
+export type ImpalaLinkedService = LinkedService & {
  /** Polymorphic discriminator, which specifies the different types this object can be */
-  type: "GoogleBigQueryObject";
-  /** This property will be retired. Please consider using database + table properties instead.
*/ - tableName?: any; - /** The table name of the Google BigQuery. Type: string (or Expression with resultType string). */ - table?: any; - /** The database name of the Google BigQuery. Type: string (or Expression with resultType string). */ - dataset?: any; + type: "Impala"; + /** The IP address or host name of the Impala server. (i.e. 192.168.222.160) */ + host: any; + /** The TCP port that the Impala server uses to listen for client connections. The default value is 21050. */ + port?: any; + /** The authentication type to use. */ + authenticationType: ImpalaAuthenticationType; + /** The user name used to access the Impala server. The default value is anonymous when using SASLUsername. */ + username?: any; + /** The password corresponding to the user name when using UsernameAndPassword. */ + password?: SecretBaseUnion; + /** Specifies whether the connections to the server are encrypted using SSL. The default value is false. */ + enableSsl?: any; + /** The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. */ + trustedCertPath?: any; + /** Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. */ + useSystemTrustStore?: any; + /** Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. */ + allowHostNameCNMismatch?: any; + /** Specifies whether to allow self-signed certificates from the server. The default value is false. */ + allowSelfSignedServerCert?: any; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** Greenplum Database dataset. */ -export type GreenplumTableDataset = Dataset & { +/** Jira Service linked service. */ +export type JiraLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "GreenplumTable"; - /** This property will be retired. Please consider using schema + table properties instead. */ - tableName?: any; - /** The table name of Greenplum. Type: string (or Expression with resultType string). */ - table?: any; - /** The schema name of Greenplum. Type: string (or Expression with resultType string). */ - schemaTypePropertiesSchema?: any; + type: "Jira"; + /** The IP address or host name of the Jira service. (e.g. jira.example.com) */ + host: any; + /** The TCP port that the Jira server uses to listen for client connections. The default value is 443 if connecting through HTTPS, or 8080 if connecting through HTTP. */ + port?: any; + /** The user name that you use to access Jira Service. */ + username: any; + /** The password corresponding to the user name that you provided in the username field. */ + password?: SecretBaseUnion; + /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */ + useEncryptedEndpoints?: any; + /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */ + useHostVerification?: any; + /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. 
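 *
 * Illustrative sketch of a `JiraLinkedService` (values are placeholders, not
 * generated content):
 * ```ts
 * const jira: JiraLinkedService = {
 *   type: "Jira",
 *   host: "jira.example.com",
 *   username: "reporting-user",
 *   password: { type: "SecureString", value: "<placeholder>" }
 * };
 * ```
 *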
*/ + usePeerVerification?: any; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** HBase server dataset. */ -export type HBaseObjectDataset = Dataset & { +/** Magento server linked service. */ +export type MagentoLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "HBaseObject"; - /** The table name. Type: string (or Expression with resultType string). */ - tableName?: any; + type: "Magento"; + /** The URL of the Magento instance. (i.e. 192.168.222.110/magento3) */ + host: any; + /** The access token from Magento. */ + accessToken?: SecretBaseUnion; + /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */ + useEncryptedEndpoints?: any; + /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */ + useHostVerification?: any; + /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */ + usePeerVerification?: any; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** Hive Server dataset. */ -export type HiveObjectDataset = Dataset & { +/** MariaDB server linked service. */ +export type MariaDBLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "HiveObject"; - /** This property will be retired. Please consider using schema + table properties instead. */ - tableName?: any; - /** The table name of the Hive. Type: string (or Expression with resultType string). */ - table?: any; - /** The schema name of the Hive. Type: string (or Expression with resultType string). */ - schemaTypePropertiesSchema?: any; + type: "MariaDB"; + /** An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ + connectionString?: any; + /** The Azure key vault secret reference of password in connection string. */ + pwd?: AzureKeyVaultSecretReference; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** Hubspot Service dataset. */ -export type HubspotObjectDataset = Dataset & { +/** Azure Database for MariaDB linked service. */ +export type AzureMariaDBLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "HubspotObject"; - /** The table name. Type: string (or Expression with resultType string). */ - tableName?: any; + type: "AzureMariaDB"; + /** An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ + connectionString?: any; + /** The Azure key vault secret reference of password in connection string. */ + pwd?: AzureKeyVaultSecretReference; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** Impala server dataset. 
*/ -export type ImpalaObjectDataset = Dataset & { +/** Marketo server linked service. */ +export type MarketoLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "ImpalaObject"; - /** This property will be retired. Please consider using schema + table properties instead. */ - tableName?: any; - /** The table name of the Impala. Type: string (or Expression with resultType string). */ - table?: any; - /** The schema name of the Impala. Type: string (or Expression with resultType string). */ - schemaTypePropertiesSchema?: any; + type: "Marketo"; + /** The endpoint of the Marketo server. (i.e. 123-ABC-321.mktorest.com) */ + endpoint: any; + /** The client Id of your Marketo service. */ + clientId: any; + /** The client secret of your Marketo service. */ + clientSecret?: SecretBaseUnion; + /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */ + useEncryptedEndpoints?: any; + /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */ + useHostVerification?: any; + /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */ + usePeerVerification?: any; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** Jira Service dataset. */ -export type JiraObjectDataset = Dataset & { +/** Paypal Service linked service. */ +export type PaypalLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "JiraObject"; - /** The table name. Type: string (or Expression with resultType string). */ - tableName?: any; + type: "Paypal"; + /** The URL of the PayPal instance. (i.e. api.sandbox.paypal.com) */ + host: any; + /** The client ID associated with your PayPal application. */ + clientId: any; + /** The client secret associated with your PayPal application. */ + clientSecret?: SecretBaseUnion; + /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */ + useEncryptedEndpoints?: any; + /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */ + useHostVerification?: any; + /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */ + usePeerVerification?: any; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** Magento server dataset. */ -export type MagentoObjectDataset = Dataset & { +/** Phoenix server linked service. */ +export type PhoenixLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "MagentoObject"; - /** The table name. Type: string (or Expression with resultType string). */ - tableName?: any; + type: "Phoenix"; + /** The IP address or host name of the Phoenix server. (i.e. 192.168.222.160) */ + host: any; + /** The TCP port that the Phoenix server uses to listen for client connections. The default value is 8765. 
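 *
 * Illustrative `PhoenixLinkedService` sketch (placeholder values; the
 * `authenticationType` literal is an assumption from the service spec):
 * ```ts
 * const phoenix: PhoenixLinkedService = {
 *   type: "Phoenix",
 *   host: "phoenix.example.com",
 *   port: 8765,
 *   httpPath: "/gateway/sandbox/phoenix/version",
 *   authenticationType: "WindowsAzureHDInsightService"
 * };
 * ```
 *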
*/
+  port?: any;
+  /** The partial URL corresponding to the Phoenix server. (i.e. /gateway/sandbox/phoenix/version). The default value is hbasephoenix if using WindowsAzureHDInsightService. */
+  httpPath?: any;
+  /** The authentication mechanism used to connect to the Phoenix server. */
+  authenticationType: PhoenixAuthenticationType;
+  /** The user name used to connect to the Phoenix server. */
+  username?: any;
+  /** The password corresponding to the user name. */
+  password?: SecretBaseUnion;
+  /** Specifies whether the connections to the server are encrypted using SSL. The default value is false. */
+  enableSsl?: any;
+  /** The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. */
+  trustedCertPath?: any;
+  /** Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. */
+  useSystemTrustStore?: any;
+  /** Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. */
+  allowHostNameCNMismatch?: any;
+  /** Specifies whether to allow self-signed certificates from the server. The default value is false. */
+  allowSelfSignedServerCert?: any;
+  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
+  encryptedCredential?: any;
};

-/** MariaDB server dataset. */
-export type MariaDBTableDataset = Dataset & {
+/** Presto server linked service. */
+export type PrestoLinkedService = LinkedService & {
  /** Polymorphic discriminator, which specifies the different types this object can be */
-  type: "MariaDBTable";
-  /** The table name. Type: string (or Expression with resultType string). */
-  tableName?: any;
+  type: "Presto";
+  /** The IP address or host name of the Presto server. (i.e. 192.168.222.160) */
+  host: any;
+  /** The version of the Presto server. (i.e. 0.148-t) */
+  serverVersion: any;
+  /** The catalog context for all requests against the server. */
+  catalog: any;
+  /** The TCP port that the Presto server uses to listen for client connections. The default value is 8080. */
+  port?: any;
+  /** The authentication mechanism used to connect to the Presto server. */
+  authenticationType: PrestoAuthenticationType;
+  /** The user name used to connect to the Presto server. */
+  username?: any;
+  /** The password corresponding to the user name. */
+  password?: SecretBaseUnion;
+  /** Specifies whether the connections to the server are encrypted using SSL. The default value is false. */
+  enableSsl?: any;
+  /** The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. */
+  trustedCertPath?: any;
+  /** Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. */
+  useSystemTrustStore?: any;
+  /** Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. */
+  allowHostNameCNMismatch?: any;
+  /** Specifies whether to allow self-signed certificates from the server.
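 *
 * Illustrative `PrestoLinkedService` sketch covering the required properties
 * (placeholder values; the `authenticationType` literal is an assumption):
 * ```ts
 * const presto: PrestoLinkedService = {
 *   type: "Presto",
 *   host: "presto.example.com",
 *   serverVersion: "0.148-t",
 *   catalog: "hive",
 *   authenticationType: "LDAP",
 *   username: "analyst",
 *   password: { type: "SecureString", value: "<placeholder>" }
 * };
 * ```
 *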
The default value is false. */ + allowSelfSignedServerCert?: any; + /** The local time zone used by the connection. Valid values for this option are specified in the IANA Time Zone Database. The default value is the system time zone. */ + timeZoneID?: any; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** Azure Database for MariaDB dataset. */ -export type AzureMariaDBTableDataset = Dataset & { +/** QuickBooks server linked service. */ +export type QuickBooksLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "AzureMariaDBTable"; - /** The table name. Type: string (or Expression with resultType string). */ - tableName?: any; + type: "QuickBooks"; + /** Properties used to connect to QuickBooks. It is mutually exclusive with any other properties in the linked service. Type: object. */ + connectionProperties?: any; + /** The endpoint of the QuickBooks server. (i.e. quickbooks.api.intuit.com) */ + endpoint: any; + /** The company ID of the QuickBooks company to authorize. */ + companyId: any; + /** The consumer key for OAuth 1.0 authentication. */ + consumerKey: any; + /** The consumer secret for OAuth 1.0 authentication. */ + consumerSecret: SecretBaseUnion; + /** The access token for OAuth 1.0 authentication. */ + accessToken: SecretBaseUnion; + /** The access token secret for OAuth 1.0 authentication. */ + accessTokenSecret: SecretBaseUnion; + /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */ + useEncryptedEndpoints?: any; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** Marketo server dataset. */ -export type MarketoObjectDataset = Dataset & { +/** ServiceNow server linked service. */ +export type ServiceNowLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "MarketoObject"; - /** The table name. Type: string (or Expression with resultType string). */ - tableName?: any; + type: "ServiceNow"; + /** The endpoint of the ServiceNow server. (i.e. .service-now.com) */ + endpoint: any; + /** The authentication type to use. */ + authenticationType: ServiceNowAuthenticationType; + /** The user name used to connect to the ServiceNow server for Basic and OAuth2 authentication. */ + username?: any; + /** The password corresponding to the user name for Basic and OAuth2 authentication. */ + password?: SecretBaseUnion; + /** The client id for OAuth2 authentication. */ + clientId?: any; + /** The client secret for OAuth2 authentication. */ + clientSecret?: SecretBaseUnion; + /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */ + useEncryptedEndpoints?: any; + /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */ + useHostVerification?: any; + /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */ + usePeerVerification?: any; + /** The encrypted credential used for authentication. 
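 *
 * Illustrative `ServiceNowLinkedService` sketch (placeholder values; the
 * `authenticationType` literal is an assumption):
 * ```ts
 * const serviceNow: ServiceNowLinkedService = {
 *   type: "ServiceNow",
 *   endpoint: "myinstance.service-now.com",
 *   authenticationType: "Basic",
 *   username: "integration-user",
 *   password: { type: "SecureString", value: "<placeholder>" }
 * };
 * ```
 *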
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
+  encryptedCredential?: any;
};

-/** Paypal Service dataset. */
-export type PaypalObjectDataset = Dataset & {
+/** Shopify Service linked service. */
+export type ShopifyLinkedService = LinkedService & {
  /** Polymorphic discriminator, which specifies the different types this object can be */
-  type: "PaypalObject";
-  /** The table name. Type: string (or Expression with resultType string). */
-  tableName?: any;
+  type: "Shopify";
+  /** The endpoint of the Shopify server. (i.e. mystore.myshopify.com) */
+  host: any;
+  /** The API access token that can be used to access Shopify’s data. The token won't expire if it is in offline mode. */
+  accessToken?: SecretBaseUnion;
+  /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */
+  useEncryptedEndpoints?: any;
+  /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */
+  useHostVerification?: any;
+  /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */
+  usePeerVerification?: any;
+  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
+  encryptedCredential?: any;
};

-/** Phoenix server dataset. */
-export type PhoenixObjectDataset = Dataset & {
+/** Spark Server linked service. */
+export type SparkLinkedService = LinkedService & {
  /** Polymorphic discriminator, which specifies the different types this object can be */
-  type: "PhoenixObject";
-  /** This property will be retired. Please consider using schema + table properties instead. */
-  tableName?: any;
-  /** The table name of the Phoenix. Type: string (or Expression with resultType string). */
-  table?: any;
-  /** The schema name of the Phoenix. Type: string (or Expression with resultType string). */
-  schemaTypePropertiesSchema?: any;
+  type: "Spark";
+  /** IP address or host name of the Spark server. */
+  host: any;
+  /** The TCP port that the Spark server uses to listen for client connections. */
+  port: any;
+  /** The type of Spark server. */
+  serverType?: SparkServerType;
+  /** The transport protocol to use in the Thrift layer. */
+  thriftTransportProtocol?: SparkThriftTransportProtocol;
+  /** The authentication method used to access the Spark server. */
+  authenticationType: SparkAuthenticationType;
+  /** The user name that you use to access Spark Server. */
+  username?: any;
+  /** The password corresponding to the user name that you provided in the Username field. */
+  password?: SecretBaseUnion;
+  /** The partial URL corresponding to the Spark server. */
+  httpPath?: any;
+  /** Specifies whether the connections to the server are encrypted using SSL. The default value is false. */
+  enableSsl?: any;
+  /** The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. */
+  trustedCertPath?: any;
+  /** Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false.
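 *
 * Illustrative `SparkLinkedService` sketch covering the required properties
 * (placeholder values; the `authenticationType` literal is an assumption):
 * ```ts
 * const spark: SparkLinkedService = {
 *   type: "Spark",
 *   host: "spark.example.com",
 *   port: 443,
 *   authenticationType: "WindowsAzureHDInsightService",
 *   username: "admin",
 *   password: { type: "SecureString", value: "<placeholder>" }
 * };
 * ```
 *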
*/ + useSystemTrustStore?: any; + /** Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. */ + allowHostNameCNMismatch?: any; + /** Specifies whether to allow self-signed certificates from the server. The default value is false. */ + allowSelfSignedServerCert?: any; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** Presto server dataset. */ -export type PrestoObjectDataset = Dataset & { +/** Square Service linked service. */ +export type SquareLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "PrestoObject"; - /** This property will be retired. Please consider using schema + table properties instead. */ - tableName?: any; - /** The table name of the Presto. Type: string (or Expression with resultType string). */ - table?: any; - /** The schema name of the Presto. Type: string (or Expression with resultType string). */ - schemaTypePropertiesSchema?: any; + type: "Square"; + /** Properties used to connect to Square. It is mutually exclusive with any other properties in the linked service. Type: object. */ + connectionProperties?: any; + /** The URL of the Square instance. (i.e. mystore.mysquare.com) */ + host: any; + /** The client ID associated with your Square application. */ + clientId: any; + /** The client secret associated with your Square application. */ + clientSecret?: SecretBaseUnion; + /** The redirect URL assigned in the Square application dashboard. (i.e. http://localhost:2500) */ + redirectUri: any; + /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */ + useEncryptedEndpoints?: any; + /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */ + useHostVerification?: any; + /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */ + usePeerVerification?: any; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** QuickBooks server dataset. */ -export type QuickBooksObjectDataset = Dataset & { +/** Xero Service linked service. */ +export type XeroLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "QuickBooksObject"; - /** The table name. Type: string (or Expression with resultType string). */ - tableName?: any; + type: "Xero"; + /** Properties used to connect to Xero. It is mutually exclusive with any other properties in the linked service. Type: object. */ + connectionProperties?: any; + /** The endpoint of the Xero server. (i.e. api.xero.com) */ + host: any; + /** The consumer key associated with the Xero application. */ + consumerKey?: SecretBaseUnion; + /** + * The private key from the .pem file that was generated for your Xero private application. You must include all the text from the .pem file, including the Unix line endings( + * ). + */ + privateKey?: SecretBaseUnion; + /** Specifies whether the data source endpoints are encrypted using HTTPS. 
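 *
 * Illustrative `XeroLinkedService` sketch (placeholder values only):
 * ```ts
 * const xero: XeroLinkedService = {
 *   type: "Xero",
 *   host: "api.xero.com",
 *   consumerKey: { type: "SecureString", value: "<placeholder>" },
 *   privateKey: { type: "SecureString", value: "<contents of the .pem file>" }
 * };
 * ```
 *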
The default value is true. */ + useEncryptedEndpoints?: any; + /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */ + useHostVerification?: any; + /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */ + usePeerVerification?: any; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** ServiceNow server dataset. */ -export type ServiceNowObjectDataset = Dataset & { +/** Zoho server linked service. */ +export type ZohoLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "ServiceNowObject"; - /** The table name. Type: string (or Expression with resultType string). */ - tableName?: any; + type: "Zoho"; + /** Properties used to connect to Zoho. It is mutually exclusive with any other properties in the linked service. Type: object. */ + connectionProperties?: any; + /** The endpoint of the Zoho server. (i.e. crm.zoho.com/crm/private) */ + endpoint: any; + /** The access token for Zoho authentication. */ + accessToken?: SecretBaseUnion; + /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. */ + useEncryptedEndpoints?: any; + /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. */ + useHostVerification?: any; + /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. */ + usePeerVerification?: any; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** Shopify Service dataset. */ -export type ShopifyObjectDataset = Dataset & { +/** Vertica linked service. */ +export type VerticaLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "ShopifyObject"; - /** The table name. Type: string (or Expression with resultType string). */ - tableName?: any; + type: "Vertica"; + /** An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ + connectionString?: any; + /** The Azure key vault secret reference of password in connection string. */ + pwd?: AzureKeyVaultSecretReference; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** Spark Server dataset. */ -export type SparkObjectDataset = Dataset & { +/** Netezza linked service. */ +export type NetezzaLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "SparkObject"; - /** This property will be retired. Please consider using schema + table properties instead. */ - tableName?: any; - /** The table name of the Spark. Type: string (or Expression with resultType string). */ - table?: any; - /** The schema name of the Spark. Type: string (or Expression with resultType string). 
*/ - schemaTypePropertiesSchema?: any; + type: "Netezza"; + /** An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. */ + connectionString?: any; + /** The Azure key vault secret reference of password in connection string. */ + pwd?: AzureKeyVaultSecretReference; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** Square Service dataset. */ -export type SquareObjectDataset = Dataset & { +/** Salesforce Marketing Cloud linked service. */ +export type SalesforceMarketingCloudLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "SquareObject"; - /** The table name. Type: string (or Expression with resultType string). */ - tableName?: any; + type: "SalesforceMarketingCloud"; + /** Properties used to connect to Salesforce Marketing Cloud. It is mutually exclusive with any other properties in the linked service. Type: object. */ + connectionProperties?: any; + /** The client ID associated with the Salesforce Marketing Cloud application. Type: string (or Expression with resultType string). */ + clientId: any; + /** The client secret associated with the Salesforce Marketing Cloud application. Type: string (or Expression with resultType string). */ + clientSecret?: SecretBaseUnion; + /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). */ + useEncryptedEndpoints?: any; + /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). */ + useHostVerification?: any; + /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). */ + usePeerVerification?: any; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** Xero Service dataset. */ -export type XeroObjectDataset = Dataset & { +/** HDInsight ondemand linked service. */ +export type HDInsightOnDemandLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "XeroObject"; - /** The table name. Type: string (or Expression with resultType string). */ - tableName?: any; + type: "HDInsightOnDemand"; + /** Number of worker/data nodes in the cluster. Suggestion value: 4. Type: string (or Expression with resultType string). */ + clusterSize: any; + /** The allowed idle time for the on-demand HDInsight cluster. Specifies how long the on-demand HDInsight cluster stays alive after completion of an activity run if there are no other active jobs in the cluster. The minimum value is 5 mins. Type: string (or Expression with resultType string). */ + timeToLive: any; + /** Version of the HDInsight cluster.  Type: string (or Expression with resultType string). */ + version: any; + /** Azure Storage linked service to be used by the on-demand cluster for storing and processing data. */ + linkedServiceName: LinkedServiceReference; + /** The customer’s subscription to host the cluster. 
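 *
 * Illustrative `HDInsightOnDemandLinkedService` sketch covering the required
 * properties (all values are placeholders; the `LinkedServiceReference` shape is
 * assumed from this SDK's models):
 * ```ts
 * const hdiOnDemand: HDInsightOnDemandLinkedService = {
 *   type: "HDInsightOnDemand",
 *   clusterSize: 4,
 *   timeToLive: "00:15:00",
 *   version: "3.6",
 *   linkedServiceName: { type: "LinkedServiceReference", referenceName: "AzureStorage1" },
 *   hostSubscriptionId: "<subscription-id>",
 *   tenant: "<tenant-id>",
 *   clusterResourceGroup: "my-resource-group"
 * };
 * ```
 *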
Type: string (or Expression with resultType string). */
+  hostSubscriptionId: any;
+  /** The service principal id for the hostSubscriptionId. Type: string (or Expression with resultType string). */
+  servicePrincipalId?: any;
+  /** The key for the service principal id. */
+  servicePrincipalKey?: SecretBaseUnion;
+  /** The Tenant id/name to which the service principal belongs. Type: string (or Expression with resultType string). */
+  tenant: any;
+  /** The resource group where the cluster belongs. Type: string (or Expression with resultType string). */
+  clusterResourceGroup: any;
+  /** The prefix of the cluster name; a timestamp postfix is appended to make the name distinct. Type: string (or Expression with resultType string). */
+  clusterNamePrefix?: any;
+  /** The username to access the cluster. Type: string (or Expression with resultType string). */
+  clusterUserName?: any;
+  /** The password to access the cluster. */
+  clusterPassword?: SecretBaseUnion;
+  /** The username used to remotely connect to the cluster’s node through SSH (for Linux). Type: string (or Expression with resultType string). */
+  clusterSshUserName?: any;
+  /** The password used to remotely connect to the cluster’s node through SSH (for Linux). */
+  clusterSshPassword?: SecretBaseUnion;
+  /** Specifies additional storage accounts for the HDInsight linked service so that the Data Factory service can register them on your behalf. */
+  additionalLinkedServiceNames?: LinkedServiceReference[];
+  /** The name of the Azure SQL linked service that points to the HCatalog database. The on-demand HDInsight cluster is created by using the Azure SQL database as the metastore. */
+  hcatalogLinkedServiceName?: LinkedServiceReference;
+  /** The cluster type. Type: string (or Expression with resultType string). */
+  clusterType?: any;
+  /** The version of Spark if the cluster type is 'spark'. Type: string (or Expression with resultType string). */
+  sparkVersion?: any;
+  /** Specifies the core configuration parameters (as in core-site.xml) for the HDInsight cluster to be created. */
+  coreConfiguration?: any;
+  /** Specifies the HBase configuration parameters (hbase-site.xml) for the HDInsight cluster. */
+  hBaseConfiguration?: any;
+  /** Specifies the HDFS configuration parameters (hdfs-site.xml) for the HDInsight cluster. */
+  hdfsConfiguration?: any;
+  /** Specifies the Hive configuration parameters (hive-site.xml) for the HDInsight cluster. */
+  hiveConfiguration?: any;
+  /** Specifies the MapReduce configuration parameters (mapred-site.xml) for the HDInsight cluster. */
+  mapReduceConfiguration?: any;
+  /** Specifies the Oozie configuration parameters (oozie-site.xml) for the HDInsight cluster. */
+  oozieConfiguration?: any;
+  /** Specifies the Storm configuration parameters (storm-site.xml) for the HDInsight cluster. */
+  stormConfiguration?: any;
+  /** Specifies the Yarn configuration parameters (yarn-site.xml) for the HDInsight cluster. */
+  yarnConfiguration?: any;
+  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
+  encryptedCredential?: any;
+  /** Specifies the size of the head node for the HDInsight cluster. */
+  headNodeSize?: any;
+  /** Specifies the size of the data node for the HDInsight cluster. */
+  dataNodeSize?: any;
+  /** Specifies the size of the Zoo Keeper node for the HDInsight cluster. */
+  zookeeperNodeSize?: any;
+  /** Custom script actions to run on the HDI on-demand cluster once it's up.
Please refer to https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen-us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions. */ + scriptActions?: ScriptAction[]; + /** The ARM resource ID for the vNet to which the cluster should be joined after creation. Type: string (or Expression with resultType string). */ + virtualNetworkId?: any; + /** The ARM resource ID for the subnet in the vNet. If virtualNetworkId was specified, then this property is required. Type: string (or Expression with resultType string). */ + subnetName?: any; }; -/** Zoho server dataset. */ -export type ZohoObjectDataset = Dataset & { +/** Azure Data Lake Analytics linked service. */ +export type AzureDataLakeAnalyticsLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "ZohoObject"; - /** The table name. Type: string (or Expression with resultType string). */ - tableName?: any; + type: "AzureDataLakeAnalytics"; + /** The Azure Data Lake Analytics account name. Type: string (or Expression with resultType string). */ + accountName: any; + /** The ID of the application used to authenticate against the Azure Data Lake Analytics account. Type: string (or Expression with resultType string). */ + servicePrincipalId?: any; + /** The Key of the application used to authenticate against the Azure Data Lake Analytics account. */ + servicePrincipalKey?: SecretBaseUnion; + /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */ + tenant: any; + /** Data Lake Analytics account subscription ID (if different from Data Factory account). Type: string (or Expression with resultType string). */ + subscriptionId?: any; + /** Data Lake Analytics account resource group name (if different from Data Factory account). Type: string (or Expression with resultType string). */ + resourceGroupName?: any; + /** Azure Data Lake Analytics URI Type: string (or Expression with resultType string). */ + dataLakeAnalyticsUri?: any; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** Netezza dataset. */ -export type NetezzaTableDataset = Dataset & { +/** Azure Databricks linked service. */ +export type AzureDatabricksLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "NetezzaTable"; - /** This property will be retired. Please consider using schema + table properties instead. */ - tableName?: any; - /** The table name of the Netezza. Type: string (or Expression with resultType string). */ - table?: any; - /** The schema name of the Netezza. Type: string (or Expression with resultType string). */ - schemaTypePropertiesSchema?: any; + type: "AzureDatabricks"; + /** .azuredatabricks.net, domain name of your Databricks deployment. Type: string (or Expression with resultType string). */ + domain: any; + /** Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression with resultType string). */ + accessToken?: SecretBaseUnion; + /** Required to specify MSI, if using Workspace resource id for databricks REST API. Type: string (or Expression with resultType string). 
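 *
 * Illustrative `AzureDatabricksLinkedService` sketch reusing an existing
 * interactive cluster (domain, token and cluster id are placeholders):
 * ```ts
 * const databricks: AzureDatabricksLinkedService = {
 *   type: "AzureDatabricks",
 *   domain: "adb-1234567890123456.7.azuredatabricks.net",
 *   accessToken: { type: "SecureString", value: "<personal-access-token>" },
 *   existingClusterId: "0123-456789-example"
 * };
 * ```
 *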
*/
+  authentication?: any;
+  /** Workspace resource id for databricks REST API. Type: string (or Expression with resultType string). */
+  workspaceResourceId?: any;
+  /** The id of an existing interactive cluster that will be used for all runs of this activity. Type: string (or Expression with resultType string). */
+  existingClusterId?: any;
+  /** The id of an existing instance pool that will be used for all runs of this activity. Type: string (or Expression with resultType string). */
+  instancePoolId?: any;
+  /** If not using an existing interactive cluster, this specifies the Spark version of a new job cluster or instance pool nodes created for each run of this activity. Required if instancePoolId is specified. Type: string (or Expression with resultType string). */
+  newClusterVersion?: any;
+  /** If not using an existing interactive cluster, this specifies the number of worker nodes to use for the new job cluster or instance pool. For new job clusters, this is a string-formatted Int32, where '1' means numOfWorker is 1 and '1:10' means auto-scale from 1 (min) to 10 (max). For instance pools, this is a string-formatted Int32, and can only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is specified. Type: string (or Expression with resultType string). */
+  newClusterNumOfWorker?: any;
+  /** The node type of the new job cluster. This property is required if newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is specified, this property is ignored. Type: string (or Expression with resultType string). */
+  newClusterNodeType?: any;
+  /** A set of optional, user-specified Spark configuration key-value pairs. */
+  newClusterSparkConf?: { [propertyName: string]: any };
+  /** A set of optional, user-specified Spark environment variable key-value pairs. */
+  newClusterSparkEnvVars?: { [propertyName: string]: any };
+  /** Additional tags for cluster resources. This property is ignored in instance pool configurations. */
+  newClusterCustomTags?: { [propertyName: string]: any };
+  /** Specify a location to deliver Spark driver, worker, and event logs. Type: string (or Expression with resultType string). */
+  newClusterLogDestination?: any;
+  /** The driver node type for the new job cluster. This property is ignored in instance pool configurations. Type: string (or Expression with resultType string). */
+  newClusterDriverNodeType?: any;
+  /** User-defined initialization scripts for the new cluster. Type: array of strings (or Expression with resultType array of strings). */
+  newClusterInitScripts?: any;
+  /** Enable the elastic disk on the new cluster. This property is now ignored, and takes the default elastic disk behavior in Databricks (elastic disks are always enabled). Type: boolean (or Expression with resultType boolean). */
+  newClusterEnableElasticDisk?: any;
+  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
+  encryptedCredential?: any;
+  /** The policy id for limiting the ability to configure clusters based on a user-defined set of rules. Type: string (or Expression with resultType string). */
+  policyId?: any;
};

-/** Vertica dataset. */
-export type VerticaTableDataset = Dataset & {
+/** Azure Databricks Delta Lake linked service.
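 *
 * Illustrative sketch (placeholder values only):
 * ```ts
 * const deltaLake: AzureDatabricksDeltaLakeLinkedService = {
 *   type: "AzureDatabricksDeltaLake",
 *   domain: "adb-1234567890123456.7.azuredatabricks.net",
 *   accessToken: { type: "SecureString", value: "<personal-access-token>" },
 *   clusterId: "0123-456789-example"
 * };
 * ```
 *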
*/ +export type AzureDatabricksDeltaLakeLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "VerticaTable"; - /** This property will be retired. Please consider using schema + table properties instead. */ - tableName?: any; - /** The table name of the Vertica. Type: string (or Expression with resultType string). */ - table?: any; - /** The schema name of the Vertica. Type: string (or Expression with resultType string). */ - schemaTypePropertiesSchema?: any; + type: "AzureDatabricksDeltaLake"; + /** .azuredatabricks.net, domain name of your Databricks deployment. Type: string (or Expression with resultType string). */ + domain: any; + /** Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string, SecureString or AzureKeyVaultSecretReference. */ + accessToken: SecretBaseUnion; + /** The id of an existing interactive cluster that will be used for all runs of this job. Type: string (or Expression with resultType string). */ + clusterId?: any; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** Salesforce Marketing Cloud dataset. */ -export type SalesforceMarketingCloudObjectDataset = Dataset & { +/** Responsys linked service. */ +export type ResponsysLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "SalesforceMarketingCloudObject"; - /** The table name. Type: string (or Expression with resultType string). */ - tableName?: any; + type: "Responsys"; + /** The endpoint of the Responsys server. */ + endpoint: any; + /** The client ID associated with the Responsys application. Type: string (or Expression with resultType string). */ + clientId: any; + /** The client secret associated with the Responsys application. Type: string (or Expression with resultType string). */ + clientSecret?: SecretBaseUnion; + /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). */ + useEncryptedEndpoints?: any; + /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). */ + useHostVerification?: any; + /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). */ + usePeerVerification?: any; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** Responsys dataset. */ -export type ResponsysObjectDataset = Dataset & { +/** Dynamics AX linked service. */ +export type DynamicsAXLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "ResponsysObject"; - /** The table name. Type: string (or Expression with resultType string). */ - tableName?: any; + type: "DynamicsAX"; + /** The Dynamics AX (or Dynamics 365 Finance and Operations) instance OData endpoint. */ + url: any; + /** Specify the application's client ID. 
Type: string (or Expression with resultType string). */
+  servicePrincipalId: any;
+  /** Specify the application's key. Mark this field as a SecureString to store it securely in Data Factory, or reference a secret stored in Azure Key Vault. Type: string (or Expression with resultType string). */
+  servicePrincipalKey: SecretBaseUnion;
+  /** Specify the tenant information (domain name or tenant ID) under which your application resides. Retrieve it by hovering the mouse pointer in the top-right corner of the Azure portal. Type: string (or Expression with resultType string). */
+  tenant: any;
+  /** Specify the resource you are requesting authorization for. Type: string (or Expression with resultType string). */
+  aadResourceId: any;
+  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
+  encryptedCredential?: any;
};

-/** The path of the Dynamics AX OData entity. */
-export type DynamicsAXResourceDataset = Dataset & {
+/** Oracle Service Cloud linked service. */
+export type OracleServiceCloudLinkedService = LinkedService & {
  /** Polymorphic discriminator, which specifies the different types this object can be */
-  type: "DynamicsAXResource";
-  /** The path of the Dynamics AX OData entity. Type: string (or Expression with resultType string). */
-  path: any;
+  type: "OracleServiceCloud";
+  /** The URL of the Oracle Service Cloud instance. */
+  host: any;
+  /** The user name that you use to access the Oracle Service Cloud server. */
+  username: any;
+  /** The password corresponding to the user name that you provided in the username key. */
+  password: SecretBaseUnion;
+  /** Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). */
+  useEncryptedEndpoints?: any;
+  /** Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). */
+  useHostVerification?: any;
+  /** Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). */
+  usePeerVerification?: any;
+  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
+  encryptedCredential?: any;
};

-/** Oracle Service Cloud dataset. */
-export type OracleServiceCloudObjectDataset = Dataset & {
+/** Google AdWords service linked service. */
+export type GoogleAdWordsLinkedService = LinkedService & {
  /** Polymorphic discriminator, which specifies the different types this object can be */
-  type: "OracleServiceCloudObject";
-  /** The table name. Type: string (or Expression with resultType string). */
-  tableName?: any;
+  type: "GoogleAdWords";
+  /** The Client customer ID of the AdWords account that you want to fetch report data for. */
+  clientCustomerID: any;
+  /** The developer token associated with the manager account that you use to grant access to the AdWords API. */
+  developerToken: SecretBaseUnion;
+  /** The OAuth 2.0 authentication mechanism used for authentication. ServiceAuthentication can only be used on self-hosted IR.
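 *
 * Illustrative `GoogleAdWordsLinkedService` sketch using UserAuthentication
 * (all identifiers and secrets are placeholders):
 * ```ts
 * const adWords: GoogleAdWordsLinkedService = {
 *   type: "GoogleAdWords",
 *   clientCustomerID: "123-456-7890",
 *   developerToken: { type: "SecureString", value: "<developer-token>" },
 *   authenticationType: "UserAuthentication",
 *   refreshToken: { type: "SecureString", value: "<refresh-token>" },
 *   clientId: "<oauth-client-id>",
 *   clientSecret: { type: "SecureString", value: "<oauth-client-secret>" }
 * };
 * ```
 *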
*/ + authenticationType: GoogleAdWordsAuthenticationType; + /** The refresh token obtained from Google for authorizing access to AdWords for UserAuthentication. */ + refreshToken?: SecretBaseUnion; + /** The client id of the google application used to acquire the refresh token. Type: string (or Expression with resultType string). */ + clientId?: any; + /** The client secret of the google application used to acquire the refresh token. */ + clientSecret?: SecretBaseUnion; + /** The service account email ID that is used for ServiceAuthentication and can only be used on self-hosted IR. */ + email?: any; + /** The full path to the .p12 key file that is used to authenticate the service account email address and can only be used on self-hosted IR. */ + keyFilePath?: any; + /** The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. */ + trustedCertPath?: any; + /** Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. */ + useSystemTrustStore?: any; + /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */ + encryptedCredential?: any; }; -/** The Azure Data Explorer (Kusto) dataset. */ -export type AzureDataExplorerTableDataset = Dataset & { +/** SAP Table Linked Service. */ +export type SapTableLinkedService = LinkedService & { /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "AzureDataExplorerTable"; - /** The table name of the Azure Data Explorer database. Type: string (or Expression with resultType string). */ - table?: any; + type: "SapTable"; + /** Host name of the SAP instance where the table is located. Type: string (or Expression with resultType string). */ + server?: any; + /** System number of the SAP system where the table is located. (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). */ + systemNumber?: any; + /** Client ID of the client on the SAP system where the table is located. (Usually a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). */ + clientId?: any; + /** Language of the SAP system where the table is located. The default value is EN. Type: string (or Expression with resultType string). */ + language?: any; + /** SystemID of the SAP system where the table is located. Type: string (or Expression with resultType string). */ + systemId?: any; + /** Username to access the SAP server where the table is located. Type: string (or Expression with resultType string). */ + userName?: any; + /** Password to access the SAP server where the table is located. */ + password?: SecretBaseUnion; + /** The hostname of the SAP Message Server. Type: string (or Expression with resultType string). */ + messageServer?: any; + /** The service name or port number of the Message Server. Type: string (or Expression with resultType string). */ + messageServerService?: any; + /** SNC activation indicator to access the SAP server where the table is located. Must be either 0 (off) or 1 (on). Type: string (or Expression with resultType string). */ + sncMode?: any; + /** Initiator's SNC name to access the SAP server where the table is located. 
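 *
 * Illustrative `SapTableLinkedService` sketch for a direct application-server
 * connection (all values are placeholders):
 * ```ts
 * const sapTable: SapTableLinkedService = {
 *   type: "SapTable",
 *   server: "sap.example.com",
 *   systemNumber: "00",
 *   clientId: "100",
 *   userName: "rfc_user",
 *   password: { type: "SecureString", value: "<placeholder>" }
 * };
 * ```
 *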
Type: string (or Expression with resultType string). */
+  sncMyName?: any;
+  /** Communication partner's SNC name to access the SAP server where the table is located. Type: string (or Expression with resultType string). */
+  sncPartnerName?: any;
+  /** External security product's library to access the SAP server where the table is located. Type: string (or Expression with resultType string). */
+  sncLibraryPath?: any;
+  /** SNC Quality of Protection. Allowed values include: 1, 2, 3, 8, 9. Type: string (or Expression with resultType string). */
+  sncQop?: any;
+  /** The Logon Group for the SAP System. Type: string (or Expression with resultType string). */
+  logonGroup?: any;
+  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
+  encryptedCredential?: any;
};

-/** Google AdWords service dataset. */
-export type GoogleAdWordsObjectDataset = Dataset & {
+/** Azure Data Explorer (Kusto) linked service. */
+export type AzureDataExplorerLinkedService = LinkedService & {
  /** Polymorphic discriminator, which specifies the different types this object can be */
-  type: "GoogleAdWordsObject";
-  /** The table name. Type: string (or Expression with resultType string). */
-  tableName?: any;
+  type: "AzureDataExplorer";
+  /** The endpoint of Azure Data Explorer (the engine's endpoint). URL will be in the format https://..kusto.windows.net. Type: string (or Expression with resultType string) */
+  endpoint: any;
+  /** The ID of the service principal used to authenticate against Azure Data Explorer. Type: string (or Expression with resultType string). */
+  servicePrincipalId?: any;
+  /** The key of the service principal used to authenticate against Kusto. */
+  servicePrincipalKey?: SecretBaseUnion;
+  /** Database name for connection. Type: string (or Expression with resultType string). */
+  database: any;
+  /** The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). */
+  tenant?: any;
};

-/** The snowflake dataset. */
-export type SnowflakeDataset = Dataset & {
+/** Azure Function linked service. */
+export type AzureFunctionLinkedService = LinkedService & {
  /** Polymorphic discriminator, which specifies the different types this object can be */
-  type: "SnowflakeTable";
-  /** The schema name of the Snowflake database. Type: string (or Expression with resultType string). */
-  schemaTypePropertiesSchema?: any;
-  /** The table name of the Snowflake database. Type: string (or Expression with resultType string). */
-  table?: any;
+  type: "AzureFunction";
+  /** The endpoint of the Azure Function App. URL will be in the format https://.azurewebsites.net. */
+  functionAppUrl: any;
+  /** Function or Host key for Azure Function App. */
+  functionKey?: SecretBaseUnion;
+  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
+  encryptedCredential?: any;
};

-/** The sharepoint online list resource dataset. */
-export type SharePointOnlineListResourceDataset = Dataset & {
+/** Snowflake linked service. */
+export type SnowflakeLinkedService = LinkedService & {
  /** Polymorphic discriminator, which specifies the different types this object can be */
-  type: "SharePointOnlineListResource";
-  /** The name of the SharePoint Online list. Type: string (or Expression with resultType string).
-/** Azure Databricks Delta Lake dataset. */
-export type AzureDatabricksDeltaLakeDataset = Dataset & {
+/** SharePoint Online List linked service. */
+export type SharePointOnlineListLinkedService = LinkedService & {
   /** Polymorphic discriminator, which specifies the different types this object can be */
-  type: "AzureDatabricksDeltaLakeDataset";
-  /** The name of delta table. Type: string (or Expression with resultType string). */
-  table?: any;
-  /** The database name of delta table. Type: string (or Expression with resultType string). */
-  database?: any;
+  type: "SharePointOnlineList";
+  /** The URL of the SharePoint Online site. For example, https://contoso.sharepoint.com/sites/siteName. Type: string (or Expression with resultType string). */
+  siteUrl: any;
+  /** The tenant ID under which your application resides. You can find it on the Azure portal Active Directory overview page. Type: string (or Expression with resultType string). */
+  tenantId: any;
+  /** The application (client) ID of your application registered in Azure Active Directory. Make sure to grant SharePoint site permission to this application. Type: string (or Expression with resultType string). */
+  servicePrincipalId: any;
+  /** The client secret of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). */
+  servicePrincipalKey: SecretBaseUnion;
+  /** The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). */
+  encryptedCredential?: any;
 };

 /** Base class for all control activities like IfCondition, ForEach , Until. */
@@ -6425,63 +6665,6 @@ export type ChainingTrigger = Trigger & {
   runDimension: string;
 };

-/** Mapping data flow. */
-export type MappingDataFlow = DataFlow & {
-  /** Polymorphic discriminator, which specifies the different types this object can be */
-  type: "MappingDataFlow";
-  /** List of sources in data flow. */
-  sources?: DataFlowSource[];
-  /** List of sinks in data flow. */
-  sinks?: DataFlowSink[];
-  /** List of transformations in data flow. */
-  transformations?: Transformation[];
-  /** DataFlow script. */
-  script?: string;
-};
-
-/** Data flow debug resource. */
-export type DataFlowDebugResource = SubResourceDebugResource & {
-  /** Data flow properties. */
-  properties: DataFlowUnion;
-};
-
-/** Dataset debug resource. */
-export type DatasetDebugResource = SubResourceDebugResource & {
-  /** Dataset properties. */
-  properties: DatasetUnion;
-};
-
-/** Linked service debug resource. */
-export type LinkedServiceDebugResource = SubResourceDebugResource & {
-  /** Properties of linked service. */
-  properties: LinkedServiceUnion;
-};
-
-/** Managed integration runtime, including managed elastic and managed dedicated integration runtimes.
*/ -export type ManagedIntegrationRuntime = IntegrationRuntime & { - /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "Managed"; - /** - * Integration runtime state, only valid for managed dedicated integration runtime. - * NOTE: This property will not be serialized. It can only be populated by the server. - */ - readonly state?: IntegrationRuntimeState; - /** Managed Virtual Network reference. */ - managedVirtualNetwork?: ManagedVirtualNetworkReference; - /** The compute resource for managed integration runtime. */ - computeProperties?: IntegrationRuntimeComputeProperties; - /** SSIS properties for managed integration runtime. */ - ssisProperties?: IntegrationRuntimeSsisProperties; -}; - -/** Self-hosted integration runtime. */ -export type SelfHostedIntegrationRuntime = IntegrationRuntime & { - /** Polymorphic discriminator, which specifies the different types this object can be */ - type: "SelfHosted"; - /** Linked integration runtime type from data factory */ - linkedInfo?: LinkedIntegrationRuntimeTypeUnion; -}; - /** Azure Synapse secure string definition. The string value will be masked with asterisks '*' during Get or List API calls. */ export type SecureString = SecretBase & { /** Polymorphic discriminator, which specifies the different types this object can be */ @@ -6631,7 +6814,7 @@ export type JsonFormat = DatasetStorageFormat & { /** Polymorphic discriminator, which specifies the different types this object can be */ type: "JsonFormat"; /** File pattern of JSON. To be more specific, the way of separating a collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive. */ - filePattern?: JsonFormatFilePattern; + filePattern?: any; /** The character used to separate nesting levels. Default value is '.' (dot). Type: string (or Expression with resultType string). */ nestingSeparator?: any; /** The code page name of the preferred encoding. If not provided, the default value is 'utf-8', unless the byte order mark (BOM) denotes another Unicode encoding. The full list of supported values can be found in the 'Name' column of the table of encodings in the following reference: https://go.microsoft.com/fwlink/?linkid=861078. Type: string (or Expression with resultType string). */ @@ -6670,7 +6853,7 @@ export type DatasetBZip2Compression = DatasetCompression & { export type DatasetGZipCompression = DatasetCompression & { /** Polymorphic discriminator, which specifies the different types this object can be */ type: "GZip"; - /** The GZip compression level. */ + /** The GZip compression level. Type: string (or Expression with resultType string). */ level?: any; }; @@ -6678,7 +6861,7 @@ export type DatasetGZipCompression = DatasetCompression & { export type DatasetDeflateCompression = DatasetCompression & { /** Polymorphic discriminator, which specifies the different types this object can be */ type: "Deflate"; - /** The Deflate compression level. */ + /** The Deflate compression level. Type: string (or Expression with resultType string). */ level?: any; }; @@ -6686,7 +6869,7 @@ export type DatasetDeflateCompression = DatasetCompression & { export type DatasetZipDeflateCompression = DatasetCompression & { /** Polymorphic discriminator, which specifies the different types this object can be */ type: "ZipDeflate"; - /** The ZipDeflate compression level. */ + /** The ZipDeflate compression level. Type: string (or Expression with resultType string). 
*/ level?: any; }; @@ -6700,7 +6883,7 @@ export type DatasetTarCompression = DatasetCompression & { export type DatasetTarGZipCompression = DatasetCompression & { /** Polymorphic discriminator, which specifies the different types this object can be */ type: "TarGZip"; - /** The TarGZip compression level. */ + /** The TarGZip compression level. Type: string (or Expression with resultType string). */ level?: any; }; @@ -7167,7 +7350,7 @@ export type JsonWriteSettings = FormatWriteSettings & { /** Polymorphic discriminator, which specifies the different types this object can be */ type: "JsonWriteSettings"; /** File pattern of JSON. This setting controls the way a collection of JSON objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. */ - filePattern?: JsonWriteFilePattern; + filePattern?: any; }; /** A copy activity Avro source. */ @@ -7176,8 +7359,8 @@ export type AvroSource = CopySource & { type: "AvroSource"; /** Avro store settings. */ storeSettings?: StoreReadSettingsUnion; - /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ - additionalColumns?: AdditionalColumns[]; + /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ + additionalColumns?: any; }; /** A copy activity excel source. */ @@ -7186,8 +7369,8 @@ export type ExcelSource = CopySource & { type: "ExcelSource"; /** Excel store settings. */ storeSettings?: StoreReadSettingsUnion; - /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ - additionalColumns?: AdditionalColumns[]; + /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ + additionalColumns?: any; }; /** A copy activity Parquet source. */ @@ -7196,8 +7379,8 @@ export type ParquetSource = CopySource & { type: "ParquetSource"; /** Parquet store settings. */ storeSettings?: StoreReadSettingsUnion; - /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ - additionalColumns?: AdditionalColumns[]; + /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ + additionalColumns?: any; }; /** A copy activity DelimitedText source. */ @@ -7208,8 +7391,8 @@ export type DelimitedTextSource = CopySource & { storeSettings?: StoreReadSettingsUnion; /** DelimitedText format settings. */ formatSettings?: DelimitedTextReadSettings; - /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ - additionalColumns?: AdditionalColumns[]; + /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ + additionalColumns?: any; }; /** A copy activity Json source. */ @@ -7220,8 +7403,8 @@ export type JsonSource = CopySource & { storeSettings?: StoreReadSettingsUnion; /** Json format settings. */ formatSettings?: JsonReadSettings; - /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). 
*/ - additionalColumns?: AdditionalColumns[]; + /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ + additionalColumns?: any; }; /** A copy activity Xml source. */ @@ -7232,8 +7415,8 @@ export type XmlSource = CopySource & { storeSettings?: StoreReadSettingsUnion; /** Xml format settings. */ formatSettings?: XmlReadSettings; - /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ - additionalColumns?: AdditionalColumns[]; + /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ + additionalColumns?: any; }; /** A copy activity ORC source. */ @@ -7242,8 +7425,8 @@ export type OrcSource = CopySource & { type: "OrcSource"; /** ORC store settings. */ storeSettings?: StoreReadSettingsUnion; - /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ - additionalColumns?: AdditionalColumns[]; + /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ + additionalColumns?: any; }; /** A copy activity Binary source. */ @@ -7277,6 +7460,7 @@ export type TabularSource = CopySource & { | "SapTableSource" | "SqlSource" | "SqlServerSource" + | "AmazonRdsForSqlServerSource" | "AzureSqlSource" | "SqlMISource" | "SqlDWSource" @@ -7320,8 +7504,8 @@ export type TabularSource = CopySource & { | "AmazonRedshiftSource"; /** Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ queryTimeout?: any; - /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ - additionalColumns?: AdditionalColumns[]; + /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ + additionalColumns?: any; }; /** A copy activity Azure Blob source. */ @@ -7346,8 +7530,8 @@ export type DocumentDbCollectionSource = CopySource & { nestingSeparator?: any; /** Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ queryTimeout?: any; - /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ - additionalColumns?: AdditionalColumns[]; + /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ + additionalColumns?: any; }; /** A copy activity Azure CosmosDB (SQL API) Collection source. */ @@ -7362,8 +7546,8 @@ export type CosmosDbSqlApiSource = CopySource & { preferredRegions?: any; /** Whether detect primitive values as datetime values. Type: boolean (or Expression with resultType boolean). */ detectDatetime?: any; - /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ - additionalColumns?: AdditionalColumns[]; + /** Specifies the additional columns to be added to source data. 
Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ + additionalColumns?: any; }; /** A copy activity Dynamics source. */ @@ -7372,8 +7556,8 @@ export type DynamicsSource = CopySource & { type: "DynamicsSource"; /** FetchXML is a proprietary query language that is used in Microsoft Dynamics (online & on-premises). Type: string (or Expression with resultType string). */ query?: any; - /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ - additionalColumns?: AdditionalColumns[]; + /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ + additionalColumns?: any; }; /** A copy activity Dynamics CRM source. */ @@ -7382,8 +7566,8 @@ export type DynamicsCrmSource = CopySource & { type: "DynamicsCrmSource"; /** FetchXML is a proprietary query language that is used in Microsoft Dynamics CRM (online & on-premises). Type: string (or Expression with resultType string). */ query?: any; - /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ - additionalColumns?: AdditionalColumns[]; + /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ + additionalColumns?: any; }; /** A copy activity Common Data Service for Apps source. */ @@ -7392,8 +7576,8 @@ export type CommonDataServiceForAppsSource = CopySource & { type: "CommonDataServiceForAppsSource"; /** FetchXML is a proprietary query language that is used in Microsoft Common Data Service for Apps (online & on-premises). Type: string (or Expression with resultType string). */ query?: any; - /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ - additionalColumns?: AdditionalColumns[]; + /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ + additionalColumns?: any; }; /** A copy activity source for various relational databases. */ @@ -7402,8 +7586,8 @@ export type RelationalSource = CopySource & { type: "RelationalSource"; /** Database query. Type: string (or Expression with resultType string). */ query?: any; - /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ - additionalColumns?: AdditionalColumns[]; + /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ + additionalColumns?: any; }; /** A copy activity source for Microsoft Access. */ @@ -7412,8 +7596,8 @@ export type MicrosoftAccessSource = CopySource & { type: "MicrosoftAccessSource"; /** Database query. Type: string (or Expression with resultType string). */ query?: any; - /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ - additionalColumns?: AdditionalColumns[]; + /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ + additionalColumns?: any; }; /** A copy activity source for OData source. 
*/ @@ -7424,8 +7608,8 @@ export type ODataSource = CopySource & { query?: any; /** The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:05:00. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ httpRequestTimeout?: any; - /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ - additionalColumns?: AdditionalColumns[]; + /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ + additionalColumns?: any; }; /** A copy activity Salesforce Service Cloud source. */ @@ -7436,8 +7620,8 @@ export type SalesforceServiceCloudSource = CopySource & { query?: any; /** The read behavior for the operation. Default is Query. */ readBehavior?: SalesforceSourceReadBehavior; - /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ - additionalColumns?: AdditionalColumns[]; + /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ + additionalColumns?: any; }; /** A copy activity Rest service source. */ @@ -7456,8 +7640,8 @@ export type RestSource = CopySource & { httpRequestTimeout?: any; /** The time to await before sending next page request. */ requestInterval?: any; - /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ - additionalColumns?: AdditionalColumns[]; + /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ + additionalColumns?: any; }; /** A copy activity file system source. */ @@ -7466,8 +7650,8 @@ export type FileSystemSource = CopySource & { type: "FileSystemSource"; /** If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). */ recursive?: any; - /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ - additionalColumns?: AdditionalColumns[]; + /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ + additionalColumns?: any; }; /** A copy activity HDFS source. */ @@ -7490,8 +7674,8 @@ export type AzureDataExplorerSource = CopySource & { noTruncation?: any; /** Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).. */ queryTimeout?: any; - /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ - additionalColumns?: AdditionalColumns[]; + /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ + additionalColumns?: any; }; /** A copy activity Oracle source. */ @@ -7506,16 +7690,32 @@ export type OracleSource = CopySource & { partitionOption?: OraclePartitionOption; /** The settings that will be leveraged for Oracle source partitioning. 
*/ partitionSettings?: OraclePartitionSettings; - /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ - additionalColumns?: AdditionalColumns[]; + /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ + additionalColumns?: any; +}; + +/** A copy activity AmazonRdsForOracle source. */ +export type AmazonRdsForOracleSource = CopySource & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + type: "AmazonRdsForOracleSource"; + /** AmazonRdsForOracle reader query. Type: string (or Expression with resultType string). */ + oracleReaderQuery?: any; + /** Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ + queryTimeout?: any; + /** The partition mechanism that will be used for AmazonRdsForOracle read in parallel. Type: string (or Expression with resultType string). */ + partitionOption?: any; + /** The settings that will be leveraged for AmazonRdsForOracle source partitioning. */ + partitionSettings?: AmazonRdsForOraclePartitionSettings; + /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ + additionalColumns?: any; }; /** A copy activity source for web page table. */ export type WebSource = CopySource & { /** Polymorphic discriminator, which specifies the different types this object can be */ type: "WebSource"; - /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ - additionalColumns?: AdditionalColumns[]; + /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ + additionalColumns?: any; }; /** A copy activity source for a MongoDB database. */ @@ -7524,8 +7724,8 @@ export type MongoDbSource = CopySource & { type: "MongoDbSource"; /** Database query. Should be a SQL-92 query expression. Type: string (or Expression with resultType string). */ query?: any; - /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ - additionalColumns?: AdditionalColumns[]; + /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ + additionalColumns?: any; }; /** A copy activity source for a MongoDB Atlas database. */ @@ -7540,8 +7740,8 @@ export type MongoDbAtlasSource = CopySource & { batchSize?: any; /** Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */ queryTimeout?: any; - /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */ - additionalColumns?: AdditionalColumns[]; + /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */ + additionalColumns?: any; }; /** A copy activity source for a MongoDB database. */ @@ -7556,8 +7756,8 @@ export type MongoDbV2Source = CopySource & { batchSize?: any; /** Query timeout. 
Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */
   queryTimeout?: any;
-  /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */
-  additionalColumns?: AdditionalColumns[];
+  /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */
+  additionalColumns?: any;
 };

 /** A copy activity source for a CosmosDB (MongoDB API) database. */
@@ -7572,8 +7772,8 @@ export type CosmosDbMongoDbApiSource = CopySource & {
   batchSize?: any;
   /** Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). */
   queryTimeout?: any;
-  /** Specifies the additional columns to be added to source data. Type: array of objects (or Expression with resultType array of objects). */
-  additionalColumns?: AdditionalColumns[];
+  /** Specifies the additional columns to be added to source data. Type: array of objects(AdditionalColumns) (or Expression with resultType array of objects). */
+  additionalColumns?: any;
 };

 /** A copy activity source for an Office 365 service. */
@@ -8167,6 +8367,73 @@ export type LinkedIntegrationRuntimeRbacAuthorization = LinkedIntegrationRuntime
 /** Azure Synapse nested resource, which belongs to a workspace. */
 export type SubResource = AzureEntityResource & {};

+/** A Big Data pool */
+export type BigDataPoolResourceInfo = TrackedResource & {
+  /** The state of the Big Data pool. */
+  provisioningState?: string;
+  /** Auto-scaling properties */
+  autoScale?: AutoScaleProperties;
+  /** The time when the Big Data pool was created. */
+  creationDate?: Date;
+  /** Auto-pausing properties */
+  autoPause?: AutoPauseProperties;
+  /** Whether compute isolation is required or not. */
+  isComputeIsolationEnabled?: boolean;
+  /** Whether session-level packages are enabled. */
+  sessionLevelPackagesEnabled?: boolean;
+  /** The cache size */
+  cacheSize?: number;
+  /** Dynamic Executor Allocation */
+  dynamicExecutorAllocation?: DynamicExecutorAllocation;
+  /** The Spark events folder */
+  sparkEventsFolder?: string;
+  /** The number of nodes in the Big Data pool. */
+  nodeCount?: number;
+  /** Library version requirements */
+  libraryRequirements?: LibraryRequirements;
+  /** List of custom libraries/packages associated with the spark pool. */
+  customLibraries?: LibraryInfo[];
+  /** Spark configuration file to specify additional properties */
+  sparkConfigProperties?: LibraryRequirements;
+  /** The Apache Spark version. */
+  sparkVersion?: string;
+  /** The default folder where Spark logs will be written. */
+  defaultSparkLogFolder?: string;
+  /** The level of compute power that each node in the Big Data pool has. */
+  nodeSize?: NodeSize;
+  /** The kind of nodes that the Big Data pool provides. */
+  nodeSizeFamily?: NodeSizeFamily;
+  /**
+   * The time when the Big Data pool was updated successfully.
+   * NOTE: This property will not be serialized. It can only be populated by the server.
+   */
+  readonly lastSucceededTimestamp?: Date;
+};
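Most `BigDataPoolResourceInfo` fields are populated by the service, so client code typically just reads them back through the `bigDataPools` operation group. A sketch under the assumption that the endpoint and pool name below (taken from this PR's test recordings) are placeholders:

import { ArtifactsClient } from "@azure/synapse-artifacts";
import { DefaultAzureCredential } from "@azure/identity";

async function main(): Promise<void> {
  const client = new ArtifactsClient(
    new DefaultAzureCredential(),
    "https://testaccount.dev.azuresynapse.net" // placeholder workspace endpoint
  );
  // Fetch a pool by name and read a few of the server-populated properties.
  const pool = await client.bigDataPools.get("testsparkpool");
  console.log(pool.sparkVersion, pool.nodeSize, pool.nodeCount);
}

main();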
+
+/** A SQL Analytics pool */
+export type SqlPool = TrackedResource & {
+  /** SQL pool SKU */
+  sku?: Sku;
+  /** Maximum size in bytes */
+  maxSizeBytes?: number;
+  /** Collation mode */
+  collation?: string;
+  /** Source database to create from */
+  sourceDatabaseId?: string;
+  /** Backup database to restore from */
+  recoverableDatabaseId?: string;
+  /** Resource state */
+  provisioningState?: string;
+  /** Resource status */
+  status?: string;
+  /** Snapshot time to restore */
+  restorePointInTime?: string;
+  /** The mode of SQL pool creation. */
+  createMode?: string;
+  /** Date the SQL pool was created */
+  creationDate?: Date;
+};
+
 /** A workspace */
 export type Workspace = TrackedResource & {
   /** Identity of the workspace */
@@ -8217,73 +8484,6 @@ export type Workspace = TrackedResource & {
   readonly adlaResourceId?: string;
 };

-/** A SQL Analytics pool */
-export type SqlPool = TrackedResource & {
-  /** SQL pool SKU */
-  sku?: Sku;
-  /** Maximum size in bytes */
-  maxSizeBytes?: number;
-  /** Collation mode */
-  collation?: string;
-  /** Source database to create from */
-  sourceDatabaseId?: string;
-  /** Backup database to restore from */
-  recoverableDatabaseId?: string;
-  /** Resource state */
-  provisioningState?: string;
-  /** Resource status */
-  status?: string;
-  /** Snapshot time to restore */
-  restorePointInTime?: string;
-  /** What is this? */
-  createMode?: string;
-  /** Date the SQL pool was created */
-  creationDate?: Date;
-};
-
-/** A Big Data pool */
-export type BigDataPoolResourceInfo = TrackedResource & {
-  /** The state of the Big Data pool. */
-  provisioningState?: string;
-  /** Auto-scaling properties */
-  autoScale?: AutoScaleProperties;
-  /** The time when the Big Data pool was created. */
-  creationDate?: Date;
-  /** Auto-pausing properties */
-  autoPause?: AutoPauseProperties;
-  /** Whether compute isolation is required or not. */
-  isComputeIsolationEnabled?: boolean;
-  /** Whether session level packages enabled. */
-  sessionLevelPackagesEnabled?: boolean;
-  /** The cache size */
-  cacheSize?: number;
-  /** Dynamic Executor Allocation */
-  dynamicExecutorAllocation?: DynamicExecutorAllocation;
-  /** The Spark events folder */
-  sparkEventsFolder?: string;
-  /** The number of nodes in the Big Data pool. */
-  nodeCount?: number;
-  /** Library version requirements */
-  libraryRequirements?: LibraryRequirements;
-  /** List of custom libraries/packages associated with the spark pool. */
-  customLibraries?: LibraryInfo[];
-  /** Spark configuration file to specify additional properties */
-  sparkConfigProperties?: LibraryRequirements;
-  /** The Apache Spark version. */
-  sparkVersion?: string;
-  /** The default folder where Spark logs will be written. */
-  defaultSparkLogFolder?: string;
-  /** The level of compute power that each node in the Big Data pool has. */
-  nodeSize?: NodeSize;
-  /** The kind of nodes that the Big Data pool provides. */
-  nodeSizeFamily?: NodeSizeFamily;
-  /**
-   * The time when the Big Data pool was updated successfully.
-   * NOTE: This property will not be serialized. It can only be populated by the server.
-   */
-  readonly lastSucceededTimestamp?: Date;
-};
-
 /** A private endpoint connection */
 export type PrivateEndpointConnection = ProxyResource & {
   /** The private endpoint which the connection belongs to. */
@@ -9117,6 +9317,26 @@ export type SqlServerSource = TabularSource & {
   partitionSettings?: SqlPartitionSettings;
 };

+/** A copy activity Amazon RDS for SQL Server source. */
+export type AmazonRdsForSqlServerSource = TabularSource & {
+  /** Polymorphic discriminator, which specifies the different types this object can be */
+  type: "AmazonRdsForSqlServerSource";
+  /** SQL reader query. Type: string (or Expression with resultType string). */
+  sqlReaderQuery?: any;
+  /** Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). */
+  sqlReaderStoredProcedureName?: any;
+  /** Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". */
+  storedProcedureParameters?: {
+    [propertyName: string]: StoredProcedureParameter;
+  };
+  /** Which additional types to produce. */
+  produceAdditionalTypes?: any;
+  /** The partition mechanism that will be used for SQL read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". */
+  partitionOption?: any;
+  /** The settings that will be leveraged for SQL source partitioning. */
+  partitionSettings?: SqlPartitionSettings;
+};
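`AmazonRdsForSqlServerSource` is one of the new copy-activity sources in this release. A minimal sketch of a dynamic-range partitioned read; the query and partition column are hypothetical:

import { AmazonRdsForSqlServerSource } from "@azure/synapse-artifacts";

const source: AmazonRdsForSqlServerSource = {
  type: "AmazonRdsForSqlServerSource",
  sqlReaderQuery: "SELECT * FROM dbo.Orders",
  partitionOption: "DynamicRange",
  // Hypothetical column used to split the parallel read.
  partitionSettings: { partitionColumnName: "OrderId" }
};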
+
 /** A copy activity Azure SQL source. */
 export type AzureSqlSource = TabularSource & {
   /** Polymorphic discriminator, which specifies the different types this object can be */
@@ -9501,10 +9721,16 @@ export type TumblingWindowTriggerDependencyReference = TriggerDependencyReferenc
   size?: string;
 };

-/** Linked service resource type. */
-export type LinkedServiceResource = SubResource & {
-  /** Properties of linked service. */
-  properties: LinkedServiceUnion;
+/** Spark Configuration resource type. */
+export type SparkConfigurationResource = SubResource & {
+  /** Properties of Spark Configuration. */
+  properties: SparkConfiguration;
+};
+
+/** Data flow resource type. */
+export type DataFlowResource = SubResource & {
+  /** Data flow properties. */
+  properties: DataFlowUnion;
 };

 /** Dataset resource type. */
@@ -9513,6 +9739,24 @@ export type DatasetResource = SubResource & {
   properties: DatasetUnion;
 };

+/** Integration runtime resource type. */
+export type IntegrationRuntimeResource = SubResource & {
+  /** Integration runtime properties. */
+  properties: IntegrationRuntimeUnion;
+};
+
+/** Library response details */
+export type LibraryResource = SubResource & {
+  /** Library/package properties. */
+  properties: LibraryResourceProperties;
+};
+
+/** Linked service resource type. */
+export type LinkedServiceResource = SubResource & {
+  /** Properties of linked service. */
+  properties: LinkedServiceUnion;
+};
+
 /** Pipeline resource type. */
 export type PipelineResource = SubResource & {
   /** Describes unknown properties. The value of an unknown property can be of "any" type. */
@@ -9535,34 +9779,16 @@ export type PipelineResource = SubResource & {
   folder?: PipelineFolder;
 };

-/** Trigger resource type. */
-export type TriggerResource = SubResource & {
-  /** Properties of the trigger. */
-  properties: TriggerUnion;
-};
-
-/** Data flow resource type. */
-export type DataFlowResource = SubResource & {
-  /** Data flow properties. */
-  properties: DataFlowUnion;
-};
-
 /** Spark job definition resource type. */
 export type SparkJobDefinitionResource = SubResource & {
   /** Properties of spark job definition. */
   properties: SparkJobDefinition;
 };

-/** Integration runtime resource type. */
-export type IntegrationRuntimeResource = SubResource & {
-  /** Integration runtime properties.
*/ - properties: IntegrationRuntimeUnion; -}; - -/** Library response details */ -export type LibraryResource = SubResource & { - /** Library/package properties. */ - properties: LibraryResourceProperties; +/** Trigger resource type. */ +export type TriggerResource = SubResource & { + /** Properties of the trigger. */ + properties: TriggerUnion; }; /** RerunTrigger resource type. */ @@ -9583,19 +9809,77 @@ export interface DataFlowDebugSessionExecuteCommandHeaders { location?: string; } -/** Known values of {@link IntegrationRuntimeReferenceType} that the service accepts. */ -export enum KnownIntegrationRuntimeReferenceType { - IntegrationRuntimeReference = "IntegrationRuntimeReference" +/** Known values of {@link NodeSize} that the service accepts. */ +export enum KnownNodeSize { + None = "None", + Small = "Small", + Medium = "Medium", + Large = "Large", + XLarge = "XLarge", + XXLarge = "XXLarge", + XXXLarge = "XXXLarge" } /** - * Defines values for IntegrationRuntimeReferenceType. \ - * {@link KnownIntegrationRuntimeReferenceType} can be used interchangeably with IntegrationRuntimeReferenceType, + * Defines values for NodeSize. \ + * {@link KnownNodeSize} can be used interchangeably with NodeSize, * this enum contains the known values that the service supports. * ### Known values supported by the service - * **IntegrationRuntimeReference** + * **None** \ + * **Small** \ + * **Medium** \ + * **Large** \ + * **XLarge** \ + * **XXLarge** \ + * **XXXLarge** */ -export type IntegrationRuntimeReferenceType = string; +export type NodeSize = string; + +/** Known values of {@link NodeSizeFamily} that the service accepts. */ +export enum KnownNodeSizeFamily { + None = "None", + MemoryOptimized = "MemoryOptimized" +} + +/** + * Defines values for NodeSizeFamily. \ + * {@link KnownNodeSizeFamily} can be used interchangeably with NodeSizeFamily, + * this enum contains the known values that the service supports. + * ### Known values supported by the service + * **None** \ + * **MemoryOptimized** + */ +export type NodeSizeFamily = string; + +/** Known values of {@link IntegrationRuntimeType} that the service accepts. */ +export enum KnownIntegrationRuntimeType { + Managed = "Managed", + SelfHosted = "SelfHosted" +} + +/** + * Defines values for IntegrationRuntimeType. \ + * {@link KnownIntegrationRuntimeType} can be used interchangeably with IntegrationRuntimeType, + * this enum contains the known values that the service supports. + * ### Known values supported by the service + * **Managed** \ + * **SelfHosted** + */ +export type IntegrationRuntimeType = string; + +/** Known values of {@link Type} that the service accepts. */ +export enum KnownType { + LinkedServiceReference = "LinkedServiceReference" +} + +/** + * Defines values for Type. \ + * {@link KnownType} can be used interchangeably with Type, + * this enum contains the known values that the service supports. + * ### Known values supported by the service + * **LinkedServiceReference** + */ +export type Type = string; /** Known values of {@link ParameterType} that the service accepts. */ export enum KnownParameterType { @@ -9623,19 +9907,71 @@ export enum KnownParameterType { */ export type ParameterType = string; -/** Known values of {@link Type} that the service accepts. */ -export enum KnownType { - LinkedServiceReference = "LinkedServiceReference" +/** Known values of {@link IntegrationRuntimeReferenceType} that the service accepts. 
*/ +export enum KnownIntegrationRuntimeReferenceType { + IntegrationRuntimeReference = "IntegrationRuntimeReference" } /** - * Defines values for Type. \ - * {@link KnownType} can be used interchangeably with Type, + * Defines values for IntegrationRuntimeReferenceType. \ + * {@link KnownIntegrationRuntimeReferenceType} can be used interchangeably with IntegrationRuntimeReferenceType, * this enum contains the known values that the service supports. * ### Known values supported by the service - * **LinkedServiceReference** + * **IntegrationRuntimeReference** */ -export type Type = string; +export type IntegrationRuntimeReferenceType = string; + +/** Known values of {@link DataFlowDebugCommandType} that the service accepts. */ +export enum KnownDataFlowDebugCommandType { + ExecutePreviewQuery = "executePreviewQuery", + ExecuteStatisticsQuery = "executeStatisticsQuery", + ExecuteExpressionQuery = "executeExpressionQuery" +} + +/** + * Defines values for DataFlowDebugCommandType. \ + * {@link KnownDataFlowDebugCommandType} can be used interchangeably with DataFlowDebugCommandType, + * this enum contains the known values that the service supports. + * ### Known values supported by the service + * **executePreviewQuery** \ + * **executeStatisticsQuery** \ + * **executeExpressionQuery** + */ +export type DataFlowDebugCommandType = string; + +/** Known values of {@link BigDataPoolReferenceType} that the service accepts. */ +export enum KnownBigDataPoolReferenceType { + BigDataPoolReference = "BigDataPoolReference" +} + +/** + * Defines values for BigDataPoolReferenceType. \ + * {@link KnownBigDataPoolReferenceType} can be used interchangeably with BigDataPoolReferenceType, + * this enum contains the known values that the service supports. + * ### Known values supported by the service + * **BigDataPoolReference** + */ +export type BigDataPoolReferenceType = string; + +/** Known values of {@link CellOutputType} that the service accepts. */ +export enum KnownCellOutputType { + ExecuteResult = "execute_result", + DisplayData = "display_data", + Stream = "stream", + Error = "error" +} + +/** + * Defines values for CellOutputType. \ + * {@link KnownCellOutputType} can be used interchangeably with CellOutputType, + * this enum contains the known values that the service supports. + * ### Known values supported by the service + * **execute_result** \ + * **display_data** \ + * **stream** \ + * **error** + */ +export type CellOutputType = string; /** Known values of {@link DependencyCondition} that the service accepts. */ export enum KnownDependencyCondition { @@ -9740,146 +10076,44 @@ export enum KnownRunQueryOrderByField { PipelineName = "PipelineName", Status = "Status", ActivityName = "ActivityName", - ActivityRunStart = "ActivityRunStart", - ActivityRunEnd = "ActivityRunEnd", - TriggerName = "TriggerName", - TriggerRunTimestamp = "TriggerRunTimestamp" -} - -/** - * Defines values for RunQueryOrderByField. \ - * {@link KnownRunQueryOrderByField} can be used interchangeably with RunQueryOrderByField, - * this enum contains the known values that the service supports. - * ### Known values supported by the service - * **RunStart** \ - * **RunEnd** \ - * **PipelineName** \ - * **Status** \ - * **ActivityName** \ - * **ActivityRunStart** \ - * **ActivityRunEnd** \ - * **TriggerName** \ - * **TriggerRunTimestamp** - */ -export type RunQueryOrderByField = string; - -/** Known values of {@link RunQueryOrder} that the service accepts. 
*/ -export enum KnownRunQueryOrder { - ASC = "ASC", - Desc = "DESC" -} - -/** - * Defines values for RunQueryOrder. \ - * {@link KnownRunQueryOrder} can be used interchangeably with RunQueryOrder, - * this enum contains the known values that the service supports. - * ### Known values supported by the service - * **ASC** \ - * **DESC** - */ -export type RunQueryOrder = string; - -/** Known values of {@link TriggerRuntimeState} that the service accepts. */ -export enum KnownTriggerRuntimeState { - Started = "Started", - Stopped = "Stopped", - Disabled = "Disabled" -} - -/** - * Defines values for TriggerRuntimeState. \ - * {@link KnownTriggerRuntimeState} can be used interchangeably with TriggerRuntimeState, - * this enum contains the known values that the service supports. - * ### Known values supported by the service - * **Started** \ - * **Stopped** \ - * **Disabled** - */ -export type TriggerRuntimeState = string; - -/** Known values of {@link EventSubscriptionStatus} that the service accepts. */ -export enum KnownEventSubscriptionStatus { - Enabled = "Enabled", - Provisioning = "Provisioning", - Deprovisioning = "Deprovisioning", - Disabled = "Disabled", - Unknown = "Unknown" -} - -/** - * Defines values for EventSubscriptionStatus. \ - * {@link KnownEventSubscriptionStatus} can be used interchangeably with EventSubscriptionStatus, - * this enum contains the known values that the service supports. - * ### Known values supported by the service - * **Enabled** \ - * **Provisioning** \ - * **Deprovisioning** \ - * **Disabled** \ - * **Unknown** - */ -export type EventSubscriptionStatus = string; - -/** Known values of {@link TriggerRunStatus} that the service accepts. */ -export enum KnownTriggerRunStatus { - Succeeded = "Succeeded", - Failed = "Failed", - Inprogress = "Inprogress" -} - -/** - * Defines values for TriggerRunStatus. \ - * {@link KnownTriggerRunStatus} can be used interchangeably with TriggerRunStatus, - * this enum contains the known values that the service supports. - * ### Known values supported by the service - * **Succeeded** \ - * **Failed** \ - * **Inprogress** - */ -export type TriggerRunStatus = string; - -/** Known values of {@link SqlScriptType} that the service accepts. */ -export enum KnownSqlScriptType { - SqlQuery = "SqlQuery" -} - -/** - * Defines values for SqlScriptType. \ - * {@link KnownSqlScriptType} can be used interchangeably with SqlScriptType, - * this enum contains the known values that the service supports. - * ### Known values supported by the service - * **SqlQuery** - */ -export type SqlScriptType = string; - -/** Known values of {@link SqlConnectionType} that the service accepts. */ -export enum KnownSqlConnectionType { - SqlOnDemand = "SqlOnDemand", - SqlPool = "SqlPool" + ActivityRunStart = "ActivityRunStart", + ActivityRunEnd = "ActivityRunEnd", + TriggerName = "TriggerName", + TriggerRunTimestamp = "TriggerRunTimestamp" } /** - * Defines values for SqlConnectionType. \ - * {@link KnownSqlConnectionType} can be used interchangeably with SqlConnectionType, + * Defines values for RunQueryOrderByField. \ + * {@link KnownRunQueryOrderByField} can be used interchangeably with RunQueryOrderByField, * this enum contains the known values that the service supports. 
* ### Known values supported by the service - * **SqlOnDemand** \ - * **SqlPool** + * **RunStart** \ + * **RunEnd** \ + * **PipelineName** \ + * **Status** \ + * **ActivityName** \ + * **ActivityRunStart** \ + * **ActivityRunEnd** \ + * **TriggerName** \ + * **TriggerRunTimestamp** */ -export type SqlConnectionType = string; +export type RunQueryOrderByField = string; -/** Known values of {@link BigDataPoolReferenceType} that the service accepts. */ -export enum KnownBigDataPoolReferenceType { - BigDataPoolReference = "BigDataPoolReference" +/** Known values of {@link RunQueryOrder} that the service accepts. */ +export enum KnownRunQueryOrder { + ASC = "ASC", + Desc = "DESC" } /** - * Defines values for BigDataPoolReferenceType. \ - * {@link KnownBigDataPoolReferenceType} can be used interchangeably with BigDataPoolReferenceType, + * Defines values for RunQueryOrder. \ + * {@link KnownRunQueryOrder} can be used interchangeably with RunQueryOrder, * this enum contains the known values that the service supports. * ### Known values supported by the service - * **BigDataPoolReference** + * **ASC** \ + * **DESC** */ -export type BigDataPoolReferenceType = string; +export type RunQueryOrder = string; /** Known values of {@link SparkJobType} that the service accepts. */ export enum KnownSparkJobType { @@ -9981,83 +10215,127 @@ export enum KnownSparkErrorSource { */ export type SparkErrorSource = string; -/** Known values of {@link CellOutputType} that the service accepts. */ -export enum KnownCellOutputType { - ExecuteResult = "execute_result", - DisplayData = "display_data", - Stream = "stream", - Error = "error" +/** Known values of {@link LivyStates} that the service accepts. */ +export enum KnownLivyStates { + NotStarted = "not_started", + Starting = "starting", + Idle = "idle", + Busy = "busy", + ShuttingDown = "shutting_down", + Error = "error", + Dead = "dead", + Killed = "killed", + Success = "success", + Running = "running", + Recovering = "recovering" } /** - * Defines values for CellOutputType. \ - * {@link KnownCellOutputType} can be used interchangeably with CellOutputType, + * Defines values for LivyStates. \ + * {@link KnownLivyStates} can be used interchangeably with LivyStates, * this enum contains the known values that the service supports. * ### Known values supported by the service - * **execute_result** \ - * **display_data** \ - * **stream** \ - * **error** + * **not_started** \ + * **starting** \ + * **idle** \ + * **busy** \ + * **shutting_down** \ + * **error** \ + * **dead** \ + * **killed** \ + * **success** \ + * **running** \ + * **recovering** */ -export type CellOutputType = string; +export type LivyStates = string; -/** Known values of {@link NodeSize} that the service accepts. */ -export enum KnownNodeSize { - None = "None", - Small = "Small", - Medium = "Medium", - Large = "Large", - XLarge = "XLarge", - XXLarge = "XXLarge", - XXXLarge = "XXXLarge" +/** Known values of {@link SqlScriptType} that the service accepts. */ +export enum KnownSqlScriptType { + SqlQuery = "SqlQuery" } /** - * Defines values for NodeSize. \ - * {@link KnownNodeSize} can be used interchangeably with NodeSize, + * Defines values for SqlScriptType. \ + * {@link KnownSqlScriptType} can be used interchangeably with SqlScriptType, * this enum contains the known values that the service supports. 
* ### Known values supported by the service - * **None** \ - * **Small** \ - * **Medium** \ - * **Large** \ - * **XLarge** \ - * **XXLarge** \ - * **XXXLarge** + * **SqlQuery** */ -export type NodeSize = string; +export type SqlScriptType = string; -/** Known values of {@link NodeSizeFamily} that the service accepts. */ -export enum KnownNodeSizeFamily { - None = "None", - MemoryOptimized = "MemoryOptimized" +/** Known values of {@link SqlConnectionType} that the service accepts. */ +export enum KnownSqlConnectionType { + SqlOnDemand = "SqlOnDemand", + SqlPool = "SqlPool" } /** - * Defines values for NodeSizeFamily. \ - * {@link KnownNodeSizeFamily} can be used interchangeably with NodeSizeFamily, + * Defines values for SqlConnectionType. \ + * {@link KnownSqlConnectionType} can be used interchangeably with SqlConnectionType, * this enum contains the known values that the service supports. * ### Known values supported by the service - * **None** \ - * **MemoryOptimized** + * **SqlOnDemand** \ + * **SqlPool** */ -export type NodeSizeFamily = string; +export type SqlConnectionType = string; -/** Known values of {@link IntegrationRuntimeType} that the service accepts. */ -export enum KnownIntegrationRuntimeType { - Managed = "Managed", - SelfHosted = "SelfHosted" +/** Known values of {@link TriggerRuntimeState} that the service accepts. */ +export enum KnownTriggerRuntimeState { + Started = "Started", + Stopped = "Stopped", + Disabled = "Disabled" } /** - * Defines values for IntegrationRuntimeType. \ - * {@link KnownIntegrationRuntimeType} can be used interchangeably with IntegrationRuntimeType, + * Defines values for TriggerRuntimeState. \ + * {@link KnownTriggerRuntimeState} can be used interchangeably with TriggerRuntimeState, * this enum contains the known values that the service supports. * ### Known values supported by the service - * **Managed** \ - * **SelfHosted** + * **Started** \ + * **Stopped** \ + * **Disabled** */ -export type IntegrationRuntimeType = string; +export type TriggerRuntimeState = string; + +/** Known values of {@link EventSubscriptionStatus} that the service accepts. */ +export enum KnownEventSubscriptionStatus { + Enabled = "Enabled", + Provisioning = "Provisioning", + Deprovisioning = "Deprovisioning", + Disabled = "Disabled", + Unknown = "Unknown" +} + +/** + * Defines values for EventSubscriptionStatus. \ + * {@link KnownEventSubscriptionStatus} can be used interchangeably with EventSubscriptionStatus, + * this enum contains the known values that the service supports. + * ### Known values supported by the service + * **Enabled** \ + * **Provisioning** \ + * **Deprovisioning** \ + * **Disabled** \ + * **Unknown** + */ +export type EventSubscriptionStatus = string; + +/** Known values of {@link TriggerRunStatus} that the service accepts. */ +export enum KnownTriggerRunStatus { + Succeeded = "Succeeded", + Failed = "Failed", + Inprogress = "Inprogress" +} + +/** + * Defines values for TriggerRunStatus. \ + * {@link KnownTriggerRunStatus} can be used interchangeably with TriggerRunStatus, + * this enum contains the known values that the service supports. + * ### Known values supported by the service + * **Succeeded** \ + * **Failed** \ + * **Inprogress** + */ +export type TriggerRunStatus = string; /** Known values of {@link ExpressionType} that the service accepts. */ export enum KnownExpressionType { @@ -10096,180 +10374,66 @@ export enum KnownDatasetReferenceType { * Defines values for DatasetReferenceType. 
\
 * {@link KnownDatasetReferenceType} can be used interchangeably with DatasetReferenceType,
 * this enum contains the known values that the service supports.
- * ### Known values supported by the service
- * **DatasetReference**
- */
-export type DatasetReferenceType = string;
-
-/** Known values of {@link DataFlowReferenceType} that the service accepts. */
-export enum KnownDataFlowReferenceType {
-  DataFlowReference = "DataFlowReference"
-}
-
-/**
- * Defines values for DataFlowReferenceType. \
- * {@link KnownDataFlowReferenceType} can be used interchangeably with DataFlowReferenceType,
- * this enum contains the known values that the service supports.
- * ### Known values supported by the service
- * **DataFlowReference**
- */
-export type DataFlowReferenceType = string;
-
-/** Known values of {@link NotebookReferenceType} that the service accepts. */
-export enum KnownNotebookReferenceType {
-  NotebookReference = "NotebookReference"
-}
-
-/**
- * Defines values for NotebookReferenceType. \
- * {@link KnownNotebookReferenceType} can be used interchangeably with NotebookReferenceType,
- * this enum contains the known values that the service supports.
- * ### Known values supported by the service
- * **NotebookReference**
- */
-export type NotebookReferenceType = string;
-
-/** Known values of {@link SparkJobReferenceType} that the service accepts. */
-export enum KnownSparkJobReferenceType {
-  SparkJobDefinitionReference = "SparkJobDefinitionReference"
-}
-
-/**
- * Defines values for SparkJobReferenceType. \
- * {@link KnownSparkJobReferenceType} can be used interchangeably with SparkJobReferenceType,
- * this enum contains the known values that the service supports.
- * ### Known values supported by the service
- * **SparkJobDefinitionReference**
- */
-export type SparkJobReferenceType = string;
-
-/** Known values of {@link SqlPoolReferenceType} that the service accepts. */
-export enum KnownSqlPoolReferenceType {
-  SqlPoolReference = "SqlPoolReference"
-}
-
-/**
- * Defines values for SqlPoolReferenceType. \
- * {@link KnownSqlPoolReferenceType} can be used interchangeably with SqlPoolReferenceType,
- * this enum contains the known values that the service supports.
- * ### Known values supported by the service
- * **SqlPoolReference**
- */
-export type SqlPoolReferenceType = string;
-
-/** Known values of {@link JsonFormatFilePattern} that the service accepts. */
-export enum KnownJsonFormatFilePattern {
-  SetOfObjects = "setOfObjects",
-  ArrayOfObjects = "arrayOfObjects"
-}
-
-/**
- * Defines values for JsonFormatFilePattern. \
- * {@link KnownJsonFormatFilePattern} can be used interchangeably with JsonFormatFilePattern,
- * this enum contains the known values that the service supports.
- * ### Known values supported by the service
- * **setOfObjects** \
- * **arrayOfObjects**
- */
-export type JsonFormatFilePattern = string;
-
-/** Known values of {@link CompressionCodec} that the service accepts. */
-export enum KnownCompressionCodec {
-  Bzip2 = "bzip2",
-  Gzip = "gzip",
-  Deflate = "deflate",
-  ZipDeflate = "zipDeflate",
-  Snappy = "snappy",
-  Lz4 = "lz4",
-  Tar = "tar",
-  TarGZip = "tarGZip"
-}
-
-/**
- * Defines values for CompressionCodec. \
- * {@link KnownCompressionCodec} can be used interchangeably with CompressionCodec,
- * this enum contains the known values that the service supports.
- * ### Known values supported by the service
- * **bzip2** \
- * **gzip** \
- * **deflate** \
- * **zipDeflate** \
- * **snappy** \
- * **lz4** \
- * **tar** \
- * **tarGZip**
+ * ### Known values supported by the service
+ * **DatasetReference**
 */
-export type CompressionCodec = string;
+export type DatasetReferenceType = string;

-/** Known values of {@link OrcCompressionCodec} that the service accepts. */
-export enum KnownOrcCompressionCodec {
-  None = "none",
-  Zlib = "zlib",
-  Snappy = "snappy",
-  Lzo = "lzo"
+/** Known values of {@link DataFlowReferenceType} that the service accepts. */
+export enum KnownDataFlowReferenceType {
+  DataFlowReference = "DataFlowReference"
 }

 /**
- * Defines values for OrcCompressionCodec. \
- * {@link KnownOrcCompressionCodec} can be used interchangeably with OrcCompressionCodec,
+ * Defines values for DataFlowReferenceType. \
+ * {@link KnownDataFlowReferenceType} can be used interchangeably with DataFlowReferenceType,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
- * **none** \
- * **zlib** \
- * **snappy** \
- * **lzo**
+ * **DataFlowReference**
 */
-export type OrcCompressionCodec = string;
+export type DataFlowReferenceType = string;

-/** Known values of {@link DynamicsDeploymentType} that the service accepts. */
-export enum KnownDynamicsDeploymentType {
-  Online = "Online",
-  OnPremisesWithIfd = "OnPremisesWithIfd"
+/** Known values of {@link NotebookReferenceType} that the service accepts. */
+export enum KnownNotebookReferenceType {
+  NotebookReference = "NotebookReference"
 }

 /**
- * Defines values for DynamicsDeploymentType. \
- * {@link KnownDynamicsDeploymentType} can be used interchangeably with DynamicsDeploymentType,
+ * Defines values for NotebookReferenceType. \
+ * {@link KnownNotebookReferenceType} can be used interchangeably with NotebookReferenceType,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
- * **Online** \
- * **OnPremisesWithIfd**
+ * **NotebookReference**
 */
-export type DynamicsDeploymentType = string;
+export type NotebookReferenceType = string;

-/** Known values of {@link DynamicsAuthenticationType} that the service accepts. */
-export enum KnownDynamicsAuthenticationType {
-  Office365 = "Office365",
-  Ifd = "Ifd",
-  AADServicePrincipal = "AADServicePrincipal"
+/** Known values of {@link SparkJobReferenceType} that the service accepts. */
+export enum KnownSparkJobReferenceType {
+  SparkJobDefinitionReference = "SparkJobDefinitionReference"
 }

 /**
- * Defines values for DynamicsAuthenticationType. \
- * {@link KnownDynamicsAuthenticationType} can be used interchangeably with DynamicsAuthenticationType,
+ * Defines values for SparkJobReferenceType. \
+ * {@link KnownSparkJobReferenceType} can be used interchangeably with SparkJobReferenceType,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
- * **Office365** \
- * **Ifd** \
- * **AADServicePrincipal**
+ * **SparkJobDefinitionReference**
 */
-export type DynamicsAuthenticationType = string;
+export type SparkJobReferenceType = string;

-/** Known values of {@link DynamicsServicePrincipalCredentialType} that the service accepts. */
-export enum KnownDynamicsServicePrincipalCredentialType {
-  ServicePrincipalKey = "ServicePrincipalKey",
-  ServicePrincipalCert = "ServicePrincipalCert"
+/** Known values of {@link SqlPoolReferenceType} that the service accepts. */
+export enum KnownSqlPoolReferenceType {
+  SqlPoolReference = "SqlPoolReference"
 }

 /**
- * Defines values for DynamicsServicePrincipalCredentialType. \
- * {@link KnownDynamicsServicePrincipalCredentialType} can be used interchangeably with DynamicsServicePrincipalCredentialType,
+ * Defines values for SqlPoolReferenceType. \
+ * {@link KnownSqlPoolReferenceType} can be used interchangeably with SqlPoolReferenceType,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
- * **ServicePrincipalKey** \
- * **ServicePrincipalCert**
+ * **SqlPoolReference**
 */
-export type DynamicsServicePrincipalCredentialType = string;
+export type SqlPoolReferenceType = string;

 /** Known values of {@link SybaseAuthenticationType} that the service accepts. */
 export enum KnownSybaseAuthenticationType {
@@ -10691,24 +10855,6 @@ export enum KnownSparkAuthenticationType {
 */
 export type SparkAuthenticationType = string;

-/** Known values of {@link HdiNodeTypes} that the service accepts. */
-export enum KnownHdiNodeTypes {
-  Headnode = "Headnode",
-  Workernode = "Workernode",
-  Zookeeper = "Zookeeper"
-}
-
-/**
- * Defines values for HdiNodeTypes. \
- * {@link KnownHdiNodeTypes} can be used interchangeably with HdiNodeTypes,
- * this enum contains the known values that the service supports.
- * ### Known values supported by the service
- * **Headnode** \
- * **Workernode** \
- * **Zookeeper**
- */
-export type HdiNodeTypes = string;
-
 /** Known values of {@link GoogleAdWordsAuthenticationType} that the service accepts. */
 export enum KnownGoogleAdWordsAuthenticationType {
   ServiceAuthentication = "ServiceAuthentication",
@@ -10725,22 +10871,6 @@ export enum KnownGoogleAdWordsAuthenticationType {
 */
 export type GoogleAdWordsAuthenticationType = string;

-/** Known values of {@link JsonWriteFilePattern} that the service accepts. */
-export enum KnownJsonWriteFilePattern {
-  SetOfObjects = "setOfObjects",
-  ArrayOfObjects = "arrayOfObjects"
-}
-
-/**
- * Defines values for JsonWriteFilePattern. \
- * {@link KnownJsonWriteFilePattern} can be used interchangeably with JsonWriteFilePattern,
- * this enum contains the known values that the service supports.
- * ### Known values supported by the service
- * **setOfObjects** \
- * **arrayOfObjects**
- */
-export type JsonWriteFilePattern = string;
-
 /** Known values of {@link SalesforceSourceReadBehavior} that the service accepts. */
 export enum KnownSalesforceSourceReadBehavior {
   Query = "Query",
@@ -11272,84 +11402,238 @@ export enum KnownIntegrationRuntimeEntityReferenceType {
 * {@link KnownIntegrationRuntimeEntityReferenceType} can be used interchangeably with IntegrationRuntimeEntityReferenceType,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
- * **IntegrationRuntimeReference** \
- * **LinkedServiceReference**
+ * **IntegrationRuntimeReference** \
+ * **LinkedServiceReference**
+ */
+export type IntegrationRuntimeEntityReferenceType = string;
+
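Note for reviewers: the `Known*` enum / `string` alias pairs being shuffled above all follow the generated SDK's extensible-enum pattern. The wire type is a plain `string`, and the `Known*` enum only documents the values the service recognizes today, so a value added server-side still round-trips through an older client. A minimal sketch of what that means for callers (the `"zstd"` value is a hypothetical future codec, not something the service advertises):

```ts
import { CompressionCodec, KnownCompressionCodec } from "@azure/synapse-artifacts";

// Both assignments type-check, because CompressionCodec is declared as `string`.
const documented: CompressionCodec = KnownCompressionCodec.Gzip; // "gzip"
const undocumented: CompressionCodec = "zstd"; // hypothetical future value

// Comparisons are ordinary string comparisons against the known constants.
function isTarArchive(codec: CompressionCodec): boolean {
  return (
    codec === KnownCompressionCodec.Tar || codec === KnownCompressionCodec.TarGZip
  );
}
```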
+/** Known values of {@link IntegrationRuntimeEdition} that the service accepts. */
+export enum KnownIntegrationRuntimeEdition {
+  Standard = "Standard",
+  Enterprise = "Enterprise"
+}
+
+/**
+ * Defines values for IntegrationRuntimeEdition. \
+ * {@link KnownIntegrationRuntimeEdition} can be used interchangeably with IntegrationRuntimeEdition,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **Standard** \
+ * **Enterprise**
+ */
+export type IntegrationRuntimeEdition = string;
+
+/** Known values of {@link JsonFormatFilePattern} that the service accepts. */
+export enum KnownJsonFormatFilePattern {
+  SetOfObjects = "setOfObjects",
+  ArrayOfObjects = "arrayOfObjects"
+}
+
+/**
+ * Defines values for JsonFormatFilePattern. \
+ * {@link KnownJsonFormatFilePattern} can be used interchangeably with JsonFormatFilePattern,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **setOfObjects** \
+ * **arrayOfObjects**
+ */
+export type JsonFormatFilePattern = string;
+
+/** Known values of {@link DatasetCompressionLevel} that the service accepts. */
+export enum KnownDatasetCompressionLevel {
+  Optimal = "Optimal",
+  Fastest = "Fastest"
+}
+
+/**
+ * Defines values for DatasetCompressionLevel. \
+ * {@link KnownDatasetCompressionLevel} can be used interchangeably with DatasetCompressionLevel,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **Optimal** \
+ * **Fastest**
+ */
+export type DatasetCompressionLevel = string;
+
+/** Known values of {@link AvroCompressionCodec} that the service accepts. */
+export enum KnownAvroCompressionCodec {
+  None = "none",
+  Deflate = "deflate",
+  Snappy = "snappy",
+  Xz = "xz",
+  Bzip2 = "bzip2"
+}
+
+/**
+ * Defines values for AvroCompressionCodec. \
+ * {@link KnownAvroCompressionCodec} can be used interchangeably with AvroCompressionCodec,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **none** \
+ * **deflate** \
+ * **snappy** \
+ * **xz** \
+ * **bzip2**
+ */
+export type AvroCompressionCodec = string;
+
+/** Known values of {@link ParquetCompressionCodecEnum} that the service accepts. */
+export enum KnownParquetCompressionCodecEnum {
+  None = "none",
+  Gzip = "gzip",
+  Snappy = "snappy",
+  Lzo = "lzo"
+}
+
+/**
+ * Defines values for ParquetCompressionCodecEnum. \
+ * {@link KnownParquetCompressionCodecEnum} can be used interchangeably with ParquetCompressionCodecEnum,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **none** \
+ * **gzip** \
+ * **snappy** \
+ * **lzo**
+ */
+export type ParquetCompressionCodecEnum = string;
+
+/** Known values of {@link CompressionCodec} that the service accepts. */
+export enum KnownCompressionCodec {
+  None = "none",
+  Lzo = "lzo",
+  Bzip2 = "bzip2",
+  Gzip = "gzip",
+  Deflate = "deflate",
+  ZipDeflate = "zipDeflate",
+  Snappy = "snappy",
+  Lz4 = "lz4",
+  Tar = "tar",
+  TarGZip = "tarGZip"
+}
+
+/**
+ * Defines values for CompressionCodec. \
+ * {@link KnownCompressionCodec} can be used interchangeably with CompressionCodec,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **none** \
+ * **lzo** \
+ * **bzip2** \
+ * **gzip** \
+ * **deflate** \
+ * **zipDeflate** \
+ * **snappy** \
+ * **lz4** \
+ * **tar** \
+ * **tarGZip**
+ */
+export type CompressionCodec = string;
+
+/** Known values of {@link OrcCompressionCodec} that the service accepts. */
+export enum KnownOrcCompressionCodec {
+  None = "none",
+  Zlib = "zlib",
+  Snappy = "snappy",
+  Lzo = "lzo"
+}
+
+/**
+ * Defines values for OrcCompressionCodec. \
+ * {@link KnownOrcCompressionCodec} can be used interchangeably with OrcCompressionCodec,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **none** \
+ * **zlib** \
+ * **snappy** \
+ * **lzo**
+ */
+export type OrcCompressionCodec = string;
+
+/** Known values of {@link DynamicsDeploymentType} that the service accepts. */
+export enum KnownDynamicsDeploymentType {
+  Online = "Online",
+  OnPremisesWithIfd = "OnPremisesWithIfd"
+}
+
+/**
+ * Defines values for DynamicsDeploymentType. \
+ * {@link KnownDynamicsDeploymentType} can be used interchangeably with DynamicsDeploymentType,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **Online** \
+ * **OnPremisesWithIfd**
 */
-export type IntegrationRuntimeEntityReferenceType = string;
+export type DynamicsDeploymentType = string;

-/** Known values of {@link IntegrationRuntimeEdition} that the service accepts. */
-export enum KnownIntegrationRuntimeEdition {
-  Standard = "Standard",
-  Enterprise = "Enterprise"
+/** Known values of {@link DynamicsAuthenticationType} that the service accepts. */
+export enum KnownDynamicsAuthenticationType {
+  Office365 = "Office365",
+  Ifd = "Ifd",
+  AADServicePrincipal = "AADServicePrincipal"
 }

 /**
- * Defines values for IntegrationRuntimeEdition. \
- * {@link KnownIntegrationRuntimeEdition} can be used interchangeably with IntegrationRuntimeEdition,
+ * Defines values for DynamicsAuthenticationType. \
+ * {@link KnownDynamicsAuthenticationType} can be used interchangeably with DynamicsAuthenticationType,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
- * **Standard** \
- * **Enterprise**
+ * **Office365** \
+ * **Ifd** \
+ * **AADServicePrincipal**
 */
-export type IntegrationRuntimeEdition = string;
+export type DynamicsAuthenticationType = string;

-/** Known values of {@link DatasetCompressionLevel} that the service accepts. */
-export enum KnownDatasetCompressionLevel {
-  Optimal = "Optimal",
-  Fastest = "Fastest"
+/** Known values of {@link HdiNodeTypes} that the service accepts. */
+export enum KnownHdiNodeTypes {
+  Headnode = "Headnode",
+  Workernode = "Workernode",
+  Zookeeper = "Zookeeper"
 }

 /**
- * Defines values for DatasetCompressionLevel. \
- * {@link KnownDatasetCompressionLevel} can be used interchangeably with DatasetCompressionLevel,
+ * Defines values for HdiNodeTypes. \
+ * {@link KnownHdiNodeTypes} can be used interchangeably with HdiNodeTypes,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
- * **Optimal** \
- * **Fastest**
+ * **Headnode** \
+ * **Workernode** \
+ * **Zookeeper**
 */
-export type DatasetCompressionLevel = string;
+export type HdiNodeTypes = string;

-/** Known values of {@link AvroCompressionCodec} that the service accepts. */
-export enum KnownAvroCompressionCodec {
-  None = "none",
-  Deflate = "deflate",
-  Snappy = "snappy",
-  Xz = "xz",
-  Bzip2 = "bzip2"
+/** Known values of {@link JsonWriteFilePattern} that the service accepts. */
+export enum KnownJsonWriteFilePattern {
+  SetOfObjects = "setOfObjects",
+  ArrayOfObjects = "arrayOfObjects"
 }

 /**
- * Defines values for AvroCompressionCodec. \
- * {@link KnownAvroCompressionCodec} can be used interchangeably with AvroCompressionCodec,
+ * Defines values for JsonWriteFilePattern. \
+ * {@link KnownJsonWriteFilePattern} can be used interchangeably with JsonWriteFilePattern,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
- * **none** \
- * **deflate** \
- * **snappy** \
- * **xz** \
- * **bzip2**
+ * **setOfObjects** \
+ * **arrayOfObjects**
 */
-export type AvroCompressionCodec = string;
+export type JsonWriteFilePattern = string;

-/** Known values of {@link ParquetCompressionCodecEnum} that the service accepts. */
-export enum KnownParquetCompressionCodecEnum {
-  None = "none",
-  Gzip = "gzip",
-  Snappy = "snappy",
-  Lzo = "lzo"
+/** Known values of {@link AmazonRdsForOraclePartitionOption} that the service accepts. */
+export enum KnownAmazonRdsForOraclePartitionOption {
+  None = "None",
+  PhysicalPartitionsOfTable = "PhysicalPartitionsOfTable",
+  DynamicRange = "DynamicRange"
 }

 /**
- * Defines values for ParquetCompressionCodecEnum. \
- * {@link KnownParquetCompressionCodecEnum} can be used interchangeably with ParquetCompressionCodecEnum,
+ * Defines values for AmazonRdsForOraclePartitionOption. \
+ * {@link KnownAmazonRdsForOraclePartitionOption} can be used interchangeably with AmazonRdsForOraclePartitionOption,
 * this enum contains the known values that the service supports.
 * ### Known values supported by the service
- * **none** \
- * **gzip** \
- * **snappy** \
- * **lzo**
+ * **None** \
+ * **PhysicalPartitionsOfTable** \
+ * **DynamicRange**
 */
-export type ParquetCompressionCodecEnum = string;
+export type AmazonRdsForOraclePartitionOption = string;

 /** Known values of {@link CopyBehaviorType} that the service accepts. */
 export enum KnownCopyBehaviorType {
@@ -11399,38 +11683,40 @@ export type DayOfWeek =
   | "Saturday";

 /** Optional parameters. */
-export interface LinkedServiceOperationsGetLinkedServicesByWorkspaceOptionalParams
+export interface KqlScriptsGetAllOptionalParams
   extends coreClient.OperationOptions {}

-/** Contains response data for the getLinkedServicesByWorkspace operation. */
-export type LinkedServiceOperationsGetLinkedServicesByWorkspaceResponse = LinkedServiceListResponse;
+/** Contains response data for the getAll operation. */
+export type KqlScriptsGetAllResponse = KqlScriptsResourceCollectionResponse;
+
+/** Optional parameters. */
+export interface KqlScriptsGetAllNextOptionalParams
+  extends coreClient.OperationOptions {}
+
+/** Contains response data for the getAllNext operation. */
+export type KqlScriptsGetAllNextResponse = KqlScriptsResourceCollectionResponse;

 /** Optional parameters. */
-export interface LinkedServiceOperationsCreateOrUpdateLinkedServiceOptionalParams
+export interface KqlScriptCreateOrUpdateOptionalParams
   extends coreClient.OperationOptions {
-  /** ETag of the linkedService entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */
-  ifMatch?: string;
   /** Delay to wait until next poll, in milliseconds. */
   updateIntervalInMs?: number;
   /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
   resumeFrom?: string;
 }

-/** Contains response data for the createOrUpdateLinkedService operation. */
-export type LinkedServiceOperationsCreateOrUpdateLinkedServiceResponse = LinkedServiceResource;
+/** Contains response data for the createOrUpdate operation. */
+export type KqlScriptCreateOrUpdateResponse = KqlScriptResource;
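Note for reviewers: the `KqlScripts*` / `KqlScript*` types introduced above are the option bags and response aliases for the new KQL script operation groups. A hedged usage sketch, assuming the client surfaces the pageable group as `kqlScripts` and that the generated pager folds `getAll`/`getAllNext` into one async iterator (the property and method names follow the usual generated conventions and are not quoted verbatim from this patch):

```ts
import { DefaultAzureCredential } from "@azure/identity";
import { ArtifactsClient } from "@azure/synapse-artifacts";

const client = new ArtifactsClient(
  new DefaultAzureCredential(),
  "https://myworkspace.dev.azuresynapse.net" // illustrative workspace endpoint
);

async function showKqlScripts(): Promise<void> {
  // getAll/getAllNext above are the raw page plumbing; the generated
  // operation group is assumed to hide the "next" calls behind an iterator.
  for await (const script of client.kqlScripts.getAll()) {
    console.log(script.name);
  }
}
```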
 /** Optional parameters. */
-export interface LinkedServiceOperationsGetLinkedServiceOptionalParams
-  extends coreClient.OperationOptions {
-  /** ETag of the linked service entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */
-  ifNoneMatch?: string;
-}
+export interface KqlScriptGetByNameOptionalParams
+  extends coreClient.OperationOptions {}

-/** Contains response data for the getLinkedService operation. */
-export type LinkedServiceOperationsGetLinkedServiceResponse = LinkedServiceResource;
+/** Contains response data for the getByName operation. */
+export type KqlScriptGetByNameResponse = KqlScriptResource;

 /** Optional parameters. */
-export interface LinkedServiceOperationsDeleteLinkedServiceOptionalParams
+export interface KqlScriptDeleteByNameOptionalParams
   extends coreClient.OperationOptions {
   /** Delay to wait until next poll, in milliseconds. */
   updateIntervalInMs?: number;
@@ -11439,7 +11725,7 @@ export interface LinkedServiceOperationsDeleteLinkedServiceOptionalParams
 }

 /** Optional parameters. */
-export interface LinkedServiceOperationsRenameLinkedServiceOptionalParams
+export interface KqlScriptRenameOptionalParams
   extends coreClient.OperationOptions {
   /** Delay to wait until next poll, in milliseconds. */
   updateIntervalInMs?: number;
@@ -11448,23 +11734,16 @@ export interface LinkedServiceOperationsRenameLinkedServiceOptionalParams
 }

 /** Optional parameters. */
-export interface LinkedServiceOperationsGetLinkedServicesByWorkspaceNextOptionalParams
-  extends coreClient.OperationOptions {}
-
-/** Contains response data for the getLinkedServicesByWorkspaceNext operation. */
-export type LinkedServiceOperationsGetLinkedServicesByWorkspaceNextResponse = LinkedServiceListResponse;
-
-/** Optional parameters. */
-export interface DatasetOperationsGetDatasetsByWorkspaceOptionalParams
+export interface SparkConfigurationGetSparkConfigurationsByWorkspaceOptionalParams
   extends coreClient.OperationOptions {}

-/** Contains response data for the getDatasetsByWorkspace operation. */
-export type DatasetOperationsGetDatasetsByWorkspaceResponse = DatasetListResponse;
+/** Contains response data for the getSparkConfigurationsByWorkspace operation. */
+export type SparkConfigurationGetSparkConfigurationsByWorkspaceResponse = SparkConfigurationListResponse;

 /** Optional parameters. */
-export interface DatasetOperationsCreateOrUpdateDatasetOptionalParams
+export interface SparkConfigurationCreateOrUpdateSparkConfigurationOptionalParams
   extends coreClient.OperationOptions {
-  /** ETag of the dataset entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */
+  /** ETag of the sparkConfiguration entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */
   ifMatch?: string;
   /** Delay to wait until next poll, in milliseconds. */
   updateIntervalInMs?: number;
@@ -11472,21 +11751,21 @@ export interface DatasetOperationsCreateOrUpdateDatasetOptionalParams
   resumeFrom?: string;
 }

-/** Contains response data for the createOrUpdateDataset operation. */
-export type DatasetOperationsCreateOrUpdateDatasetResponse = DatasetResource;
+/** Contains response data for the createOrUpdateSparkConfiguration operation. */
+export type SparkConfigurationCreateOrUpdateSparkConfigurationResponse = SparkConfigurationResource;

 /** Optional parameters. */
-export interface DatasetOperationsGetDatasetOptionalParams
+export interface SparkConfigurationGetSparkConfigurationOptionalParams
   extends coreClient.OperationOptions {
-  /** ETag of the dataset entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */
+  /** ETag of the sparkConfiguration entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */
   ifNoneMatch?: string;
 }

-/** Contains response data for the getDataset operation. */
-export type DatasetOperationsGetDatasetResponse = DatasetResource;
+/** Contains response data for the getSparkConfiguration operation. */
+export type SparkConfigurationGetSparkConfigurationResponse = SparkConfigurationResource;

 /** Optional parameters. */
-export interface DatasetOperationsDeleteDatasetOptionalParams
+export interface SparkConfigurationDeleteSparkConfigurationOptionalParams
   extends coreClient.OperationOptions {
   /** Delay to wait until next poll, in milliseconds. */
   updateIntervalInMs?: number;
@@ -11495,7 +11774,7 @@ export interface DatasetOperationsDeleteDatasetOptionalParams
 }

 /** Optional parameters. */
-export interface DatasetOperationsRenameDatasetOptionalParams
+export interface SparkConfigurationRenameSparkConfigurationOptionalParams
   extends coreClient.OperationOptions {
   /** Delay to wait until next poll, in milliseconds. */
   updateIntervalInMs?: number;
@@ -11504,23 +11783,30 @@ export interface DatasetOperationsRenameDatasetOptionalParams
 }

 /** Optional parameters. */
-export interface DatasetOperationsGetDatasetsByWorkspaceNextOptionalParams
+export interface SparkConfigurationGetSparkConfigurationsByWorkspaceNextOptionalParams
   extends coreClient.OperationOptions {}

-/** Contains response data for the getDatasetsByWorkspaceNext operation. */
-export type DatasetOperationsGetDatasetsByWorkspaceNextResponse = DatasetListResponse;
+/** Contains response data for the getSparkConfigurationsByWorkspaceNext operation. */
+export type SparkConfigurationGetSparkConfigurationsByWorkspaceNextResponse = SparkConfigurationListResponse;

 /** Optional parameters. */
-export interface PipelineOperationsGetPipelinesByWorkspaceOptionalParams
+export interface BigDataPoolsListOptionalParams
   extends coreClient.OperationOptions {}

-/** Contains response data for the getPipelinesByWorkspace operation. */
-export type PipelineOperationsGetPipelinesByWorkspaceResponse = PipelineListResponse;
+/** Contains response data for the list operation. */
+export type BigDataPoolsListResponse = BigDataPoolResourceInfoListResult;
+
+/** Optional parameters. */
+export interface BigDataPoolsGetOptionalParams
+  extends coreClient.OperationOptions {}
+
+/** Contains response data for the get operation. */
+export type BigDataPoolsGetResponse = BigDataPoolResourceInfo;
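Note for reviewers: `BigDataPoolsListResponse` and `BigDataPoolsGetResponse` above are plain single-request calls, with no paging and no polling. A sketch of the expected shape (the `bigDataPools` property name and the `testsparkpool` pool name are assumptions; the list result carries its items in `value`, as ARM-style list results do):

```ts
import { ArtifactsClient } from "@azure/synapse-artifacts";

async function inspectPools(client: ArtifactsClient): Promise<void> {
  // One round trip returns the whole collection body; no pager involved.
  const pools = await client.bigDataPools.list();
  for (const pool of pools.value ?? []) {
    console.log(pool.name);
  }

  // Point read of a single pool by name.
  const spark = await client.bigDataPools.get("testsparkpool");
  console.log(spark.id);
}
```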
 /** Optional parameters. */
-export interface PipelineOperationsCreateOrUpdatePipelineOptionalParams
+export interface DataFlowCreateOrUpdateDataFlowOptionalParams
   extends coreClient.OperationOptions {
-  /** ETag of the pipeline entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */
+  /** ETag of the data flow entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */
   ifMatch?: string;
   /** Delay to wait until next poll, in milliseconds. */
   updateIntervalInMs?: number;
@@ -11528,21 +11814,21 @@ export interface PipelineOperationsCreateOrUpdatePipelineOptionalParams
   resumeFrom?: string;
 }

-/** Contains response data for the createOrUpdatePipeline operation. */
-export type PipelineOperationsCreateOrUpdatePipelineResponse = PipelineResource;
+/** Contains response data for the createOrUpdateDataFlow operation. */
+export type DataFlowCreateOrUpdateDataFlowResponse = DataFlowResource;

 /** Optional parameters. */
-export interface PipelineOperationsGetPipelineOptionalParams
+export interface DataFlowGetDataFlowOptionalParams
   extends coreClient.OperationOptions {
-  /** ETag of the pipeline entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */
+  /** ETag of the data flow entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */
   ifNoneMatch?: string;
 }

-/** Contains response data for the getPipeline operation. */
-export type PipelineOperationsGetPipelineResponse = PipelineResource;
+/** Contains response data for the getDataFlow operation. */
+export type DataFlowGetDataFlowResponse = DataFlowResource;

 /** Optional parameters. */
-export interface PipelineOperationsDeletePipelineOptionalParams
+export interface DataFlowDeleteDataFlowOptionalParams
   extends coreClient.OperationOptions {
   /** Delay to wait until next poll, in milliseconds. */
   updateIntervalInMs?: number;
@@ -11551,7 +11837,7 @@ export interface PipelineOperationsDeletePipelineOptionalParams
 }

 /** Optional parameters. */
-export interface PipelineOperationsRenamePipelineOptionalParams
+export interface DataFlowRenameDataFlowOptionalParams
   extends coreClient.OperationOptions {
   /** Delay to wait until next poll, in milliseconds. */
   updateIntervalInMs?: number;
@@ -11560,67 +11846,79 @@ export interface PipelineOperationsRenamePipelineOptionalParams
 }

 /** Optional parameters. */
-export interface PipelineOperationsCreatePipelineRunOptionalParams
-  extends coreClient.OperationOptions {
-  /** Parameters of the pipeline run. These parameters will be used only if the runId is not specified. */
-  parameters?: { [propertyName: string]: any };
-  /** The pipeline run identifier. If run ID is specified the parameters of the specified run will be used to create a new run. */
-  referencePipelineRunId?: string;
-  /** Recovery mode flag. If recovery mode is set to true, the specified referenced pipeline run and the new run will be grouped under the same groupId. */
-  isRecovery?: boolean;
-  /** In recovery mode, the rerun will start from this activity. If not specified, all activities will run. */
-  startActivityName?: string;
-}
+export interface DataFlowGetDataFlowsByWorkspaceOptionalParams
+  extends coreClient.OperationOptions {}

-/** Contains response data for the createPipelineRun operation. */
-export type PipelineOperationsCreatePipelineRunResponse = CreateRunResponse;
+/** Contains response data for the getDataFlowsByWorkspace operation. */
+export type DataFlowGetDataFlowsByWorkspaceResponse = DataFlowListResponse;

 /** Optional parameters. */
-export interface PipelineOperationsGetPipelinesByWorkspaceNextOptionalParams
+export interface DataFlowGetDataFlowsByWorkspaceNextOptionalParams
   extends coreClient.OperationOptions {}

-/** Contains response data for the getPipelinesByWorkspaceNext operation. */
-export type PipelineOperationsGetPipelinesByWorkspaceNextResponse = PipelineListResponse;
+/** Contains response data for the getDataFlowsByWorkspaceNext operation. */
+export type DataFlowGetDataFlowsByWorkspaceNextResponse = DataFlowListResponse;

 /** Optional parameters. */
-export interface PipelineRunOperationsQueryPipelineRunsByWorkspaceOptionalParams
-  extends coreClient.OperationOptions {}
+export interface DataFlowDebugSessionCreateDataFlowDebugSessionOptionalParams
+  extends coreClient.OperationOptions {
+  /** Delay to wait until next poll, in milliseconds. */
+  updateIntervalInMs?: number;
+  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
+  resumeFrom?: string;
+}

-/** Contains response data for the queryPipelineRunsByWorkspace operation. */
-export type PipelineRunOperationsQueryPipelineRunsByWorkspaceResponse = PipelineRunsQueryResponse;
+/** Contains response data for the createDataFlowDebugSession operation. */
+export type DataFlowDebugSessionCreateDataFlowDebugSessionResponse = CreateDataFlowDebugSessionResponse;

 /** Optional parameters. */
-export interface PipelineRunOperationsGetPipelineRunOptionalParams
+export interface DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceOptionalParams
   extends coreClient.OperationOptions {}

-/** Contains response data for the getPipelineRun operation. */
-export type PipelineRunOperationsGetPipelineRunResponse = PipelineRun;
+/** Contains response data for the queryDataFlowDebugSessionsByWorkspace operation. */
+export type DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceResponse = QueryDataFlowDebugSessionsResponse;

 /** Optional parameters. */
-export interface PipelineRunOperationsQueryActivityRunsOptionalParams
+export interface DataFlowDebugSessionAddDataFlowOptionalParams
   extends coreClient.OperationOptions {}

-/** Contains response data for the queryActivityRuns operation. */
-export type PipelineRunOperationsQueryActivityRunsResponse = ActivityRunsQueryResponse;
+/** Contains response data for the addDataFlow operation. */
+export type DataFlowDebugSessionAddDataFlowResponse = AddDataFlowToDebugSessionResponse;
+
+/** Optional parameters. */
+export interface DataFlowDebugSessionDeleteDataFlowDebugSessionOptionalParams
+  extends coreClient.OperationOptions {}

 /** Optional parameters. */
-export interface PipelineRunOperationsCancelPipelineRunOptionalParams
+export interface DataFlowDebugSessionExecuteCommandOptionalParams
   extends coreClient.OperationOptions {
-  /** If true, cancel all the Child pipelines that are triggered by the current pipeline. */
-  isRecursive?: boolean;
+  /** Delay to wait until next poll, in milliseconds. */
+  updateIntervalInMs?: number;
+  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
+  resumeFrom?: string;
 }

+/** Contains response data for the executeCommand operation. */
+export type DataFlowDebugSessionExecuteCommandResponse = DataFlowDebugCommandResponse;
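Note for reviewers: nearly every long-running option bag in this file repeats the same two knobs: `updateIntervalInMs` adjusts the delay between status polls, and `resumeFrom` rehydrates a poller from previously serialized state. A sketch of the intended flow under the assumption that the debug-session group exposes a `beginExecuteCommand` poller method (the method name follows the generated `begin*` convention and the request payload is abbreviated; neither is quoted from this patch):

```ts
import { ArtifactsClient } from "@azure/synapse-artifacts";

async function pollAndResume(client: ArtifactsClient): Promise<void> {
  const request = {
    sessionId: "<debug-session-id>", // placeholder session id
    command: "executePreviewQuery"
  };

  // Poll every two seconds instead of the service-suggested interval.
  const poller = await client.dataFlowDebugSession.beginExecuteCommand(request, {
    updateIntervalInMs: 2000
  });

  // Checkpoint the poller state (e.g. before a process restart) ...
  const serialized = poller.toString();

  // ... and later pick the same operation back up where it left off.
  const resumed = await client.dataFlowDebugSession.beginExecuteCommand(request, {
    resumeFrom: serialized
  });
  console.log(await resumed.pollUntilDone());
}
```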
+
 /** Optional parameters. */
-export interface TriggerOperationsGetTriggersByWorkspaceOptionalParams
+export interface DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceNextOptionalParams
   extends coreClient.OperationOptions {}

-/** Contains response data for the getTriggersByWorkspace operation. */
-export type TriggerOperationsGetTriggersByWorkspaceResponse = TriggerListResponse;
+/** Contains response data for the queryDataFlowDebugSessionsByWorkspaceNext operation. */
+export type DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceNextResponse = QueryDataFlowDebugSessionsResponse;
+
+/** Optional parameters. */
+export interface DatasetGetDatasetsByWorkspaceOptionalParams
+  extends coreClient.OperationOptions {}
+
+/** Contains response data for the getDatasetsByWorkspace operation. */
+export type DatasetGetDatasetsByWorkspaceResponse = DatasetListResponse;

 /** Optional parameters. */
-export interface TriggerOperationsCreateOrUpdateTriggerOptionalParams
+export interface DatasetCreateOrUpdateDatasetOptionalParams
   extends coreClient.OperationOptions {
-  /** ETag of the trigger entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */
+  /** ETag of the dataset entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */
   ifMatch?: string;
   /** Delay to wait until next poll, in milliseconds. */
   updateIntervalInMs?: number;
@@ -11628,21 +11926,21 @@
   resumeFrom?: string;
 }

-/** Contains response data for the createOrUpdateTrigger operation. */
-export type TriggerOperationsCreateOrUpdateTriggerResponse = TriggerResource;
+/** Contains response data for the createOrUpdateDataset operation. */
+export type DatasetCreateOrUpdateDatasetResponse = DatasetResource;

 /** Optional parameters. */
-export interface TriggerOperationsGetTriggerOptionalParams
+export interface DatasetGetDatasetOptionalParams
   extends coreClient.OperationOptions {
-  /** ETag of the trigger entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */
+  /** ETag of the dataset entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */
   ifNoneMatch?: string;
 }

-/** Contains response data for the getTrigger operation. */
-export type TriggerOperationsGetTriggerResponse = TriggerResource;
+/** Contains response data for the getDataset operation. */
+export type DatasetGetDatasetResponse = DatasetResource;

 /** Optional parameters. */
-export interface TriggerOperationsDeleteTriggerOptionalParams
+export interface DatasetDeleteDatasetOptionalParams
   extends coreClient.OperationOptions {
   /** Delay to wait until next poll, in milliseconds. */
   updateIntervalInMs?: number;
@@ -11651,7 +11949,7 @@ export interface TriggerOperationsDeleteTriggerOptionalParams
 }

 /** Optional parameters. */
-export interface TriggerOperationsSubscribeTriggerToEventsOptionalParams
+export interface DatasetRenameDatasetOptionalParams
   extends coreClient.OperationOptions {
   /** Delay to wait until next poll, in milliseconds. */
   updateIntervalInMs?: number;
@@ -11659,18 +11957,46 @@ export interface TriggerOperationsSubscribeTriggerToEventsOptionalParams
   resumeFrom?: string;
 }

-/** Contains response data for the subscribeTriggerToEvents operation. */
-export type TriggerOperationsSubscribeTriggerToEventsResponse = TriggerSubscriptionOperationStatus;
+/** Optional parameters. */
+export interface DatasetGetDatasetsByWorkspaceNextOptionalParams
+  extends coreClient.OperationOptions {}
+
+/** Contains response data for the getDatasetsByWorkspaceNext operation. */
+export type DatasetGetDatasetsByWorkspaceNextResponse = DatasetListResponse;
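Note for reviewers: the `ifMatch`/`ifNoneMatch` members above are standard HTTP ETag preconditions. Send `ifMatch` on an update so the write only lands if the entity is still at the revision you read (or "*" to force it), and `ifNoneMatch` on a get to skip the body when nothing changed. A sketch against the dataset group (the `datasetOperations` property and the `begin*AndWait` helper follow the generated naming conventions and are assumptions):

```ts
import { ArtifactsClient } from "@azure/synapse-artifacts";

async function guardedDatasetUpdate(client: ArtifactsClient): Promise<void> {
  const current = await client.datasetOperations.getDataset("MyDataset");

  await client.datasetOperations.beginCreateOrUpdateDatasetAndWait(
    "MyDataset",
    { properties: current.properties },
    // Fails with 412 Precondition Failed if another writer got there first.
    { ifMatch: current.etag }
  );
}
```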
+
+/** Optional parameters. */
+export interface WorkspaceGitRepoManagementGetGitHubAccessTokenOptionalParams
+  extends coreClient.OperationOptions {
+  /** Can provide a guid, which is helpful for debugging and to provide better customer support */
+  clientRequestId?: string;
+}
+
+/** Contains response data for the getGitHubAccessToken operation. */
+export type WorkspaceGitRepoManagementGetGitHubAccessTokenResponse = GitHubAccessTokenResponse;

 /** Optional parameters. */
-export interface TriggerOperationsGetEventSubscriptionStatusOptionalParams
+export interface IntegrationRuntimesListOptionalParams
   extends coreClient.OperationOptions {}

-/** Contains response data for the getEventSubscriptionStatus operation. */
-export type TriggerOperationsGetEventSubscriptionStatusResponse = TriggerSubscriptionOperationStatus;
+/** Contains response data for the list operation. */
+export type IntegrationRuntimesListResponse = IntegrationRuntimeListResponse;
+
+/** Optional parameters. */
+export interface IntegrationRuntimesGetOptionalParams
+  extends coreClient.OperationOptions {}
+
+/** Contains response data for the get operation. */
+export type IntegrationRuntimesGetResponse = IntegrationRuntimeResource;
+
+/** Optional parameters. */
+export interface LibraryListOptionalParams
+  extends coreClient.OperationOptions {}
+
+/** Contains response data for the list operation. */
+export type LibraryListOperationResponse = LibraryListResponse;

 /** Optional parameters. */
-export interface TriggerOperationsUnsubscribeTriggerFromEventsOptionalParams
+export interface LibraryFlushOptionalParams
   extends coreClient.OperationOptions {
   /** Delay to wait until next poll, in milliseconds. */
   updateIntervalInMs?: number;
@@ -11678,11 +12004,15 @@ export interface TriggerOperationsUnsubscribeTriggerFromEventsOptionalParams
   resumeFrom?: string;
 }

-/** Contains response data for the unsubscribeTriggerFromEvents operation. */
-export type TriggerOperationsUnsubscribeTriggerFromEventsResponse = TriggerSubscriptionOperationStatus;
+/** Optional parameters. */
+export interface LibraryGetOperationResultOptionalParams
+  extends coreClient.OperationOptions {}
+
+/** Contains response data for the getOperationResult operation. */
+export type LibraryGetOperationResultResponse = LibraryResource;

 /** Optional parameters. */
-export interface TriggerOperationsStartTriggerOptionalParams
+export interface LibraryDeleteOptionalParams
   extends coreClient.OperationOptions {
   /** Delay to wait until next poll, in milliseconds. */
   updateIntervalInMs?: number;
@@ -11691,40 +12021,45 @@
 }

 /** Optional parameters. */
-export interface TriggerOperationsStopTriggerOptionalParams
+export interface LibraryGetOptionalParams extends coreClient.OperationOptions {}
+
+/** Contains response data for the get operation. */
+export type LibraryGetResponse = LibraryResource;
+
+/** Optional parameters. */
+export interface LibraryCreateOptionalParams
   extends coreClient.OperationOptions {
   /** Delay to wait until next poll, in milliseconds. */
   updateIntervalInMs?: number;
   /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
   resumeFrom?: string;
-}
-
-/** Optional parameters. */
-export interface TriggerOperationsGetTriggersByWorkspaceNextOptionalParams
-  extends coreClient.OperationOptions {}
-
-/** Contains response data for the getTriggersByWorkspaceNext operation. */
-export type TriggerOperationsGetTriggersByWorkspaceNextResponse = TriggerListResponse;
+}

 /** Optional parameters. */
-export interface TriggerRunOperationsRerunTriggerInstanceOptionalParams
-  extends coreClient.OperationOptions {}
+export interface LibraryAppendOptionalParams
+  extends coreClient.OperationOptions {
+  /** Set this header to a byte offset at which the block is expected to be appended. The request succeeds only if the current offset matches this value. Otherwise, the request fails with the AppendPositionConditionNotMet error (HTTP status code 412 – Precondition Failed) */
+  blobConditionAppendPosition?: number;
+}

 /** Optional parameters. */
-export interface TriggerRunOperationsCancelTriggerInstanceOptionalParams
+export interface LibraryListNextOptionalParams
   extends coreClient.OperationOptions {}

+/** Contains response data for the listNext operation. */
+export type LibraryListNextResponse = LibraryListResponse;
+
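Note for reviewers: taken together, the library option bags above describe a chunked upload protocol: create the library, append blocks (optionally pinned to an expected offset via `blobConditionAppendPosition`, so a replayed chunk fails fast with 412 AppendPositionConditionNotMet instead of duplicating bytes), flush to commit, then inspect the outcome with getOperationResult. A sketch, with the `begin*AndWait` helper names assumed from the generated conventions:

```ts
import { ArtifactsClient } from "@azure/synapse-artifacts";

async function uploadLibrary(client: ArtifactsClient, jar: Buffer): Promise<void> {
  const libraryName = "my-package.jar"; // illustrative library name

  await client.library.beginCreateAndWait(libraryName);

  // Single chunk starting at offset 0; for multi-chunk uploads, advance the
  // expected offset by the number of bytes appended so far.
  await client.library.append(libraryName, jar, {
    blobConditionAppendPosition: 0
  });

  // Commit the staged blocks; the flush itself is a long-running operation.
  await client.library.beginFlushAndWait(libraryName);
}
```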
 /** Optional parameters. */
-export interface TriggerRunOperationsQueryTriggerRunsByWorkspaceOptionalParams
+export interface LinkedServiceGetLinkedServicesByWorkspaceOptionalParams
   extends coreClient.OperationOptions {}

-/** Contains response data for the queryTriggerRunsByWorkspace operation. */
-export type TriggerRunOperationsQueryTriggerRunsByWorkspaceResponse = TriggerRunsQueryResponse;
+/** Contains response data for the getLinkedServicesByWorkspace operation. */
+export type LinkedServiceGetLinkedServicesByWorkspaceResponse = LinkedServiceListResponse;

 /** Optional parameters. */
-export interface DataFlowOperationsCreateOrUpdateDataFlowOptionalParams
+export interface LinkedServiceCreateOrUpdateLinkedServiceOptionalParams
   extends coreClient.OperationOptions {
-  /** ETag of the data flow entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */
+  /** ETag of the linkedService entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */
   ifMatch?: string;
   /** Delay to wait until next poll, in milliseconds. */
   updateIntervalInMs?: number;
@@ -11732,21 +12067,21 @@
   resumeFrom?: string;
 }

-/** Contains response data for the createOrUpdateDataFlow operation. */
-export type DataFlowOperationsCreateOrUpdateDataFlowResponse = DataFlowResource;
+/** Contains response data for the createOrUpdateLinkedService operation. */
+export type LinkedServiceCreateOrUpdateLinkedServiceResponse = LinkedServiceResource;

 /** Optional parameters. */
-export interface DataFlowOperationsGetDataFlowOptionalParams
+export interface LinkedServiceGetLinkedServiceOptionalParams
   extends coreClient.OperationOptions {
-  /** ETag of the data flow entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */
+  /** ETag of the linked service entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */
   ifNoneMatch?: string;
 }

-/** Contains response data for the getDataFlow operation. */
-export type DataFlowOperationsGetDataFlowResponse = DataFlowResource;
+/** Contains response data for the getLinkedService operation. */
+export type LinkedServiceGetLinkedServiceResponse = LinkedServiceResource;

 /** Optional parameters. */
-export interface DataFlowOperationsDeleteDataFlowOptionalParams
+export interface LinkedServiceDeleteLinkedServiceOptionalParams
   extends coreClient.OperationOptions {
   /** Delay to wait until next poll, in milliseconds. */
   updateIntervalInMs?: number;
@@ -11755,7 +12090,7 @@ export interface DataFlowOperationsDeleteDataFlowOptionalParams
 }

 /** Optional parameters. */
-export interface DataFlowOperationsRenameDataFlowOptionalParams
+export interface LinkedServiceRenameLinkedServiceOptionalParams
   extends coreClient.OperationOptions {
   /** Delay to wait until next poll, in milliseconds. */
   updateIntervalInMs?: number;
@@ -11764,51 +12099,61 @@ export interface DataFlowOperationsRenameDataFlowOptionalParams
 }

 /** Optional parameters. */
-export interface DataFlowOperationsGetDataFlowsByWorkspaceOptionalParams
+export interface LinkedServiceGetLinkedServicesByWorkspaceNextOptionalParams
   extends coreClient.OperationOptions {}

-/** Contains response data for the getDataFlowsByWorkspace operation. */
-export type DataFlowOperationsGetDataFlowsByWorkspaceResponse = DataFlowListResponse;
+/** Contains response data for the getLinkedServicesByWorkspaceNext operation. */
+export type LinkedServiceGetLinkedServicesByWorkspaceNextResponse = LinkedServiceListResponse;

 /** Optional parameters. */
-export interface DataFlowOperationsGetDataFlowsByWorkspaceNextOptionalParams
+export interface NotebookGetNotebooksByWorkspaceOptionalParams
   extends coreClient.OperationOptions {}

-/** Contains response data for the getDataFlowsByWorkspaceNext operation. */
-export type DataFlowOperationsGetDataFlowsByWorkspaceNextResponse = DataFlowListResponse;
+/** Contains response data for the getNotebooksByWorkspace operation. */
+export type NotebookGetNotebooksByWorkspaceResponse = NotebookListResponse;

 /** Optional parameters. */
-export interface DataFlowDebugSessionCreateDataFlowDebugSessionOptionalParams
+export interface NotebookGetNotebookSummaryByWorkSpaceOptionalParams
+  extends coreClient.OperationOptions {}
+
+/** Contains response data for the getNotebookSummaryByWorkSpace operation. */
+export type NotebookGetNotebookSummaryByWorkSpaceResponse = NotebookListResponse;
+
+/** Optional parameters. */
+export interface NotebookCreateOrUpdateNotebookOptionalParams
   extends coreClient.OperationOptions {
+  /** ETag of the Notebook entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */
+  ifMatch?: string;
   /** Delay to wait until next poll, in milliseconds. */
   updateIntervalInMs?: number;
   /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
   resumeFrom?: string;
 }

-/** Contains response data for the createDataFlowDebugSession operation. */
-export type DataFlowDebugSessionCreateDataFlowDebugSessionResponse = CreateDataFlowDebugSessionResponse;
-
-/** Optional parameters. */
-export interface DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceOptionalParams
-  extends coreClient.OperationOptions {}
-
-/** Contains response data for the queryDataFlowDebugSessionsByWorkspace operation. */
-export type DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceResponse = QueryDataFlowDebugSessionsResponse;
+/** Contains response data for the createOrUpdateNotebook operation. */
+export type NotebookCreateOrUpdateNotebookResponse = NotebookResource;

 /** Optional parameters. */
-export interface DataFlowDebugSessionAddDataFlowOptionalParams
-  extends coreClient.OperationOptions {}
+export interface NotebookGetNotebookOptionalParams
+  extends coreClient.OperationOptions {
+  /** ETag of the Notebook entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */
+  ifNoneMatch?: string;
+}

-/** Contains response data for the addDataFlow operation. */
-export type DataFlowDebugSessionAddDataFlowResponse = AddDataFlowToDebugSessionResponse;
+/** Contains response data for the getNotebook operation. */
+export type NotebookGetNotebookResponse = NotebookResource;

 /** Optional parameters. */
-export interface DataFlowDebugSessionDeleteDataFlowDebugSessionOptionalParams
-  extends coreClient.OperationOptions {}
+export interface NotebookDeleteNotebookOptionalParams
+  extends coreClient.OperationOptions {
+  /** Delay to wait until next poll, in milliseconds. */
+  updateIntervalInMs?: number;
+  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
+  resumeFrom?: string;
+}

 /** Optional parameters. */
-export interface DataFlowDebugSessionExecuteCommandOptionalParams
+export interface NotebookRenameNotebookOptionalParams
   extends coreClient.OperationOptions {
   /** Delay to wait until next poll, in milliseconds. */
   updateIntervalInMs?: number;
@@ -11816,27 +12161,35 @@
   resumeFrom?: string;
 }

-/** Contains response data for the executeCommand operation. */
-export type DataFlowDebugSessionExecuteCommandResponse = DataFlowDebugCommandResponse;
+/** Optional parameters. */
+export interface NotebookGetNotebooksByWorkspaceNextOptionalParams
+  extends coreClient.OperationOptions {}
+
+/** Contains response data for the getNotebooksByWorkspaceNext operation. */
+export type NotebookGetNotebooksByWorkspaceNextResponse = NotebookListResponse;

 /** Optional parameters. */
-export interface DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceNextOptionalParams
+export interface NotebookGetNotebookSummaryByWorkSpaceNextOptionalParams
   extends coreClient.OperationOptions {}

-/** Contains response data for the queryDataFlowDebugSessionsByWorkspaceNext operation. */
-export type DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceNextResponse = QueryDataFlowDebugSessionsResponse;
+/** Contains response data for the getNotebookSummaryByWorkSpaceNext operation. */
+export type NotebookGetNotebookSummaryByWorkSpaceNextResponse = NotebookListResponse;

 /** Optional parameters. */
-export interface SqlScriptOperationsGetSqlScriptsByWorkspaceOptionalParams
+export interface NotebookOperationResultGetOptionalParams
   extends coreClient.OperationOptions {}

-/** Contains response data for the getSqlScriptsByWorkspace operation. */
-export type SqlScriptOperationsGetSqlScriptsByWorkspaceResponse = SqlScriptsListResponse;
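Note for reviewers: each `...ByWorkspace` / `...ByWorkspaceNext` pair above is the raw page-link plumbing; the generated operation group is expected to fold both into one `PagedAsyncIterableIterator`, so callers never touch the `Next` types directly. A sketch (the `notebookOperations.listNotebooksByWorkspace` name follows the generated `list*` pager convention and is an assumption):

```ts
import { ArtifactsClient } from "@azure/synapse-artifacts";

async function walkNotebooks(client: ArtifactsClient): Promise<void> {
  // Item-level iteration; continuation-token calls happen lazily underneath.
  for await (const notebook of client.notebookOperations.listNotebooksByWorkspace()) {
    console.log(notebook.name);
  }

  // Page-level iteration, when the raw batches matter.
  for await (const page of client.notebookOperations
    .listNotebooksByWorkspace()
    .byPage()) {
    console.log(`fetched a page of ${page.length} notebooks`);
  }
}
```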
+/** Optional parameters. */
+export interface PipelineGetPipelinesByWorkspaceOptionalParams
+  extends coreClient.OperationOptions {}
+
+/** Contains response data for the getPipelinesByWorkspace operation. */
+export type PipelineGetPipelinesByWorkspaceResponse = PipelineListResponse;

 /** Optional parameters. */
-export interface SqlScriptOperationsCreateOrUpdateSqlScriptOptionalParams
+export interface PipelineCreateOrUpdatePipelineOptionalParams
   extends coreClient.OperationOptions {
-  /** ETag of the SQL script entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */
+  /** ETag of the pipeline entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */
   ifMatch?: string;
   /** Delay to wait until next poll, in milliseconds. */
   updateIntervalInMs?: number;
@@ -11844,21 +12197,21 @@ export interface SqlScriptOperationsCreateOrUpdateSqlScriptOptionalParams
   resumeFrom?: string;
 }

-/** Contains response data for the createOrUpdateSqlScript operation. */
-export type SqlScriptOperationsCreateOrUpdateSqlScriptResponse = SqlScriptResource;
+/** Contains response data for the createOrUpdatePipeline operation. */
+export type PipelineCreateOrUpdatePipelineResponse = PipelineResource;

 /** Optional parameters. */
-export interface SqlScriptOperationsGetSqlScriptOptionalParams
+export interface PipelineGetPipelineOptionalParams
   extends coreClient.OperationOptions {
-  /** ETag of the sql compute entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */
+  /** ETag of the pipeline entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */
   ifNoneMatch?: string;
 }

-/** Contains response data for the getSqlScript operation. */
-export type SqlScriptOperationsGetSqlScriptResponse = SqlScriptResource;
+/** Contains response data for the getPipeline operation. */
+export type PipelineGetPipelineResponse = PipelineResource;

 /** Optional parameters. */
-export interface SqlScriptOperationsDeleteSqlScriptOptionalParams
+export interface PipelineDeletePipelineOptionalParams
   extends coreClient.OperationOptions {
   /** Delay to wait until next poll, in milliseconds. */
   updateIntervalInMs?: number;
@@ -11867,7 +12220,7 @@ export interface SqlScriptOperationsDeleteSqlScriptOptionalParams
 }

 /** Optional parameters. */
-export interface SqlScriptOperationsRenameSqlScriptOptionalParams
+export interface PipelineRenamePipelineOptionalParams
   extends coreClient.OperationOptions {
   /** Delay to wait until next poll, in milliseconds. */
   updateIntervalInMs?: number;
@@ -11876,21 +12229,65 @@ export interface SqlScriptOperationsRenameSqlScriptOptionalParams
 }

 /** Optional parameters. */
-export interface SqlScriptOperationsGetSqlScriptsByWorkspaceNextOptionalParams
+export interface PipelineCreatePipelineRunOptionalParams
+  extends coreClient.OperationOptions {
+  /** Parameters of the pipeline run. These parameters will be used only if the runId is not specified. */
+  parameters?: { [propertyName: string]: any };
+  /** The pipeline run identifier. If run ID is specified the parameters of the specified run will be used to create a new run. */
+  referencePipelineRunId?: string;
+  /** Recovery mode flag. If recovery mode is set to true, the specified referenced pipeline run and the new run will be grouped under the same groupId. */
+  isRecovery?: boolean;
+  /** In recovery mode, the rerun will start from this activity. If not specified, all activities will run. */
+  startActivityName?: string;
+}
+
+/** Contains response data for the createPipelineRun operation. */
+export type PipelineCreatePipelineRunResponse = CreateRunResponse;
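Note for reviewers: `PipelineCreatePipelineRunOptionalParams` above encodes the rerun semantics in its option bag: pass `referencePipelineRunId` plus `isRecovery` to group the new run under the failed run's groupId, and `startActivityName` to skip activities that already succeeded. A sketch (the `pipelineOperations.createPipelineRun` call shape mirrors the option bag; the pipeline and activity names are illustrative):

```ts
import { ArtifactsClient } from "@azure/synapse-artifacts";

async function rerunFromFailure(client: ArtifactsClient): Promise<void> {
  const run = await client.pipelineOperations.createPipelineRun("MyPipeline", {
    referencePipelineRunId: "<failed-run-id>",
    isRecovery: true,               // group with the referenced run
    startActivityName: "CopyToLake" // resume here; earlier activities are skipped
  });
  console.log(`new run: ${run.runId}`);
}
```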
+
+/** Optional parameters. */
+export interface PipelineGetPipelinesByWorkspaceNextOptionalParams
   extends coreClient.OperationOptions {}

-/** Contains response data for the getSqlScriptsByWorkspaceNext operation. */
-export type SqlScriptOperationsGetSqlScriptsByWorkspaceNextResponse = SqlScriptsListResponse;
+/** Contains response data for the getPipelinesByWorkspaceNext operation. */
+export type PipelineGetPipelinesByWorkspaceNextResponse = PipelineListResponse;
+
+/** Optional parameters. */
+export interface PipelineRunQueryPipelineRunsByWorkspaceOptionalParams
+  extends coreClient.OperationOptions {}
+
+/** Contains response data for the queryPipelineRunsByWorkspace operation. */
+export type PipelineRunQueryPipelineRunsByWorkspaceResponse = PipelineRunsQueryResponse;
+
+/** Optional parameters. */
+export interface PipelineRunGetPipelineRunOptionalParams
+  extends coreClient.OperationOptions {}
+
+/** Contains response data for the getPipelineRun operation. */
+export type PipelineRunGetPipelineRunResponse = PipelineRun;
+
+/** Optional parameters. */
+export interface PipelineRunQueryActivityRunsOptionalParams
+  extends coreClient.OperationOptions {}
+
+/** Contains response data for the queryActivityRuns operation. */
+export type PipelineRunQueryActivityRunsResponse = ActivityRunsQueryResponse;
+
+/** Optional parameters. */
+export interface PipelineRunCancelPipelineRunOptionalParams
+  extends coreClient.OperationOptions {
+  /** If true, cancel all the Child pipelines that are triggered by the current pipeline. */
+  isRecursive?: boolean;
+}
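Note for reviewers: the `PipelineRun*` types above are all single-shot calls; the notable one is cancel, whose `isRecursive` flag also stops child pipelines the run has triggered. A sketch (the `pipelineRunOperations` property name is assumed, and the query body with a lastUpdated window follows how run filters are modeled elsewhere in this SDK):

```ts
import { ArtifactsClient } from "@azure/synapse-artifacts";

async function cancelRecentRuns(client: ArtifactsClient): Promise<void> {
  const result = await client.pipelineRunOperations.queryPipelineRunsByWorkspace({
    lastUpdatedAfter: new Date(Date.now() - 24 * 60 * 60 * 1000), // last 24h
    lastUpdatedBefore: new Date()
  });

  for (const run of result.value ?? []) {
    if (run.status === "InProgress" && run.runId) {
      // isRecursive also cancels child pipelines triggered by this run.
      await client.pipelineRunOperations.cancelPipelineRun(run.runId, {
        isRecursive: true
      });
    }
  }
}
```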

 /** Optional parameters. */
-export interface SparkJobDefinitionOperationsGetSparkJobDefinitionsByWorkspaceOptionalParams
+export interface SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceOptionalParams
   extends coreClient.OperationOptions {}

 /** Contains response data for the getSparkJobDefinitionsByWorkspace operation. */
-export type SparkJobDefinitionOperationsGetSparkJobDefinitionsByWorkspaceResponse = SparkJobDefinitionsListResponse;
+export type SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceResponse = SparkJobDefinitionsListResponse;

 /** Optional parameters. */
-export interface SparkJobDefinitionOperationsCreateOrUpdateSparkJobDefinitionOptionalParams
+export interface SparkJobDefinitionCreateOrUpdateSparkJobDefinitionOptionalParams
   extends coreClient.OperationOptions {
   /** ETag of the Spark Job Definition entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */
   ifMatch?: string;
@@ -11901,20 +12298,20 @@ export interface SparkJobDefinitionOperationsCreateOrUpdateSparkJobDefinitionOpt
 }

 /** Contains response data for the createOrUpdateSparkJobDefinition operation. */
-export type SparkJobDefinitionOperationsCreateOrUpdateSparkJobDefinitionResponse = SparkJobDefinitionResource;
+export type SparkJobDefinitionCreateOrUpdateSparkJobDefinitionResponse = SparkJobDefinitionResource;

 /** Optional parameters. */
-export interface SparkJobDefinitionOperationsGetSparkJobDefinitionOptionalParams
+export interface SparkJobDefinitionGetSparkJobDefinitionOptionalParams
   extends coreClient.OperationOptions {
   /** ETag of the Spark Job Definition entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */
   ifNoneMatch?: string;
 }

 /** Contains response data for the getSparkJobDefinition operation. */
-export type SparkJobDefinitionOperationsGetSparkJobDefinitionResponse = SparkJobDefinitionResource;
+export type SparkJobDefinitionGetSparkJobDefinitionResponse = SparkJobDefinitionResource;

 /** Optional parameters. */
-export interface SparkJobDefinitionOperationsDeleteSparkJobDefinitionOptionalParams
+export interface SparkJobDefinitionDeleteSparkJobDefinitionOptionalParams
   extends coreClient.OperationOptions {
   /** Delay to wait until next poll, in milliseconds. */
   updateIntervalInMs?: number;
@@ -11923,7 +12320,7 @@ export interface SparkJobDefinitionOperationsDeleteSparkJobDefinitionOptionalPar
 }

 /** Optional parameters. */
-export interface SparkJobDefinitionOperationsExecuteSparkJobDefinitionOptionalParams
+export interface SparkJobDefinitionExecuteSparkJobDefinitionOptionalParams
   extends coreClient.OperationOptions {
   /** Delay to wait until next poll, in milliseconds. */
   updateIntervalInMs?: number;
@@ -11932,10 +12329,10 @@ export interface SparkJobDefinitionOperationsExecuteSparkJobDefinitionOptionalPa
 }

 /** Contains response data for the executeSparkJobDefinition operation. */
-export type SparkJobDefinitionOperationsExecuteSparkJobDefinitionResponse = SparkBatchJob;
+export type SparkJobDefinitionExecuteSparkJobDefinitionResponse = SparkBatchJob;

 /** Optional parameters. */
-export interface SparkJobDefinitionOperationsRenameSparkJobDefinitionOptionalParams
+export interface SparkJobDefinitionRenameSparkJobDefinitionOptionalParams
   extends coreClient.OperationOptions {
   /** Delay to wait until next poll, in milliseconds. */
   updateIntervalInMs?: number;
@@ -11944,7 +12341,7 @@ export interface SparkJobDefinitionOperationsRenameSparkJobDefinitionOptionalPar
 }

 /** Optional parameters. */
-export interface SparkJobDefinitionOperationsDebugSparkJobDefinitionOptionalParams
+export interface SparkJobDefinitionDebugSparkJobDefinitionOptionalParams
   extends coreClient.OperationOptions {
   /** Delay to wait until next poll, in milliseconds. */
   updateIntervalInMs?: number;
@@ -11953,33 +12350,40 @@ export interface SparkJobDefinitionOperationsDebugSparkJobDefinitionOptionalPara
 }

 /** Contains response data for the debugSparkJobDefinition operation. */
-export type SparkJobDefinitionOperationsDebugSparkJobDefinitionResponse = SparkBatchJob;
+export type SparkJobDefinitionDebugSparkJobDefinitionResponse = SparkBatchJob;
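Note for reviewers: unlike the other long-running operations here, executeSparkJobDefinition and debugSparkJobDefinition resolve to a `SparkBatchJob` rather than the stored resource, i.e. the poller tracks the submitted Livy batch job. A sketch (the method name follows the generated `begin*AndWait` convention and the definition name is illustrative):

```ts
import { ArtifactsClient } from "@azure/synapse-artifacts";

async function submitSparkJob(client: ArtifactsClient): Promise<void> {
  // Resolves once the batch job has been submitted; the result is the
  // Livy batch job description, not the SparkJobDefinitionResource itself.
  const job = await client.sparkJobDefinitionOperations.beginExecuteSparkJobDefinitionAndWait(
    "MySparkJobDefinition"
  );
  console.log(`Livy id: ${job.id}, state: ${job.state}`);
}
```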

 /** Optional parameters. */
-export interface SparkJobDefinitionOperationsGetSparkJobDefinitionsByWorkspaceNextOptionalParams
+export interface SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceNextOptionalParams
   extends coreClient.OperationOptions {}

 /** Contains response data for the getSparkJobDefinitionsByWorkspaceNext operation. */
-export type SparkJobDefinitionOperationsGetSparkJobDefinitionsByWorkspaceNextResponse = SparkJobDefinitionsListResponse;
+export type SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceNextResponse = SparkJobDefinitionsListResponse;

 /** Optional parameters. */
-export interface NotebookOperationsGetNotebooksByWorkspaceOptionalParams
+export interface SqlPoolsListOptionalParams
   extends coreClient.OperationOptions {}

-/** Contains response data for the getNotebooksByWorkspace operation. */
-export type NotebookOperationsGetNotebooksByWorkspaceResponse = NotebookListResponse;
+/** Contains response data for the list operation. */
+export type SqlPoolsListResponse = SqlPoolInfoListResult;

 /** Optional parameters. */
-export interface NotebookOperationsGetNotebookSummaryByWorkSpaceOptionalParams
+export interface SqlPoolsGetOptionalParams
   extends coreClient.OperationOptions {}

-/** Contains response data for the getNotebookSummaryByWorkSpace operation. */
-export type NotebookOperationsGetNotebookSummaryByWorkSpaceResponse = NotebookListResponse;
+/** Contains response data for the get operation. */
+export type SqlPoolsGetResponse = SqlPool;
+
+/** Optional parameters. */
+export interface SqlScriptGetSqlScriptsByWorkspaceOptionalParams
+  extends coreClient.OperationOptions {}
+
+/** Contains response data for the getSqlScriptsByWorkspace operation. */
+export type SqlScriptGetSqlScriptsByWorkspaceResponse = SqlScriptsListResponse;

 /** Optional parameters. */
-export interface NotebookOperationsCreateOrUpdateNotebookOptionalParams
+export interface SqlScriptCreateOrUpdateSqlScriptOptionalParams
   extends coreClient.OperationOptions {
-  /** ETag of the Notebook entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */
+  /** ETag of the SQL script entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */
   ifMatch?: string;
   /** Delay to wait until next poll, in milliseconds. */
   updateIntervalInMs?: number;
@@ -11987,21 +12391,21 @@ export interface NotebookOperationsCreateOrUpdateNotebookOptionalParams
   resumeFrom?: string;
 }

-/** Contains response data for the createOrUpdateNotebook operation. */
-export type NotebookOperationsCreateOrUpdateNotebookResponse = NotebookResource;
+/** Contains response data for the createOrUpdateSqlScript operation. */
+export type SqlScriptCreateOrUpdateSqlScriptResponse = SqlScriptResource;

 /** Optional parameters. */
-export interface NotebookOperationsGetNotebookOptionalParams
+export interface SqlScriptGetSqlScriptOptionalParams
   extends coreClient.OperationOptions {
-  /** ETag of the Notebook entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */
+  /** ETag of the SQL script entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */
   ifNoneMatch?: string;
 }

-/** Contains response data for the getNotebook operation. */
-export type NotebookOperationsGetNotebookResponse = NotebookResource;
+/** Contains response data for the getSqlScript operation. */
+export type SqlScriptGetSqlScriptResponse = SqlScriptResource;

 /** Optional parameters. */
-export interface NotebookOperationsDeleteNotebookOptionalParams
+export interface SqlScriptDeleteSqlScriptOptionalParams
   extends coreClient.OperationOptions {
   /** Delay to wait until next poll, in milliseconds. */
   updateIntervalInMs?: number;
@@ -12010,7 +12414,7 @@ export interface NotebookOperationsDeleteNotebookOptionalParams
 }

 /** Optional parameters. */
-export interface NotebookOperationsRenameNotebookOptionalParams
+export interface SqlScriptRenameSqlScriptOptionalParams
   extends coreClient.OperationOptions {
   /** Delay to wait until next poll, in milliseconds. */
   updateIntervalInMs?: number;
@@ -12019,77 +12423,73 @@ export interface NotebookOperationsRenameNotebookOptionalParams
 }

 /** Optional parameters. */
-export interface NotebookOperationsGetNotebooksByWorkspaceNextOptionalParams
-  extends coreClient.OperationOptions {}
-
-/** Contains response data for the getNotebooksByWorkspaceNext operation. */
-export type NotebookOperationsGetNotebooksByWorkspaceNextResponse = NotebookListResponse;
-
-/** Optional parameters. */
-export interface NotebookOperationsGetNotebookSummaryByWorkSpaceNextOptionalParams
-  extends coreClient.OperationOptions {}
-
-/** Contains response data for the getNotebookSummaryByWorkSpaceNext operation. */
-export type NotebookOperationsGetNotebookSummaryByWorkSpaceNextResponse = NotebookListResponse;
-
-/** Optional parameters. */
-export interface WorkspaceOperationsGetOptionalParams
+export interface SqlScriptGetSqlScriptsByWorkspaceNextOptionalParams
   extends coreClient.OperationOptions {}

-/** Contains response data for the get operation. */
-export type WorkspaceOperationsGetResponse = Workspace;
-
-/** Optional parameters. */
-export interface SqlPoolsListOptionalParams
-  extends coreClient.OperationOptions {}
-
-/** Contains response data for the list operation. */
-export type SqlPoolsListResponse = SqlPoolInfoListResult;
+/** Contains response data for the getSqlScriptsByWorkspaceNext operation. */
+export type SqlScriptGetSqlScriptsByWorkspaceNextResponse = SqlScriptsListResponse;

 /** Optional parameters. */
-export interface SqlPoolsGetOptionalParams
+export interface TriggerGetTriggersByWorkspaceOptionalParams
   extends coreClient.OperationOptions {}

-/** Contains response data for the get operation. */
-export type SqlPoolsGetResponse = SqlPool;
+/** Contains response data for the getTriggersByWorkspace operation. */
+export type TriggerGetTriggersByWorkspaceResponse = TriggerListResponse;

 /** Optional parameters. */
-export interface BigDataPoolsListOptionalParams
-  extends coreClient.OperationOptions {}
+export interface TriggerCreateOrUpdateTriggerOptionalParams
+  extends coreClient.OperationOptions {
+  /** ETag of the trigger entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */
+  ifMatch?: string;
+  /** Delay to wait until next poll, in milliseconds. */
+  updateIntervalInMs?: number;
+  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
+  resumeFrom?: string;
+}

-/** Contains response data for the list operation. */
-export type BigDataPoolsListResponse = BigDataPoolResourceInfoListResult;
+/** Contains response data for the createOrUpdateTrigger operation. */
+export type TriggerCreateOrUpdateTriggerResponse = TriggerResource;

 /** Optional parameters. */
-export interface BigDataPoolsGetOptionalParams
-  extends coreClient.OperationOptions {}
+export interface TriggerGetTriggerOptionalParams
+  extends coreClient.OperationOptions {
+  /** ETag of the trigger entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */
+  ifNoneMatch?: string;
+}

-/** Contains response data for the get operation. */
-export type BigDataPoolsGetResponse = BigDataPoolResourceInfo;
+/** Contains response data for the getTrigger operation. */
+export type TriggerGetTriggerResponse = TriggerResource;

 /** Optional parameters. */
-export interface IntegrationRuntimesListOptionalParams
-  extends coreClient.OperationOptions {}
-
-/** Contains response data for the list operation. */
-export type IntegrationRuntimesListResponse = IntegrationRuntimeListResponse;
+export interface TriggerDeleteTriggerOptionalParams
+  extends coreClient.OperationOptions {
+  /** Delay to wait until next poll, in milliseconds. */
+  updateIntervalInMs?: number;
+  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
+  resumeFrom?: string;
+}
*/ -export interface IntegrationRuntimesGetOptionalParams - extends coreClient.OperationOptions {} +export interface TriggerSubscribeTriggerToEventsOptionalParams + extends coreClient.OperationOptions { + /** Delay to wait until next poll, in milliseconds. */ + updateIntervalInMs?: number; + /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */ + resumeFrom?: string; +} -/** Contains response data for the get operation. */ -export type IntegrationRuntimesGetResponse = IntegrationRuntimeResource; +/** Contains response data for the subscribeTriggerToEvents operation. */ +export type TriggerSubscribeTriggerToEventsResponse = TriggerSubscriptionOperationStatus; /** Optional parameters. */ -export interface LibraryListOptionalParams +export interface TriggerGetEventSubscriptionStatusOptionalParams extends coreClient.OperationOptions {} -/** Contains response data for the list operation. */ -export type LibraryListOperationResponse = LibraryListResponse; +/** Contains response data for the getEventSubscriptionStatus operation. */ +export type TriggerGetEventSubscriptionStatusResponse = TriggerSubscriptionOperationStatus; /** Optional parameters. */ -export interface LibraryFlushOptionalParams +export interface TriggerUnsubscribeTriggerFromEventsOptionalParams extends coreClient.OperationOptions { /** Delay to wait until next poll, in milliseconds. */ updateIntervalInMs?: number; @@ -12097,15 +12497,11 @@ export interface LibraryFlushOptionalParams resumeFrom?: string; } -/** Optional parameters. */ -export interface LibraryGetOperationResultOptionalParams - extends coreClient.OperationOptions {} - -/** Contains response data for the getOperationResult operation. */ -export type LibraryGetOperationResultResponse = LibraryResource; +/** Contains response data for the unsubscribeTriggerFromEvents operation. */ +export type TriggerUnsubscribeTriggerFromEventsResponse = TriggerSubscriptionOperationStatus; /** Optional parameters. */ -export interface LibraryDeleteOptionalParams +export interface TriggerStartTriggerOptionalParams extends coreClient.OperationOptions { /** Delay to wait until next poll, in milliseconds. */ updateIntervalInMs?: number; @@ -12114,13 +12510,7 @@ export interface LibraryDeleteOptionalParams } /** Optional parameters. */ -export interface LibraryGetOptionalParams extends coreClient.OperationOptions {} - -/** Contains response data for the get operation. */ -export type LibraryGetResponse = LibraryResource; - -/** Optional parameters. */ -export interface LibraryCreateOptionalParams +export interface TriggerStopTriggerOptionalParams extends coreClient.OperationOptions { /** Delay to wait until next poll, in milliseconds. */ updateIntervalInMs?: number; @@ -12129,34 +12519,37 @@ export interface LibraryCreateOptionalParams } /** Optional parameters. */ -export interface LibraryAppendOptionalParams - extends coreClient.OperationOptions { - /** Set this header to a byte offset at which the block is expected to be appended. The request succeeds only if the current offset matches this value. Otherwise, the request fails with the AppendPositionConditionNotMet error (HTTP status code 412 – Precondition Failed) */ - blobConditionAppendPosition?: number; -} +export interface TriggerGetTriggersByWorkspaceNextOptionalParams + extends coreClient.OperationOptions {} + +/** Contains response data for the getTriggersByWorkspaceNext operation. */ +export type TriggerGetTriggersByWorkspaceNextResponse = TriggerListResponse; /** Optional parameters. 
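// ---------------------------------------------------------------------------
// [Editor's sketch, not generated code] The Trigger* option bags above follow
// the same ETag + LRO pattern, and event triggers add a subscribe / status /
// unsubscribe lifecycle. Reusing the hypothetical `client` from the earlier
// sketch and assuming the group is exposed as `triggerOperations`:
const sub = await client.triggerOperations.beginSubscribeTriggerToEventsAndWait(
  "myEventTrigger",
  { updateIntervalInMs: 2000 } // LRO polling interval from the options above
);
console.log(sub.status); // TriggerSubscriptionOperationStatus.status

// One-shot probe, no polling involved.
await client.triggerOperations.getEventSubscriptionStatus("myEventTrigger");

// Start the trigger once subscribed; stop/unsubscribe mirror this shape.
await client.triggerOperations.beginStartTriggerAndWait("myEventTrigger");
// ---------------------------------------------------------------------------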
*/ -export interface LibraryListNextOptionalParams +export interface TriggerRunRerunTriggerInstanceOptionalParams extends coreClient.OperationOptions {} -/** Contains response data for the listNext operation. */ -export type LibraryListNextResponse = LibraryListResponse; +/** Optional parameters. */ +export interface TriggerRunCancelTriggerInstanceOptionalParams + extends coreClient.OperationOptions {} /** Optional parameters. */ -export interface WorkspaceGitRepoManagementGetGitHubAccessTokenOptionalParams - extends coreClient.OperationOptions { - /** Can provide a guid, which is helpful for debugging and to provide better customer support */ - clientRequestId?: string; -} +export interface TriggerRunQueryTriggerRunsByWorkspaceOptionalParams + extends coreClient.OperationOptions {} -/** Contains response data for the getGitHubAccessToken operation. */ -export type WorkspaceGitRepoManagementGetGitHubAccessTokenResponse = GitHubAccessTokenResponse; +/** Contains response data for the queryTriggerRunsByWorkspace operation. */ +export type TriggerRunQueryTriggerRunsByWorkspaceResponse = TriggerRunsQueryResponse; + +/** Optional parameters. */ +export interface WorkspaceGetOptionalParams + extends coreClient.OperationOptions {} + +/** Contains response data for the get operation. */ +export type WorkspaceGetResponse = Workspace; /** Optional parameters. */ export interface ArtifactsClientOptionalParams extends coreClient.ServiceClientOptions { - /** Api Version */ - apiVersion?: string; /** Overrides client endpoint. */ endpoint?: string; } diff --git a/sdk/synapse/synapse-artifacts/src/models/mappers.ts b/sdk/synapse/synapse-artifacts/src/models/mappers.ts index c6f7072e9309..430eef5847a7 100644 --- a/sdk/synapse/synapse-artifacts/src/models/mappers.ts +++ b/sdk/synapse/synapse-artifacts/src/models/mappers.ts @@ -8,20 +8,19 @@ import * as coreClient from "@azure/core-client"; -export const LinkedServiceListResponse: coreClient.CompositeMapper = { +export const KqlScriptsResourceCollectionResponse: coreClient.CompositeMapper = { type: { name: "Composite", - className: "LinkedServiceListResponse", + className: "KqlScriptsResourceCollectionResponse", modelProperties: { value: { serializedName: "value", - required: true, type: { name: "Sequence", element: { type: { name: "Composite", - className: "LinkedServiceResource" + className: "KqlScriptResource" } } } @@ -36,177 +35,185 @@ export const LinkedServiceListResponse: coreClient.CompositeMapper = { } }; -export const Resource: coreClient.CompositeMapper = { +export const KqlScriptResource: coreClient.CompositeMapper = { type: { name: "Composite", - className: "Resource", + className: "KqlScriptResource", modelProperties: { id: { serializedName: "id", - readOnly: true, type: { name: "String" } }, name: { serializedName: "name", - readOnly: true, type: { name: "String" } }, type: { serializedName: "type", - readOnly: true, type: { name: "String" } + }, + properties: { + serializedName: "properties", + type: { + name: "Composite", + className: "KqlScript" + } } } } }; -export const LinkedService: coreClient.CompositeMapper = { - serializedName: "LinkedService", +export const KqlScript: coreClient.CompositeMapper = { type: { name: "Composite", - className: "LinkedService", - uberParent: "LinkedService", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: { - serializedName: "type", - clientName: "type" - }, + className: "KqlScript", modelProperties: { - type: { - serializedName: "type", - required: true, - type: { - 
name: "String" - } - }, - connectVia: { - serializedName: "connectVia", + content: { + serializedName: "content", type: { name: "Composite", - className: "IntegrationRuntimeReference" + className: "KqlScriptContent" } - }, - description: { - serializedName: "description", + } + } + } +}; + +export const KqlScriptContent: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "KqlScriptContent", + modelProperties: { + query: { + serializedName: "query", type: { name: "String" } }, - parameters: { - serializedName: "parameters", + metadata: { + serializedName: "metadata", type: { - name: "Dictionary", - value: { - type: { name: "Composite", className: "ParameterSpecification" } - } + name: "Composite", + className: "KqlScriptContentMetadata" } }, - annotations: { - serializedName: "annotations", + currentConnection: { + serializedName: "currentConnection", type: { - name: "Sequence", - element: { - type: { - name: "any" - } - } + name: "Composite", + className: "KqlScriptContentCurrentConnection" } } } } }; -export const IntegrationRuntimeReference: coreClient.CompositeMapper = { +export const KqlScriptContentMetadata: coreClient.CompositeMapper = { type: { name: "Composite", - className: "IntegrationRuntimeReference", + className: "KqlScriptContentMetadata", modelProperties: { - type: { - serializedName: "type", - required: true, + language: { + serializedName: "language", type: { name: "String" } - }, - referenceName: { - serializedName: "referenceName", - required: true, + } + } + } +}; + +export const KqlScriptContentCurrentConnection: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "KqlScriptContentCurrentConnection", + modelProperties: { + name: { + serializedName: "name", type: { name: "String" } }, - parameters: { - serializedName: "parameters", + type: { + serializedName: "type", type: { - name: "Dictionary", - value: { type: { name: "any" } } + name: "String" } } } } }; -export const ParameterSpecification: coreClient.CompositeMapper = { +export const ErrorContract: coreClient.CompositeMapper = { type: { name: "Composite", - className: "ParameterSpecification", + className: "ErrorContract", modelProperties: { - type: { - serializedName: "type", - required: true, - type: { - name: "String" - } - }, - defaultValue: { - serializedName: "defaultValue", + error: { + serializedName: "error", type: { - name: "any" + name: "Composite", + className: "ErrorResponse" } } } } }; -export const CloudError: coreClient.CompositeMapper = { +export const ErrorResponse: coreClient.CompositeMapper = { type: { name: "Composite", - className: "CloudError", + className: "ErrorResponse", modelProperties: { code: { - serializedName: "error.code", - required: true, + serializedName: "code", + readOnly: true, type: { name: "String" } }, message: { - serializedName: "error.message", - required: true, + serializedName: "message", + readOnly: true, type: { name: "String" } }, target: { - serializedName: "error.target", + serializedName: "target", + readOnly: true, type: { name: "String" } }, details: { - serializedName: "error.details", + serializedName: "details", + readOnly: true, type: { name: "Sequence", element: { type: { name: "Composite", - className: "CloudError" + className: "ErrorResponse" + } + } + } + }, + additionalInfo: { + serializedName: "additionalInfo", + readOnly: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ErrorAdditionalInfo" } } } @@ -215,6 +222,29 @@ export const CloudError: 
coreClient.CompositeMapper = { } }; +export const ErrorAdditionalInfo: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "ErrorAdditionalInfo", + modelProperties: { + type: { + serializedName: "type", + readOnly: true, + type: { + name: "String" + } + }, + info: { + serializedName: "info", + readOnly: true, + type: { + name: "any" + } + } + } + } +}; + export const ArtifactRenameRequest: coreClient.CompositeMapper = { type: { name: "Composite", @@ -235,10 +265,10 @@ export const ArtifactRenameRequest: coreClient.CompositeMapper = { } }; -export const DatasetListResponse: coreClient.CompositeMapper = { +export const SparkConfigurationListResponse: coreClient.CompositeMapper = { type: { name: "Composite", - className: "DatasetListResponse", + className: "SparkConfigurationListResponse", modelProperties: { value: { serializedName: "value", @@ -248,7 +278,7 @@ export const DatasetListResponse: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "DatasetResource" + className: "SparkConfigurationResource" } } } @@ -263,57 +293,53 @@ export const DatasetListResponse: coreClient.CompositeMapper = { } }; -export const Dataset: coreClient.CompositeMapper = { - serializedName: "Dataset", +export const Resource: coreClient.CompositeMapper = { type: { name: "Composite", - className: "Dataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: { - serializedName: "type", - clientName: "type" - }, + className: "Resource", modelProperties: { - type: { - serializedName: "type", - required: true, + id: { + serializedName: "id", + readOnly: true, type: { name: "String" } }, - description: { - serializedName: "description", + name: { + serializedName: "name", + readOnly: true, type: { name: "String" } }, - structure: { - serializedName: "structure", - type: { - name: "any" - } - }, - schema: { - serializedName: "schema", + type: { + serializedName: "type", + readOnly: true, type: { - name: "any" + name: "String" } - }, - linkedServiceName: { - serializedName: "linkedServiceName", + } + } + } +}; + +export const SparkConfiguration: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "SparkConfiguration", + modelProperties: { + description: { + serializedName: "description", type: { - name: "Composite", - className: "LinkedServiceReference" + name: "String" } }, - parameters: { - serializedName: "parameters", + configs: { + serializedName: "configs", + required: true, type: { name: "Dictionary", - value: { - type: { name: "Composite", className: "ParameterSpecification" } - } + value: { type: { name: "String" } } } }, annotations: { @@ -322,211 +348,261 @@ export const Dataset: coreClient.CompositeMapper = { name: "Sequence", element: { type: { - name: "any" + name: "String" } } } }, - folder: { - serializedName: "folder", + notes: { + serializedName: "notes", type: { - name: "Composite", - className: "DatasetFolder" + name: "String" + } + }, + createdBy: { + serializedName: "createdBy", + type: { + name: "String" + } + }, + created: { + serializedName: "created", + type: { + name: "DateTime" + } + }, + configMergeRule: { + serializedName: "configMergeRule", + type: { + name: "Dictionary", + value: { type: { name: "String" } } } } } } }; -export const LinkedServiceReference: coreClient.CompositeMapper = { +export const CloudError: coreClient.CompositeMapper = { type: { name: "Composite", - className: "LinkedServiceReference", + className: "CloudError", modelProperties: { - type: 
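// ---------------------------------------------------------------------------
// [Editor's note, illustrative only] Per the SparkConfiguration mapper above,
// `configs` is the only required member; everything else is optional
// metadata. All values below are made up:
import type { SparkConfiguration } from "@azure/synapse-artifacts";

const sparkConf: SparkConfiguration = {
  description: "Tuned shuffle settings",
  configs: {
    "spark.sql.shuffle.partitions": "200",
    "spark.executor.memoryOverhead": "1g"
  },
  annotations: ["perf"],
  notes: "Example values only",
  createdBy: "jane@contoso.com",
  created: new Date("2021-10-05T00:00:00Z")
};
// ---------------------------------------------------------------------------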
{ - serializedName: "type", + code: { + serializedName: "error.code", required: true, type: { name: "String" } }, - referenceName: { - serializedName: "referenceName", + message: { + serializedName: "error.message", required: true, type: { name: "String" } }, - parameters: { - serializedName: "parameters", + target: { + serializedName: "error.target", type: { - name: "Dictionary", - value: { type: { name: "any" } } + name: "String" + } + }, + details: { + serializedName: "error.details", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "CloudError" + } + } } } } } }; -export const DatasetFolder: coreClient.CompositeMapper = { +export const BigDataPoolResourceInfoListResult: coreClient.CompositeMapper = { type: { name: "Composite", - className: "DatasetFolder", + className: "BigDataPoolResourceInfoListResult", modelProperties: { - name: { - serializedName: "name", + nextLink: { + serializedName: "nextLink", type: { name: "String" } - } - } - } -}; - -export const PipelineListResponse: coreClient.CompositeMapper = { - type: { - name: "Composite", - className: "PipelineListResponse", - modelProperties: { + }, value: { serializedName: "value", - required: true, type: { name: "Sequence", element: { type: { name: "Composite", - className: "PipelineResource" + className: "BigDataPoolResourceInfo" } } } - }, - nextLink: { - serializedName: "nextLink", - type: { - name: "String" - } } } } }; -export const Activity: coreClient.CompositeMapper = { - serializedName: "Activity", +export const AutoScaleProperties: coreClient.CompositeMapper = { type: { name: "Composite", - className: "Activity", - uberParent: "Activity", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: { - serializedName: "type", - clientName: "type" - }, + className: "AutoScaleProperties", modelProperties: { - name: { - serializedName: "name", - required: true, + minNodeCount: { + serializedName: "minNodeCount", type: { - name: "String" + name: "Number" } }, - type: { - serializedName: "type", - required: true, + enabled: { + serializedName: "enabled", type: { - name: "String" + name: "Boolean" } }, - description: { - serializedName: "description", + maxNodeCount: { + serializedName: "maxNodeCount", type: { - name: "String" + name: "Number" } - }, - dependsOn: { - serializedName: "dependsOn", + } + } + } +}; + +export const AutoPauseProperties: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "AutoPauseProperties", + modelProperties: { + delayInMinutes: { + serializedName: "delayInMinutes", type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "ActivityDependency" - } - } + name: "Number" } }, - userProperties: { - serializedName: "userProperties", + enabled: { + serializedName: "enabled", type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "UserProperty" - } - } + name: "Boolean" } } } } }; -export const ActivityDependency: coreClient.CompositeMapper = { +export const DynamicExecutorAllocation: coreClient.CompositeMapper = { type: { name: "Composite", - className: "ActivityDependency", - additionalProperties: { type: { name: "Object" } }, + className: "DynamicExecutorAllocation", modelProperties: { - activity: { - serializedName: "activity", - required: true, + enabled: { + serializedName: "enabled", + type: { + name: "Boolean" + } + } + } + } +}; + +export const LibraryRequirements: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: 
"LibraryRequirements", + modelProperties: { + time: { + serializedName: "time", + readOnly: true, + type: { + name: "DateTime" + } + }, + content: { + serializedName: "content", type: { name: "String" } }, - dependencyConditions: { - serializedName: "dependencyConditions", - required: true, + filename: { + serializedName: "filename", type: { - name: "Sequence", - element: { - type: { - name: "String" - } - } + name: "String" } } } } }; -export const UserProperty: coreClient.CompositeMapper = { +export const LibraryInfo: coreClient.CompositeMapper = { type: { name: "Composite", - className: "UserProperty", + className: "LibraryInfo", modelProperties: { name: { serializedName: "name", - required: true, type: { name: "String" } }, - value: { - serializedName: "value", - required: true, + path: { + serializedName: "path", type: { - name: "any" + name: "String" + } + }, + containerName: { + serializedName: "containerName", + type: { + name: "String" + } + }, + uploadedTimestamp: { + serializedName: "uploadedTimestamp", + type: { + name: "DateTime" + } + }, + type: { + serializedName: "type", + type: { + name: "String" + } + }, + provisioningStatus: { + serializedName: "provisioningStatus", + readOnly: true, + type: { + name: "String" + } + }, + creatorId: { + serializedName: "creatorId", + readOnly: true, + type: { + name: "String" } } } } }; -export const VariableSpecification: coreClient.CompositeMapper = { +export const DataFlow: coreClient.CompositeMapper = { type: { name: "Composite", - className: "VariableSpecification", + className: "DataFlow", + uberParent: "DataFlow", + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, modelProperties: { type: { serializedName: "type", @@ -535,20 +611,38 @@ export const VariableSpecification: coreClient.CompositeMapper = { name: "String" } }, - defaultValue: { - serializedName: "defaultValue", + description: { + serializedName: "description", type: { - name: "any" + name: "String" + } + }, + annotations: { + serializedName: "annotations", + type: { + name: "Sequence", + element: { + type: { + name: "any" + } + } + } + }, + folder: { + serializedName: "folder", + type: { + name: "Composite", + className: "DataFlowFolder" } } } } }; -export const PipelineFolder: coreClient.CompositeMapper = { +export const DataFlowFolder: coreClient.CompositeMapper = { type: { name: "Composite", - className: "PipelineFolder", + className: "DataFlowFolder", modelProperties: { name: { serializedName: "name", @@ -560,125 +654,145 @@ export const PipelineFolder: coreClient.CompositeMapper = { } }; -export const CreateRunResponse: coreClient.CompositeMapper = { +export const CloudErrorAutoGenerated: coreClient.CompositeMapper = { type: { name: "Composite", - className: "CreateRunResponse", + className: "CloudErrorAutoGenerated", modelProperties: { - runId: { - serializedName: "runId", + code: { + serializedName: "error.code", required: true, type: { name: "String" } - } - } - } -}; - -export const RunFilterParameters: coreClient.CompositeMapper = { - type: { - name: "Composite", - className: "RunFilterParameters", - modelProperties: { - continuationToken: { - serializedName: "continuationToken", - type: { - name: "String" - } }, - lastUpdatedAfter: { - serializedName: "lastUpdatedAfter", + message: { + serializedName: "error.message", required: true, type: { - name: "DateTime" + name: "String" } }, - lastUpdatedBefore: { - serializedName: "lastUpdatedBefore", - required: true, + target: { + serializedName: "error.target", type: { - name: 
"DateTime" + name: "String" } }, - filters: { - serializedName: "filters", + details: { + serializedName: "error.details", type: { name: "Sequence", element: { type: { name: "Composite", - className: "RunQueryFilter" + className: "CloudErrorAutoGenerated" } } } - }, - orderBy: { - serializedName: "orderBy", + } + } + } +}; + +export const DataFlowListResponse: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowListResponse", + modelProperties: { + value: { + serializedName: "value", + required: true, type: { name: "Sequence", element: { type: { name: "Composite", - className: "RunQueryOrderBy" + className: "DataFlowResource" } } } + }, + nextLink: { + serializedName: "nextLink", + type: { + name: "String" + } } } } }; -export const RunQueryFilter: coreClient.CompositeMapper = { +export const CreateDataFlowDebugSessionRequest: coreClient.CompositeMapper = { type: { name: "Composite", - className: "RunQueryFilter", + className: "CreateDataFlowDebugSessionRequest", modelProperties: { - operand: { - serializedName: "operand", - required: true, + computeType: { + serializedName: "computeType", type: { name: "String" } }, - operator: { - serializedName: "operator", - required: true, + coreCount: { + serializedName: "coreCount", type: { - name: "String" + name: "Number" } }, - values: { - serializedName: "values", - required: true, + timeToLive: { + serializedName: "timeToLive", type: { - name: "Sequence", - element: { - type: { - name: "String" - } - } + name: "Number" + } + }, + integrationRuntime: { + serializedName: "integrationRuntime", + type: { + name: "Composite", + className: "IntegrationRuntimeDebugResource" } } } } }; -export const RunQueryOrderBy: coreClient.CompositeMapper = { +export const SubResourceDebugResource: coreClient.CompositeMapper = { type: { name: "Composite", - className: "RunQueryOrderBy", + className: "SubResourceDebugResource", modelProperties: { - orderBy: { - serializedName: "orderBy", + name: { + serializedName: "name", + type: { + name: "String" + } + } + } + } +}; + +export const IntegrationRuntime: coreClient.CompositeMapper = { + serializedName: "IntegrationRuntime", + type: { + name: "Composite", + className: "IntegrationRuntime", + uberParent: "IntegrationRuntime", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + type: { + serializedName: "type", required: true, type: { name: "String" } }, - order: { - serializedName: "order", - required: true, + description: { + serializedName: "description", type: { name: "String" } @@ -687,26 +801,40 @@ export const RunQueryOrderBy: coreClient.CompositeMapper = { } }; -export const PipelineRunsQueryResponse: coreClient.CompositeMapper = { +export const CreateDataFlowDebugSessionResponse: coreClient.CompositeMapper = { type: { name: "Composite", - className: "PipelineRunsQueryResponse", + className: "CreateDataFlowDebugSessionResponse", + modelProperties: { + sessionId: { + serializedName: "sessionId", + type: { + name: "String" + } + } + } + } +}; + +export const QueryDataFlowDebugSessionsResponse: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "QueryDataFlowDebugSessionsResponse", modelProperties: { value: { serializedName: "value", - required: true, type: { name: "Sequence", element: { type: { name: "Composite", - className: "PipelineRun" + className: "DataFlowDebugSessionInfo" } } } }, - continuationToken: { - serializedName: "continuationToken", + 
nextLink: { + serializedName: "nextLink", type: { name: "String" } @@ -715,94 +843,62 @@ export const PipelineRunsQueryResponse: coreClient.CompositeMapper = { } }; -export const PipelineRun: coreClient.CompositeMapper = { +export const DataFlowDebugSessionInfo: coreClient.CompositeMapper = { type: { name: "Composite", - className: "PipelineRun", + className: "DataFlowDebugSessionInfo", additionalProperties: { type: { name: "Object" } }, modelProperties: { - runId: { - serializedName: "runId", - readOnly: true, - type: { - name: "String" - } - }, - runGroupId: { - serializedName: "runGroupId", - readOnly: true, + dataFlowName: { + serializedName: "dataFlowName", type: { name: "String" } }, - isLatest: { - serializedName: "isLatest", - readOnly: true, - type: { - name: "Boolean" - } - }, - pipelineName: { - serializedName: "pipelineName", - readOnly: true, + computeType: { + serializedName: "computeType", type: { name: "String" } }, - parameters: { - serializedName: "parameters", - readOnly: true, + coreCount: { + serializedName: "coreCount", type: { - name: "Dictionary", - value: { type: { name: "String" } } + name: "Number" } }, - invokedBy: { - serializedName: "invokedBy", + nodeCount: { + serializedName: "nodeCount", type: { - name: "Composite", - className: "PipelineRunInvokedBy" + name: "Number" } }, - lastUpdated: { - serializedName: "lastUpdated", - readOnly: true, + integrationRuntimeName: { + serializedName: "integrationRuntimeName", type: { - name: "DateTime" + name: "String" } }, - runStart: { - serializedName: "runStart", - readOnly: true, + sessionId: { + serializedName: "sessionId", type: { - name: "DateTime" + name: "String" } }, - runEnd: { - serializedName: "runEnd", - readOnly: true, - nullable: true, + startTime: { + serializedName: "startTime", type: { - name: "DateTime" + name: "String" } }, - durationInMs: { - serializedName: "durationInMs", - readOnly: true, + timeToLiveInMinutes: { + serializedName: "timeToLiveInMinutes", type: { name: "Number" } }, - status: { - serializedName: "status", - readOnly: true, - type: { - name: "String" - } - }, - message: { - serializedName: "message", - readOnly: true, + lastActivityTime: { + serializedName: "lastActivityTime", type: { name: "String" } @@ -811,185 +907,201 @@ export const PipelineRun: coreClient.CompositeMapper = { } }; -export const PipelineRunInvokedBy: coreClient.CompositeMapper = { +export const DataFlowDebugPackage: coreClient.CompositeMapper = { type: { name: "Composite", - className: "PipelineRunInvokedBy", + className: "DataFlowDebugPackage", + additionalProperties: { type: { name: "Object" } }, modelProperties: { - name: { - serializedName: "name", - readOnly: true, + sessionId: { + serializedName: "sessionId", type: { name: "String" } }, - id: { - serializedName: "id", - readOnly: true, + dataFlow: { + serializedName: "dataFlow", type: { - name: "String" + name: "Composite", + className: "DataFlowDebugResource" } }, - invokedByType: { - serializedName: "invokedByType", - readOnly: true, + datasets: { + serializedName: "datasets", type: { - name: "String" - } - } - } - } -}; - -export const ActivityRunsQueryResponse: coreClient.CompositeMapper = { - type: { - name: "Composite", - className: "ActivityRunsQueryResponse", - modelProperties: { - value: { - serializedName: "value", - required: true, + name: "Sequence", + element: { + type: { + name: "Composite", + className: "DatasetDebugResource" + } + } + } + }, + linkedServices: { + serializedName: "linkedServices", type: { name: "Sequence", element: { type: { 
name: "Composite", - className: "ActivityRun" + className: "LinkedServiceDebugResource" } } } }, - continuationToken: { - serializedName: "continuationToken", + staging: { + serializedName: "staging", type: { - name: "String" + name: "Composite", + className: "DataFlowStagingInfo" + } + }, + debugSettings: { + serializedName: "debugSettings", + type: { + name: "Composite", + className: "DataFlowDebugPackageDebugSettings" } } } } }; -export const ActivityRun: coreClient.CompositeMapper = { +export const Dataset: coreClient.CompositeMapper = { + serializedName: "Dataset", type: { name: "Composite", - className: "ActivityRun", + className: "Dataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, modelProperties: { - pipelineName: { - serializedName: "pipelineName", - readOnly: true, - type: { - name: "String" - } - }, - pipelineRunId: { - serializedName: "pipelineRunId", - readOnly: true, + type: { + serializedName: "type", + required: true, type: { name: "String" } }, - activityName: { - serializedName: "activityName", - readOnly: true, + description: { + serializedName: "description", type: { name: "String" } }, - activityType: { - serializedName: "activityType", - readOnly: true, + structure: { + serializedName: "structure", type: { - name: "String" + name: "any" } }, - activityRunId: { - serializedName: "activityRunId", - readOnly: true, + schema: { + serializedName: "schema", type: { - name: "String" + name: "any" } }, linkedServiceName: { serializedName: "linkedServiceName", - readOnly: true, - type: { - name: "String" - } - }, - status: { - serializedName: "status", - readOnly: true, type: { - name: "String" + name: "Composite", + className: "LinkedServiceReference" } }, - activityRunStart: { - serializedName: "activityRunStart", - readOnly: true, + parameters: { + serializedName: "parameters", type: { - name: "DateTime" + name: "Dictionary", + value: { + type: { name: "Composite", className: "ParameterSpecification" } + } } }, - activityRunEnd: { - serializedName: "activityRunEnd", - readOnly: true, + annotations: { + serializedName: "annotations", type: { - name: "DateTime" + name: "Sequence", + element: { + type: { + name: "any" + } + } } }, - durationInMs: { - serializedName: "durationInMs", - readOnly: true, + folder: { + serializedName: "folder", type: { - name: "Number" + name: "Composite", + className: "DatasetFolder" } - }, - input: { - serializedName: "input", - readOnly: true, + } + } + } +}; + +export const LinkedServiceReference: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "LinkedServiceReference", + modelProperties: { + type: { + serializedName: "type", + required: true, type: { - name: "any" + name: "String" } }, - output: { - serializedName: "output", - readOnly: true, + referenceName: { + serializedName: "referenceName", + required: true, type: { - name: "any" + name: "String" } }, - error: { - serializedName: "error", - readOnly: true, + parameters: { + serializedName: "parameters", type: { - name: "any" + name: "Dictionary", + value: { type: { name: "any" } } } } } } }; -export const TriggerListResponse: coreClient.CompositeMapper = { +export const ParameterSpecification: coreClient.CompositeMapper = { type: { name: "Composite", - className: "TriggerListResponse", + className: "ParameterSpecification", modelProperties: { - value: { - serializedName: "value", + type: { + serializedName: "type", required: true, type: { - name: 
"Sequence", - element: { - type: { - name: "Composite", - className: "TriggerResource" - } - } + name: "String" } }, - nextLink: { - serializedName: "nextLink", + defaultValue: { + serializedName: "defaultValue", + type: { + name: "any" + } + } + } + } +}; + +export const DatasetFolder: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "DatasetFolder", + modelProperties: { + name: { + serializedName: "name", type: { name: "String" } @@ -998,12 +1110,12 @@ export const TriggerListResponse: coreClient.CompositeMapper = { } }; -export const Trigger: coreClient.CompositeMapper = { - serializedName: "Trigger", +export const LinkedService: coreClient.CompositeMapper = { + serializedName: "LinkedService", type: { name: "Composite", - className: "Trigger", - uberParent: "Trigger", + className: "LinkedService", + uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: { serializedName: "type", @@ -1017,17 +1129,26 @@ export const Trigger: coreClient.CompositeMapper = { name: "String" } }, + connectVia: { + serializedName: "connectVia", + type: { + name: "Composite", + className: "IntegrationRuntimeReference" + } + }, description: { serializedName: "description", type: { name: "String" } }, - runtimeState: { - serializedName: "runtimeState", - readOnly: true, + parameters: { + serializedName: "parameters", type: { - name: "String" + name: "Dictionary", + value: { + type: { name: "Composite", className: "ParameterSpecification" } + } } }, annotations: { @@ -1045,49 +1166,50 @@ export const Trigger: coreClient.CompositeMapper = { } }; -export const TriggerSubscriptionOperationStatus: coreClient.CompositeMapper = { +export const IntegrationRuntimeReference: coreClient.CompositeMapper = { type: { name: "Composite", - className: "TriggerSubscriptionOperationStatus", + className: "IntegrationRuntimeReference", modelProperties: { - triggerName: { - serializedName: "triggerName", - readOnly: true, + type: { + serializedName: "type", + required: true, type: { name: "String" } }, - status: { - serializedName: "status", - readOnly: true, + referenceName: { + serializedName: "referenceName", + required: true, type: { name: "String" } + }, + parameters: { + serializedName: "parameters", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } } } } }; -export const TriggerRunsQueryResponse: coreClient.CompositeMapper = { +export const DataFlowStagingInfo: coreClient.CompositeMapper = { type: { name: "Composite", - className: "TriggerRunsQueryResponse", + className: "DataFlowStagingInfo", modelProperties: { - value: { - serializedName: "value", - required: true, + linkedService: { + serializedName: "linkedService", type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "TriggerRun" - } - } + name: "Composite", + className: "LinkedServiceReference" } }, - continuationToken: { - serializedName: "continuationToken", + folderPath: { + serializedName: "folderPath", type: { name: "String" } @@ -1096,126 +1218,178 @@ export const TriggerRunsQueryResponse: coreClient.CompositeMapper = { } }; -export const TriggerRun: coreClient.CompositeMapper = { +export const DataFlowDebugPackageDebugSettings: coreClient.CompositeMapper = { type: { name: "Composite", - className: "TriggerRun", - additionalProperties: { type: { name: "Object" } }, + className: "DataFlowDebugPackageDebugSettings", modelProperties: { - triggerRunId: { - serializedName: "triggerRunId", - readOnly: true, + sourceSettings: { + 
serializedName: "sourceSettings", type: { - name: "String" + name: "Sequence", + element: { + type: { + name: "Composite", + className: "DataFlowSourceSetting" + } + } } }, - triggerName: { - serializedName: "triggerName", - readOnly: true, + parameters: { + serializedName: "parameters", type: { - name: "String" + name: "Dictionary", + value: { type: { name: "any" } } } }, - triggerType: { - serializedName: "triggerType", - readOnly: true, + datasetParameters: { + serializedName: "datasetParameters", + type: { + name: "any" + } + } + } + } +}; + +export const DataFlowSourceSetting: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowSourceSetting", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + sourceName: { + serializedName: "sourceName", type: { name: "String" } }, - triggerRunTimestamp: { - serializedName: "triggerRunTimestamp", - readOnly: true, + rowLimit: { + serializedName: "rowLimit", type: { - name: "DateTime" + name: "Number" } - }, - status: { - serializedName: "status", - readOnly: true, + } + } + } +}; + +export const AddDataFlowToDebugSessionResponse: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "AddDataFlowToDebugSessionResponse", + modelProperties: { + jobVersion: { + serializedName: "jobVersion", + type: { + name: "String" + } + } + } + } +}; + +export const DeleteDataFlowDebugSessionRequest: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "DeleteDataFlowDebugSessionRequest", + modelProperties: { + sessionId: { + serializedName: "sessionId", type: { name: "String" } }, - message: { - serializedName: "message", - readOnly: true, + dataFlowName: { + serializedName: "dataFlowName", + type: { + name: "String" + } + } + } + } +}; + +export const DataFlowDebugCommandRequest: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowDebugCommandRequest", + modelProperties: { + sessionId: { + serializedName: "sessionId", type: { name: "String" } }, - properties: { - serializedName: "properties", - readOnly: true, + command: { + serializedName: "command", type: { - name: "Dictionary", - value: { type: { name: "String" } } + name: "String" } }, - triggeredPipelines: { - serializedName: "triggeredPipelines", - readOnly: true, + commandPayload: { + serializedName: "commandPayload", type: { - name: "Dictionary", - value: { type: { name: "String" } } + name: "Composite", + className: "DataFlowDebugCommandPayload" } } } } }; -export const DataFlow: coreClient.CompositeMapper = { +export const DataFlowDebugCommandPayload: coreClient.CompositeMapper = { type: { name: "Composite", - className: "DataFlow", - uberParent: "DataFlow", - polymorphicDiscriminator: { - serializedName: "type", - clientName: "type" - }, + className: "DataFlowDebugCommandPayload", modelProperties: { - type: { - serializedName: "type", + streamName: { + serializedName: "streamName", required: true, type: { name: "String" } }, - description: { - serializedName: "description", + rowLimits: { + serializedName: "rowLimits", type: { - name: "String" + name: "Number" } }, - annotations: { - serializedName: "annotations", + columns: { + serializedName: "columns", type: { name: "Sequence", element: { type: { - name: "any" + name: "String" } } } }, - folder: { - serializedName: "folder", + expression: { + serializedName: "expression", type: { - name: "Composite", - className: "DataFlowFolder" + name: "String" } } } } }; -export const DataFlowFolder: coreClient.CompositeMapper = { 
+export const DataFlowDebugCommandResponse: coreClient.CompositeMapper = { type: { name: "Composite", - className: "DataFlowFolder", + className: "DataFlowDebugCommandResponse", modelProperties: { - name: { - serializedName: "name", + status: { + serializedName: "status", + type: { + name: "String" + } + }, + data: { + serializedName: "data", type: { name: "String" } @@ -1224,10 +1398,10 @@ export const DataFlowFolder: coreClient.CompositeMapper = { } }; -export const DataFlowListResponse: coreClient.CompositeMapper = { +export const DatasetListResponse: coreClient.CompositeMapper = { type: { name: "Composite", - className: "DataFlowListResponse", + className: "DatasetListResponse", modelProperties: { value: { serializedName: "value", @@ -1237,7 +1411,7 @@ export const DataFlowListResponse: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "DataFlowResource" + className: "DatasetResource" } } } @@ -1252,59 +1426,71 @@ export const DataFlowListResponse: coreClient.CompositeMapper = { } }; -export const CreateDataFlowDebugSessionRequest: coreClient.CompositeMapper = { +export const GitHubAccessTokenRequest: coreClient.CompositeMapper = { type: { name: "Composite", - className: "CreateDataFlowDebugSessionRequest", + className: "GitHubAccessTokenRequest", modelProperties: { - dataFlowName: { - serializedName: "dataFlowName", + gitHubClientId: { + serializedName: "gitHubClientId", + required: true, type: { name: "String" } }, - existingClusterId: { - serializedName: "existingClusterId", + gitHubAccessCode: { + serializedName: "gitHubAccessCode", + required: true, type: { name: "String" } }, - clusterTimeout: { - serializedName: "clusterTimeout", - type: { - name: "Number" - } - }, - newClusterName: { - serializedName: "newClusterName", + gitHubAccessTokenBaseUrl: { + serializedName: "gitHubAccessTokenBaseUrl", + required: true, type: { name: "String" } - }, - newClusterNodeType: { - serializedName: "newClusterNodeType", + } + } + } +}; + +export const GitHubAccessTokenResponse: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "GitHubAccessTokenResponse", + modelProperties: { + gitHubAccessToken: { + serializedName: "gitHubAccessToken", type: { name: "String" } - }, - dataBricksLinkedService: { - serializedName: "dataBricksLinkedService", - type: { - name: "Composite", - className: "LinkedServiceResource" - } } } } }; -export const CreateDataFlowDebugSessionResponse: coreClient.CompositeMapper = { +export const IntegrationRuntimeListResponse: coreClient.CompositeMapper = { type: { name: "Composite", - className: "CreateDataFlowDebugSessionResponse", + className: "IntegrationRuntimeListResponse", modelProperties: { - sessionId: { - serializedName: "sessionId", + value: { + serializedName: "value", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "IntegrationRuntimeResource" + } + } + } + }, + nextLink: { + serializedName: "nextLink", type: { name: "String" } @@ -1313,19 +1499,20 @@ export const CreateDataFlowDebugSessionResponse: coreClient.CompositeMapper = { } }; -export const QueryDataFlowDebugSessionsResponse: coreClient.CompositeMapper = { +export const LibraryListResponse: coreClient.CompositeMapper = { type: { name: "Composite", - className: "QueryDataFlowDebugSessionsResponse", + className: "LibraryListResponse", modelProperties: { value: { serializedName: "value", + required: true, type: { name: "Sequence", element: { type: { name: "Composite", - className: 
"DataFlowDebugSessionInfo" + className: "LibraryResource" } } } @@ -1340,62 +1527,56 @@ export const QueryDataFlowDebugSessionsResponse: coreClient.CompositeMapper = { } }; -export const DataFlowDebugSessionInfo: coreClient.CompositeMapper = { +export const LibraryResourceProperties: coreClient.CompositeMapper = { type: { name: "Composite", - className: "DataFlowDebugSessionInfo", - additionalProperties: { type: { name: "Object" } }, + className: "LibraryResourceProperties", modelProperties: { - dataFlowName: { - serializedName: "dataFlowName", + name: { + serializedName: "name", + readOnly: true, type: { name: "String" } }, - computeType: { - serializedName: "computeType", - type: { - name: "String" - } - }, - coreCount: { - serializedName: "coreCount", - type: { - name: "Number" - } - }, - nodeCount: { - serializedName: "nodeCount", + path: { + serializedName: "path", + readOnly: true, type: { - name: "Number" + name: "String" } }, - integrationRuntimeName: { - serializedName: "integrationRuntimeName", + containerName: { + serializedName: "containerName", + readOnly: true, type: { name: "String" } }, - sessionId: { - serializedName: "sessionId", + uploadedTimestamp: { + serializedName: "uploadedTimestamp", + readOnly: true, type: { name: "String" } }, - startTime: { - serializedName: "startTime", + type: { + serializedName: "type", + readOnly: true, type: { name: "String" } }, - timeToLiveInMinutes: { - serializedName: "timeToLiveInMinutes", + provisioningStatus: { + serializedName: "provisioningStatus", + readOnly: true, type: { - name: "Number" + name: "String" } }, - lastActivityTime: { - serializedName: "lastActivityTime", + creatorId: { + serializedName: "creatorId", + readOnly: true, type: { name: "String" } @@ -1404,96 +1585,70 @@ export const DataFlowDebugSessionInfo: coreClient.CompositeMapper = { } }; -export const DataFlowDebugPackage: coreClient.CompositeMapper = { +export const LibraryResourceInfo: coreClient.CompositeMapper = { type: { name: "Composite", - className: "DataFlowDebugPackage", - additionalProperties: { type: { name: "Object" } }, + className: "LibraryResourceInfo", modelProperties: { - sessionId: { - serializedName: "sessionId", + id: { + serializedName: "id", + readOnly: true, type: { name: "String" } }, - dataFlow: { - serializedName: "dataFlow", + recordId: { + serializedName: "recordId", + readOnly: true, type: { - name: "Composite", - className: "DataFlowDebugResource" + name: "Number" } }, - datasets: { - serializedName: "datasets", + state: { + serializedName: "state", + readOnly: true, type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "DatasetDebugResource" - } - } + name: "String" } }, - linkedServices: { - serializedName: "linkedServices", + created: { + serializedName: "created", + readOnly: true, type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "LinkedServiceDebugResource" - } - } + name: "String" } }, - staging: { - serializedName: "staging", + changed: { + serializedName: "changed", + readOnly: true, type: { - name: "Composite", - className: "DataFlowStagingInfo" + name: "String" } }, - debugSettings: { - serializedName: "debugSettings", + type: { + serializedName: "type", + readOnly: true, type: { - name: "Composite", - className: "DataFlowDebugPackageDebugSettings" + name: "String" } - } - } - } -}; - -export const SubResourceDebugResource: coreClient.CompositeMapper = { - type: { - name: "Composite", - className: "SubResourceDebugResource", - modelProperties: { + }, name: { 
serializedName: "name", + readOnly: true, type: { name: "String" } - } - } - } -}; - -export const DataFlowStagingInfo: coreClient.CompositeMapper = { - type: { - name: "Composite", - className: "DataFlowStagingInfo", - modelProperties: { - linkedService: { - serializedName: "linkedService", + }, + operationId: { + serializedName: "operationId", + readOnly: true, type: { - name: "Composite", - className: "LinkedServiceReference" + name: "String" } }, - folderPath: { - serializedName: "folderPath", + artifactId: { + serializedName: "artifactId", + readOnly: true, type: { name: "String" } @@ -1502,69 +1657,72 @@ export const DataFlowStagingInfo: coreClient.CompositeMapper = { } }; -export const DataFlowDebugPackageDebugSettings: coreClient.CompositeMapper = { +export const OperationResult: coreClient.CompositeMapper = { type: { name: "Composite", - className: "DataFlowDebugPackageDebugSettings", + className: "OperationResult", modelProperties: { - sourceSettings: { - serializedName: "sourceSettings", + status: { + serializedName: "status", + readOnly: true, type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "DataFlowSourceSetting" - } - } + name: "String" } }, - parameters: { - serializedName: "parameters", + code: { + serializedName: "error.code", type: { - name: "Dictionary", - value: { type: { name: "any" } } + name: "String" } }, - datasetParameters: { - serializedName: "datasetParameters", + message: { + serializedName: "error.message", type: { - name: "any" + name: "String" } - } - } - } -}; - -export const DataFlowSourceSetting: coreClient.CompositeMapper = { - type: { - name: "Composite", - className: "DataFlowSourceSetting", - additionalProperties: { type: { name: "Object" } }, - modelProperties: { - sourceName: { - serializedName: "sourceName", + }, + target: { + serializedName: "error.target", type: { name: "String" } }, - rowLimit: { - serializedName: "rowLimit", + details: { + serializedName: "error.details", type: { - name: "Number" + name: "Sequence", + element: { + type: { + name: "Composite", + className: "CloudErrorAutoGenerated" + } + } } } } } }; -export const AddDataFlowToDebugSessionResponse: coreClient.CompositeMapper = { +export const LinkedServiceListResponse: coreClient.CompositeMapper = { type: { name: "Composite", - className: "AddDataFlowToDebugSessionResponse", + className: "LinkedServiceListResponse", modelProperties: { - jobVersion: { - serializedName: "jobVersion", + value: { + serializedName: "value", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "LinkedServiceResource" + } + } + } + }, + nextLink: { + serializedName: "nextLink", type: { name: "String" } @@ -1573,19 +1731,26 @@ export const AddDataFlowToDebugSessionResponse: coreClient.CompositeMapper = { } }; -export const DeleteDataFlowDebugSessionRequest: coreClient.CompositeMapper = { +export const NotebookListResponse: coreClient.CompositeMapper = { type: { name: "Composite", - className: "DeleteDataFlowDebugSessionRequest", + className: "NotebookListResponse", modelProperties: { - sessionId: { - serializedName: "sessionId", + value: { + serializedName: "value", + required: true, type: { - name: "String" + name: "Sequence", + element: { + type: { + name: "Composite", + className: "NotebookResource" + } + } } }, - dataFlowName: { - serializedName: "dataFlowName", + nextLink: { + serializedName: "nextLink", type: { name: "String" } @@ -1594,209 +1759,227 @@ export const DeleteDataFlowDebugSessionRequest: 
coreClient.CompositeMapper = { } }; -export const DataFlowDebugCommandRequest: coreClient.CompositeMapper = { +export const NotebookResource: coreClient.CompositeMapper = { type: { name: "Composite", - className: "DataFlowDebugCommandRequest", + className: "NotebookResource", modelProperties: { - sessionId: { - serializedName: "sessionId", - required: true, + id: { + serializedName: "id", + readOnly: true, type: { name: "String" } }, - dataFlowName: { - serializedName: "dataFlowName", + name: { + serializedName: "name", + required: true, type: { name: "String" } }, - commandName: { - serializedName: "commandName", + type: { + serializedName: "type", + readOnly: true, type: { name: "String" } }, - commandPayload: { - serializedName: "commandPayload", - required: true, + etag: { + serializedName: "etag", + readOnly: true, type: { - name: "any" + name: "String" + } + }, + properties: { + serializedName: "properties", + type: { + name: "Composite", + className: "Notebook" } } } } }; -export const DataFlowDebugCommandResponse: coreClient.CompositeMapper = { +export const Notebook: coreClient.CompositeMapper = { type: { name: "Composite", - className: "DataFlowDebugCommandResponse", + className: "Notebook", + additionalProperties: { type: { name: "Object" } }, modelProperties: { - status: { - serializedName: "status", + description: { + serializedName: "description", type: { name: "String" } }, - data: { - serializedName: "data", + bigDataPool: { + serializedName: "bigDataPool", type: { - name: "String" + name: "Composite", + className: "BigDataPoolReference" } - } - } - } -}; - -export const SqlScriptsListResponse: coreClient.CompositeMapper = { - type: { - name: "Composite", - className: "SqlScriptsListResponse", - modelProperties: { - value: { - serializedName: "value", + }, + sessionProperties: { + serializedName: "sessionProperties", + type: { + name: "Composite", + className: "NotebookSessionProperties" + } + }, + metadata: { + serializedName: "metadata", + type: { + name: "Composite", + className: "NotebookMetadata" + } + }, + nbformat: { + serializedName: "nbformat", + required: true, + type: { + name: "Number" + } + }, + nbformatMinor: { + serializedName: "nbformat_minor", + required: true, + type: { + name: "Number" + } + }, + cells: { + serializedName: "cells", required: true, type: { name: "Sequence", element: { type: { name: "Composite", - className: "SqlScriptResource" + className: "NotebookCell" } } } }, - nextLink: { - serializedName: "nextLink", + folder: { + serializedName: "folder", type: { - name: "String" + name: "Composite", + className: "NotebookFolder" } } } } }; -export const SqlScriptResource: coreClient.CompositeMapper = { +export const BigDataPoolReference: coreClient.CompositeMapper = { type: { name: "Composite", - className: "SqlScriptResource", + className: "BigDataPoolReference", modelProperties: { - id: { - serializedName: "id", - readOnly: true, - type: { - name: "String" - } - }, - name: { - serializedName: "name", - required: true, - type: { - name: "String" - } - }, type: { serializedName: "type", - readOnly: true, + required: true, type: { name: "String" } }, - etag: { - serializedName: "etag", - readOnly: true, + referenceName: { + serializedName: "referenceName", + required: true, type: { name: "String" } - }, - properties: { - serializedName: "properties", - type: { - name: "Composite", - className: "SqlScript" - } } } } }; -export const SqlScript: coreClient.CompositeMapper = { +export const NotebookSessionProperties: coreClient.CompositeMapper = { type: 
{ name: "Composite", - className: "SqlScript", - additionalProperties: { type: { name: "Object" } }, + className: "NotebookSessionProperties", modelProperties: { - description: { - serializedName: "description", + driverMemory: { + serializedName: "driverMemory", + required: true, type: { name: "String" } }, - type: { - serializedName: "type", + driverCores: { + serializedName: "driverCores", + required: true, + type: { + name: "Number" + } + }, + executorMemory: { + serializedName: "executorMemory", + required: true, type: { name: "String" } }, - content: { - serializedName: "content", + executorCores: { + serializedName: "executorCores", + required: true, type: { - name: "Composite", - className: "SqlScriptContent" + name: "Number" + } + }, + numExecutors: { + serializedName: "numExecutors", + required: true, + type: { + name: "Number" } } } } }; -export const SqlScriptContent: coreClient.CompositeMapper = { +export const NotebookMetadata: coreClient.CompositeMapper = { type: { name: "Composite", - className: "SqlScriptContent", + className: "NotebookMetadata", additionalProperties: { type: { name: "Object" } }, modelProperties: { - query: { - serializedName: "query", - required: true, - type: { - name: "String" - } - }, - currentConnection: { - serializedName: "currentConnection", + kernelspec: { + serializedName: "kernelspec", type: { name: "Composite", - className: "SqlConnection" + className: "NotebookKernelSpec" } }, - metadata: { - serializedName: "metadata", + languageInfo: { + serializedName: "language_info", type: { name: "Composite", - className: "SqlScriptMetadata" + className: "NotebookLanguageInfo" } } } } }; -export const SqlConnection: coreClient.CompositeMapper = { +export const NotebookKernelSpec: coreClient.CompositeMapper = { type: { name: "Composite", - className: "SqlConnection", + className: "NotebookKernelSpec", additionalProperties: { type: { name: "Object" } }, modelProperties: { - type: { - serializedName: "type", + name: { + serializedName: "name", required: true, type: { name: "String" } }, - name: { - serializedName: "name", + displayName: { + serializedName: "display_name", required: true, type: { name: "String" @@ -1806,14 +1989,21 @@ export const SqlConnection: coreClient.CompositeMapper = { } }; -export const SqlScriptMetadata: coreClient.CompositeMapper = { +export const NotebookLanguageInfo: coreClient.CompositeMapper = { type: { name: "Composite", - className: "SqlScriptMetadata", + className: "NotebookLanguageInfo", additionalProperties: { type: { name: "Object" } }, modelProperties: { - language: { - serializedName: "language", + name: { + serializedName: "name", + required: true, + type: { + name: "String" + } + }, + codemirrorMode: { + serializedName: "codemirror_mode", type: { name: "String" } @@ -1822,91 +2012,142 @@ export const SqlScriptMetadata: coreClient.CompositeMapper = { } }; -export const SparkJobDefinitionsListResponse: coreClient.CompositeMapper = { +export const NotebookCell: coreClient.CompositeMapper = { type: { name: "Composite", - className: "SparkJobDefinitionsListResponse", + className: "NotebookCell", + additionalProperties: { type: { name: "Object" } }, modelProperties: { - value: { - serializedName: "value", + cellType: { + serializedName: "cell_type", + required: true, + type: { + name: "String" + } + }, + metadata: { + serializedName: "metadata", + required: true, + type: { + name: "any" + } + }, + source: { + serializedName: "source", required: true, type: { name: "Sequence", element: { type: { - name: "Composite", - 
className: "SparkJobDefinitionResource" + name: "String" } } } }, - nextLink: { - serializedName: "nextLink", + attachments: { + serializedName: "attachments", + nullable: true, type: { - name: "String" + name: "any" + } + }, + outputs: { + serializedName: "outputs", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "NotebookCellOutputItem" + } + } } } } } }; -export const SparkJobDefinition: coreClient.CompositeMapper = { +export const NotebookCellOutputItem: coreClient.CompositeMapper = { type: { name: "Composite", - className: "SparkJobDefinition", - additionalProperties: { type: { name: "Object" } }, + className: "NotebookCellOutputItem", modelProperties: { - description: { - serializedName: "description", + name: { + serializedName: "name", type: { name: "String" } }, - targetBigDataPool: { - serializedName: "targetBigDataPool", + executionCount: { + serializedName: "execution_count", type: { - name: "Composite", - className: "BigDataPoolReference" + name: "Number" } }, - requiredSparkVersion: { - serializedName: "requiredSparkVersion", + outputType: { + serializedName: "output_type", + required: true, type: { name: "String" } }, - language: { - serializedName: "language", + text: { + serializedName: "text", type: { - name: "String" + name: "any" } }, - jobProperties: { - serializedName: "jobProperties", + data: { + serializedName: "data", type: { - name: "Composite", - className: "SparkJobProperties" + name: "any" + } + }, + metadata: { + serializedName: "metadata", + type: { + name: "any" } } } } }; -export const BigDataPoolReference: coreClient.CompositeMapper = { +export const NotebookFolder: coreClient.CompositeMapper = { type: { name: "Composite", - className: "BigDataPoolReference", + className: "NotebookFolder", modelProperties: { - type: { - serializedName: "type", - required: true, + name: { + serializedName: "name", type: { name: "String" } - }, - referenceName: { - serializedName: "referenceName", + } + } + } +}; + +export const PipelineListResponse: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "PipelineListResponse", + modelProperties: { + value: { + serializedName: "value", required: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "PipelineResource" + } + } + } + }, + nextLink: { + serializedName: "nextLink", type: { name: "String" } @@ -1915,72 +2156,82 @@ export const BigDataPoolReference: coreClient.CompositeMapper = { } }; -export const SparkJobProperties: coreClient.CompositeMapper = { +export const Activity: coreClient.CompositeMapper = { + serializedName: "Activity", type: { name: "Composite", - className: "SparkJobProperties", + className: "Activity", + uberParent: "Activity", additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, modelProperties: { name: { serializedName: "name", + required: true, type: { name: "String" } }, - file: { - serializedName: "file", + type: { + serializedName: "type", required: true, type: { name: "String" } }, - className: { - serializedName: "className", + description: { + serializedName: "description", type: { name: "String" } }, - conf: { - serializedName: "conf", - type: { - name: "any" - } - }, - args: { - serializedName: "args", + dependsOn: { + serializedName: "dependsOn", type: { name: "Sequence", element: { type: { - name: "String" + name: "Composite", + className: "ActivityDependency" } } } }, - jars: { - serializedName: "jars", + 
userProperties: { + serializedName: "userProperties", type: { name: "Sequence", element: { type: { - name: "String" + name: "Composite", + className: "UserProperty" } } } - }, - files: { - serializedName: "files", + } + } + } +}; + +export const ActivityDependency: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "ActivityDependency", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + activity: { + serializedName: "activity", + required: true, type: { - name: "Sequence", - element: { - type: { - name: "String" - } - } + name: "String" } }, - archives: { - serializedName: "archives", + dependencyConditions: { + serializedName: "dependencyConditions", + required: true, type: { name: "Sequence", element: { @@ -1989,497 +2240,485 @@ export const SparkJobProperties: coreClient.CompositeMapper = { } } } - }, - driverMemory: { - serializedName: "driverMemory", + } + } + } +}; + +export const UserProperty: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "UserProperty", + modelProperties: { + name: { + serializedName: "name", required: true, type: { name: "String" } }, - driverCores: { - serializedName: "driverCores", + value: { + serializedName: "value", required: true, type: { - name: "Number" + name: "any" } - }, - executorMemory: { - serializedName: "executorMemory", + } + } + } +}; + +export const VariableSpecification: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "VariableSpecification", + modelProperties: { + type: { + serializedName: "type", required: true, type: { name: "String" } }, - executorCores: { - serializedName: "executorCores", - required: true, - type: { - name: "Number" - } - }, - numExecutors: { - serializedName: "numExecutors", - required: true, + defaultValue: { + serializedName: "defaultValue", type: { - name: "Number" + name: "any" } } } } }; -export const SparkBatchJob: coreClient.CompositeMapper = { +export const PipelineFolder: coreClient.CompositeMapper = { type: { name: "Composite", - className: "SparkBatchJob", + className: "PipelineFolder", modelProperties: { - livyInfo: { - serializedName: "livyInfo", - type: { - name: "Composite", - className: "SparkBatchJobState" - } - }, name: { serializedName: "name", type: { name: "String" } - }, - workspaceName: { - serializedName: "workspaceName", + } + } + } +}; + +export const CreateRunResponse: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "CreateRunResponse", + modelProperties: { + runId: { + serializedName: "runId", + required: true, type: { name: "String" } - }, - sparkPoolName: { - serializedName: "sparkPoolName", + } + } + } +}; + +export const RunFilterParameters: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "RunFilterParameters", + modelProperties: { + continuationToken: { + serializedName: "continuationToken", type: { name: "String" } }, - submitterName: { - serializedName: "submitterName", + lastUpdatedAfter: { + serializedName: "lastUpdatedAfter", + required: true, type: { - name: "String" + name: "DateTime" } }, - submitterId: { - serializedName: "submitterId", + lastUpdatedBefore: { + serializedName: "lastUpdatedBefore", + required: true, type: { - name: "String" + name: "DateTime" } }, - artifactId: { - serializedName: "artifactId", + filters: { + serializedName: "filters", type: { - name: "String" + name: "Sequence", + element: { + type: { + name: "Composite", + className: "RunQueryFilter" + } + } } }, - jobType: { - serializedName: "jobType", 
+ orderBy: { + serializedName: "orderBy", type: { - name: "String" + name: "Sequence", + element: { + type: { + name: "Composite", + className: "RunQueryOrderBy" + } + } } - }, - result: { - serializedName: "result", + } + } + } +}; + +export const RunQueryFilter: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "RunQueryFilter", + modelProperties: { + operand: { + serializedName: "operand", + required: true, type: { name: "String" } }, - scheduler: { - serializedName: "schedulerInfo", - type: { - name: "Composite", - className: "SparkScheduler" - } - }, - plugin: { - serializedName: "pluginInfo", + operator: { + serializedName: "operator", + required: true, type: { - name: "Composite", - className: "SparkServicePlugin" + name: "String" } }, - errors: { - serializedName: "errorInfo", + values: { + serializedName: "values", + required: true, type: { name: "Sequence", element: { type: { - name: "Composite", - className: "SparkServiceError" + name: "String" } } } - }, - tags: { - serializedName: "tags", - type: { - name: "Dictionary", - value: { type: { name: "String" } } - } - }, - id: { - serializedName: "id", + } + } + } +}; + +export const RunQueryOrderBy: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "RunQueryOrderBy", + modelProperties: { + orderBy: { + serializedName: "orderBy", required: true, - type: { - name: "Number" - } - }, - appId: { - serializedName: "appId", - nullable: true, type: { name: "String" } }, - appInfo: { - serializedName: "appInfo", - nullable: true, - type: { - name: "Dictionary", - value: { type: { name: "String" } } - } - }, - state: { - serializedName: "state", + order: { + serializedName: "order", + required: true, type: { name: "String" } - }, - logLines: { - serializedName: "log", - nullable: true, + } + } + } +}; + +export const PipelineRunsQueryResponse: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "PipelineRunsQueryResponse", + modelProperties: { + value: { + serializedName: "value", + required: true, type: { name: "Sequence", element: { type: { - name: "String" + name: "Composite", + className: "PipelineRun" } } } + }, + continuationToken: { + serializedName: "continuationToken", + type: { + name: "String" + } } } } }; -export const SparkBatchJobState: coreClient.CompositeMapper = { +export const PipelineRun: coreClient.CompositeMapper = { type: { name: "Composite", - className: "SparkBatchJobState", + className: "PipelineRun", + additionalProperties: { type: { name: "Object" } }, modelProperties: { - notStartedAt: { - serializedName: "notStartedAt", - nullable: true, + runId: { + serializedName: "runId", + readOnly: true, type: { - name: "DateTime" + name: "String" } }, - startingAt: { - serializedName: "startingAt", - nullable: true, + runGroupId: { + serializedName: "runGroupId", + readOnly: true, type: { - name: "DateTime" + name: "String" } }, - runningAt: { - serializedName: "runningAt", - nullable: true, + isLatest: { + serializedName: "isLatest", + readOnly: true, type: { - name: "DateTime" + name: "Boolean" } }, - deadAt: { - serializedName: "deadAt", - nullable: true, + pipelineName: { + serializedName: "pipelineName", + readOnly: true, type: { - name: "DateTime" + name: "String" } }, - successAt: { - serializedName: "successAt", - nullable: true, + parameters: { + serializedName: "parameters", + readOnly: true, + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + invokedBy: { + serializedName: "invokedBy", + type: { + name: 
"Composite", + className: "PipelineRunInvokedBy" + } + }, + lastUpdated: { + serializedName: "lastUpdated", + readOnly: true, type: { name: "DateTime" } }, - terminatedAt: { - serializedName: "killedAt", - nullable: true, + runStart: { + serializedName: "runStart", + readOnly: true, type: { name: "DateTime" } }, - recoveringAt: { - serializedName: "recoveringAt", + runEnd: { + serializedName: "runEnd", + readOnly: true, nullable: true, type: { name: "DateTime" } }, - currentState: { - serializedName: "currentState", + durationInMs: { + serializedName: "durationInMs", + readOnly: true, + type: { + name: "Number" + } + }, + status: { + serializedName: "status", + readOnly: true, type: { name: "String" } }, - jobCreationRequest: { - serializedName: "jobCreationRequest", + message: { + serializedName: "message", + readOnly: true, type: { - name: "Composite", - className: "SparkRequest" + name: "String" } } } } }; -export const SparkRequest: coreClient.CompositeMapper = { +export const PipelineRunInvokedBy: coreClient.CompositeMapper = { type: { name: "Composite", - className: "SparkRequest", + className: "PipelineRunInvokedBy", modelProperties: { name: { serializedName: "name", + readOnly: true, type: { name: "String" } }, - file: { - serializedName: "file", + id: { + serializedName: "id", + readOnly: true, type: { name: "String" } }, - className: { - serializedName: "className", + invokedByType: { + serializedName: "invokedByType", + readOnly: true, type: { name: "String" } - }, - arguments: { - serializedName: "args", + } + } + } +}; + +export const ActivityRunsQueryResponse: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "ActivityRunsQueryResponse", + modelProperties: { + value: { + serializedName: "value", + required: true, type: { name: "Sequence", element: { type: { - name: "String" + name: "Composite", + className: "ActivityRun" } } } }, - jars: { - serializedName: "jars", + continuationToken: { + serializedName: "continuationToken", type: { - name: "Sequence", - element: { - type: { - name: "String" - } - } - } - }, - pythonFiles: { - serializedName: "pyFiles", - type: { - name: "Sequence", - element: { - type: { - name: "String" - } - } - } - }, - files: { - serializedName: "files", - type: { - name: "Sequence", - element: { - type: { - name: "String" - } - } - } - }, - archives: { - serializedName: "archives", - type: { - name: "Sequence", - element: { - type: { - name: "String" - } - } - } - }, - configuration: { - serializedName: "conf", - type: { - name: "Dictionary", - value: { type: { name: "String" } } - } - }, - driverMemory: { - serializedName: "driverMemory", - type: { - name: "String" - } - }, - driverCores: { - serializedName: "driverCores", - type: { - name: "Number" - } - }, - executorMemory: { - serializedName: "executorMemory", - type: { - name: "String" - } - }, - executorCores: { - serializedName: "executorCores", - type: { - name: "Number" - } - }, - executorCount: { - serializedName: "numExecutors", - type: { - name: "Number" + name: "String" } } } } }; -export const SparkScheduler: coreClient.CompositeMapper = { +export const ActivityRun: coreClient.CompositeMapper = { type: { name: "Composite", - className: "SparkScheduler", + className: "ActivityRun", + additionalProperties: { type: { name: "Object" } }, modelProperties: { - submittedAt: { - serializedName: "submittedAt", - nullable: true, + pipelineName: { + serializedName: "pipelineName", + readOnly: true, type: { - name: "DateTime" + name: "String" } }, - scheduledAt: { - 
serializedName: "scheduledAt", - nullable: true, + pipelineRunId: { + serializedName: "pipelineRunId", + readOnly: true, type: { - name: "DateTime" + name: "String" } }, - endedAt: { - serializedName: "endedAt", - nullable: true, + activityName: { + serializedName: "activityName", + readOnly: true, type: { - name: "DateTime" + name: "String" } }, - cancellationRequestedAt: { - serializedName: "cancellationRequestedAt", - nullable: true, + activityType: { + serializedName: "activityType", + readOnly: true, type: { - name: "DateTime" + name: "String" } }, - currentState: { - serializedName: "currentState", + activityRunId: { + serializedName: "activityRunId", + readOnly: true, type: { name: "String" } - } - } - } -}; - -export const SparkServicePlugin: coreClient.CompositeMapper = { - type: { - name: "Composite", - className: "SparkServicePlugin", - modelProperties: { - preparationStartedAt: { - serializedName: "preparationStartedAt", - nullable: true, - type: { - name: "DateTime" - } }, - resourceAcquisitionStartedAt: { - serializedName: "resourceAcquisitionStartedAt", - nullable: true, + linkedServiceName: { + serializedName: "linkedServiceName", + readOnly: true, type: { - name: "DateTime" + name: "String" } }, - submissionStartedAt: { - serializedName: "submissionStartedAt", - nullable: true, + status: { + serializedName: "status", + readOnly: true, type: { - name: "DateTime" + name: "String" } }, - monitoringStartedAt: { - serializedName: "monitoringStartedAt", - nullable: true, + activityRunStart: { + serializedName: "activityRunStart", + readOnly: true, type: { name: "DateTime" } }, - cleanupStartedAt: { - serializedName: "cleanupStartedAt", - nullable: true, + activityRunEnd: { + serializedName: "activityRunEnd", + readOnly: true, type: { name: "DateTime" } }, - currentState: { - serializedName: "currentState", + durationInMs: { + serializedName: "durationInMs", + readOnly: true, type: { - name: "String" + name: "Number" } - } - } - } -}; - -export const SparkServiceError: coreClient.CompositeMapper = { - type: { - name: "Composite", - className: "SparkServiceError", - modelProperties: { - message: { - serializedName: "message", + }, + input: { + serializedName: "input", + readOnly: true, type: { - name: "String" + name: "any" } }, - errorCode: { - serializedName: "errorCode", + output: { + serializedName: "output", + readOnly: true, type: { - name: "String" + name: "any" } }, - source: { - serializedName: "source", + error: { + serializedName: "error", + readOnly: true, type: { - name: "String" + name: "any" } } } } }; -export const NotebookListResponse: coreClient.CompositeMapper = { +export const SparkJobDefinitionsListResponse: coreClient.CompositeMapper = { type: { name: "Composite", - className: "NotebookListResponse", + className: "SparkJobDefinitionsListResponse", modelProperties: { value: { serializedName: "value", @@ -2489,7 +2728,7 @@ export const NotebookListResponse: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "NotebookResource" + className: "SparkJobDefinitionResource" } } } @@ -2504,119 +2743,130 @@ export const NotebookListResponse: coreClient.CompositeMapper = { } }; -export const NotebookResource: coreClient.CompositeMapper = { +export const SparkJobDefinition: coreClient.CompositeMapper = { type: { name: "Composite", - className: "NotebookResource", + className: "SparkJobDefinition", + additionalProperties: { type: { name: "Object" } }, modelProperties: { - id: { - serializedName: "id", - readOnly: true, + description: { + 
serializedName: "description", type: { name: "String" } }, - name: { - serializedName: "name", - required: true, + targetBigDataPool: { + serializedName: "targetBigDataPool", type: { - name: "String" + name: "Composite", + className: "BigDataPoolReference" } }, - type: { - serializedName: "type", - readOnly: true, + requiredSparkVersion: { + serializedName: "requiredSparkVersion", type: { name: "String" } }, - etag: { - serializedName: "etag", - readOnly: true, + language: { + serializedName: "language", type: { name: "String" } }, - properties: { - serializedName: "properties", + jobProperties: { + serializedName: "jobProperties", type: { name: "Composite", - className: "Notebook" + className: "SparkJobProperties" + } + }, + folder: { + serializedName: "folder", + type: { + name: "Composite", + className: "SparkJobDefinitionFolder" } } } } }; -export const Notebook: coreClient.CompositeMapper = { +export const SparkJobProperties: coreClient.CompositeMapper = { type: { name: "Composite", - className: "Notebook", + className: "SparkJobProperties", additionalProperties: { type: { name: "Object" } }, modelProperties: { - description: { - serializedName: "description", + name: { + serializedName: "name", type: { name: "String" } }, - bigDataPool: { - serializedName: "bigDataPool", + file: { + serializedName: "file", + required: true, type: { - name: "Composite", - className: "BigDataPoolReference" + name: "String" } }, - sessionProperties: { - serializedName: "sessionProperties", + className: { + serializedName: "className", type: { - name: "Composite", - className: "NotebookSessionProperties" + name: "String" } }, - metadata: { - serializedName: "metadata", + conf: { + serializedName: "conf", type: { - name: "Composite", - className: "NotebookMetadata" + name: "any" } }, - nbformat: { - serializedName: "nbformat", - required: true, + args: { + serializedName: "args", type: { - name: "Number" + name: "Sequence", + element: { + type: { + name: "String" + } + } } }, - nbformatMinor: { - serializedName: "nbformat_minor", - required: true, + jars: { + serializedName: "jars", type: { - name: "Number" + name: "Sequence", + element: { + type: { + name: "String" + } + } } }, - cells: { - serializedName: "cells", - required: true, + files: { + serializedName: "files", type: { name: "Sequence", element: { type: { - name: "Composite", - className: "NotebookCell" + name: "String" } } } - } - } - } -}; - -export const NotebookSessionProperties: coreClient.CompositeMapper = { - type: { - name: "Composite", - className: "NotebookSessionProperties", - modelProperties: { + }, + archives: { + serializedName: "archives", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, driverMemory: { serializedName: "driverMemory", required: true, @@ -2656,308 +2906,233 @@ export const NotebookSessionProperties: coreClient.CompositeMapper = { } }; -export const NotebookMetadata: coreClient.CompositeMapper = { +export const SparkJobDefinitionFolder: coreClient.CompositeMapper = { type: { name: "Composite", - className: "NotebookMetadata", - additionalProperties: { type: { name: "Object" } }, + className: "SparkJobDefinitionFolder", modelProperties: { - kernelspec: { - serializedName: "kernelspec", - type: { - name: "Composite", - className: "NotebookKernelSpec" - } - }, - languageInfo: { - serializedName: "language_info", + name: { + serializedName: "name", type: { - name: "Composite", - className: "NotebookLanguageInfo" + name: "String" } } } } }; -export const NotebookKernelSpec: 
coreClient.CompositeMapper = { +export const SparkBatchJob: coreClient.CompositeMapper = { type: { name: "Composite", - className: "NotebookKernelSpec", - additionalProperties: { type: { name: "Object" } }, + className: "SparkBatchJob", modelProperties: { + livyInfo: { + serializedName: "livyInfo", + type: { + name: "Composite", + className: "SparkBatchJobState" + } + }, name: { serializedName: "name", - required: true, type: { name: "String" } }, - displayName: { - serializedName: "display_name", - required: true, + workspaceName: { + serializedName: "workspaceName", type: { name: "String" } - } - } - } -}; - -export const NotebookLanguageInfo: coreClient.CompositeMapper = { - type: { - name: "Composite", - className: "NotebookLanguageInfo", - additionalProperties: { type: { name: "Object" } }, - modelProperties: { - name: { - serializedName: "name", - required: true, + }, + sparkPoolName: { + serializedName: "sparkPoolName", type: { name: "String" } }, - codemirrorMode: { - serializedName: "codemirror_mode", + submitterName: { + serializedName: "submitterName", type: { name: "String" } - } - } - } -}; - -export const NotebookCell: coreClient.CompositeMapper = { - type: { - name: "Composite", - className: "NotebookCell", - additionalProperties: { type: { name: "Object" } }, - modelProperties: { - cellType: { - serializedName: "cell_type", - required: true, + }, + submitterId: { + serializedName: "submitterId", type: { name: "String" } }, - metadata: { - serializedName: "metadata", - required: true, + artifactId: { + serializedName: "artifactId", type: { - name: "any" + name: "String" } }, - source: { - serializedName: "source", - required: true, + jobType: { + serializedName: "jobType", type: { - name: "Sequence", - element: { - type: { - name: "String" - } - } + name: "String" } }, - attachments: { - serializedName: "attachments", - nullable: true, + result: { + serializedName: "result", type: { - name: "any" + name: "String" } }, - outputs: { - serializedName: "outputs", + scheduler: { + serializedName: "schedulerInfo", + type: { + name: "Composite", + className: "SparkScheduler" + } + }, + plugin: { + serializedName: "pluginInfo", + type: { + name: "Composite", + className: "SparkServicePlugin" + } + }, + errors: { + serializedName: "errorInfo", type: { name: "Sequence", element: { type: { name: "Composite", - className: "NotebookCellOutputItem" + className: "SparkServiceError" } } } - } - } - } -}; - -export const NotebookCellOutputItem: coreClient.CompositeMapper = { - type: { - name: "Composite", - className: "NotebookCellOutputItem", - modelProperties: { - name: { - serializedName: "name", + }, + tags: { + serializedName: "tags", type: { - name: "String" + name: "Dictionary", + value: { type: { name: "String" } } } }, - executionCount: { - serializedName: "execution_count", + id: { + serializedName: "id", + required: true, type: { name: "Number" } }, - outputType: { - serializedName: "output_type", - required: true, + appId: { + serializedName: "appId", + nullable: true, type: { name: "String" } }, - text: { - serializedName: "text", + appInfo: { + serializedName: "appInfo", + nullable: true, type: { - name: "any" + name: "Dictionary", + value: { type: { name: "String" } } } }, - data: { - serializedName: "data", + state: { + serializedName: "state", type: { - name: "any" + name: "String" } }, - metadata: { - serializedName: "metadata", + logLines: { + serializedName: "log", + nullable: true, type: { - name: "any" + name: "Sequence", + element: { + type: { + name: "String" + } + } 
} } } } }; -export const DataLakeStorageAccountDetails: coreClient.CompositeMapper = { +export const SparkBatchJobState: coreClient.CompositeMapper = { type: { name: "Composite", - className: "DataLakeStorageAccountDetails", + className: "SparkBatchJobState", modelProperties: { - accountUrl: { - serializedName: "accountUrl", + notStartedAt: { + serializedName: "notStartedAt", + nullable: true, type: { - name: "String" + name: "DateTime" } }, - filesystem: { - serializedName: "filesystem", + startingAt: { + serializedName: "startingAt", + nullable: true, type: { - name: "String" + name: "DateTime" } - } - } - } -}; - -export const VirtualNetworkProfile: coreClient.CompositeMapper = { - type: { - name: "Composite", - className: "VirtualNetworkProfile", - modelProperties: { - computeSubnetId: { - serializedName: "computeSubnetId", - type: { - name: "String" - } - } - } - } -}; - -export const PrivateEndpoint: coreClient.CompositeMapper = { - type: { - name: "Composite", - className: "PrivateEndpoint", - modelProperties: { - id: { - serializedName: "id", - readOnly: true, - type: { - name: "String" - } - } - } - } -}; - -export const PrivateLinkServiceConnectionState: coreClient.CompositeMapper = { - type: { - name: "Composite", - className: "PrivateLinkServiceConnectionState", - modelProperties: { - status: { - serializedName: "status", + }, + runningAt: { + serializedName: "runningAt", + nullable: true, type: { - name: "String" + name: "DateTime" } }, - description: { - serializedName: "description", + deadAt: { + serializedName: "deadAt", + nullable: true, type: { - name: "String" + name: "DateTime" } }, - actionsRequired: { - serializedName: "actionsRequired", - readOnly: true, + successAt: { + serializedName: "successAt", + nullable: true, type: { - name: "String" + name: "DateTime" } - } - } - } -}; - -export const EncryptionDetails: coreClient.CompositeMapper = { - type: { - name: "Composite", - className: "EncryptionDetails", - modelProperties: { - doubleEncryptionEnabled: { - serializedName: "doubleEncryptionEnabled", - readOnly: true, + }, + terminatedAt: { + serializedName: "killedAt", + nullable: true, type: { - name: "Boolean" + name: "DateTime" } }, - cmk: { - serializedName: "cmk", + recoveringAt: { + serializedName: "recoveringAt", + nullable: true, type: { - name: "Composite", - className: "CustomerManagedKeyDetails" + name: "DateTime" } - } - } - } -}; - -export const CustomerManagedKeyDetails: coreClient.CompositeMapper = { - type: { - name: "Composite", - className: "CustomerManagedKeyDetails", - modelProperties: { - status: { - serializedName: "status", - readOnly: true, + }, + currentState: { + serializedName: "currentState", type: { name: "String" } }, - key: { - serializedName: "key", + jobCreationRequest: { + serializedName: "jobCreationRequest", type: { name: "Composite", - className: "WorkspaceKeyDetails" + className: "SparkRequest" } } } } }; -export const WorkspaceKeyDetails: coreClient.CompositeMapper = { +export const SparkRequest: coreClient.CompositeMapper = { type: { name: "Composite", - className: "WorkspaceKeyDetails", + className: "SparkRequest", modelProperties: { name: { serializedName: "name", @@ -2965,35 +3140,20 @@ export const WorkspaceKeyDetails: coreClient.CompositeMapper = { name: "String" } }, - keyVaultUrl: { - serializedName: "keyVaultUrl", + file: { + serializedName: "file", type: { name: "String" } - } - } - } -}; - -export const ManagedVirtualNetworkSettings: coreClient.CompositeMapper = { - type: { - name: "Composite", - className: 
"ManagedVirtualNetworkSettings", - modelProperties: { - preventDataExfiltration: { - serializedName: "preventDataExfiltration", - type: { - name: "Boolean" - } }, - linkedAccessCheckOnTargetResource: { - serializedName: "linkedAccessCheckOnTargetResource", + className: { + serializedName: "className", type: { - name: "Boolean" + name: "String" } }, - allowedAadTenantIdsForLinking: { - serializedName: "allowedAadTenantIdsForLinking", + arguments: { + serializedName: "args", type: { name: "Sequence", element: { @@ -3002,242 +3162,206 @@ export const ManagedVirtualNetworkSettings: coreClient.CompositeMapper = { } } } - } - } - } -}; - -export const WorkspaceRepositoryConfiguration: coreClient.CompositeMapper = { - type: { - name: "Composite", - className: "WorkspaceRepositoryConfiguration", - modelProperties: { - type: { - serializedName: "type", - type: { - name: "String" - } }, - hostName: { - serializedName: "hostName", + jars: { + serializedName: "jars", type: { - name: "String" + name: "Sequence", + element: { + type: { + name: "String" + } + } } }, - accountName: { - serializedName: "accountName", + pythonFiles: { + serializedName: "pyFiles", type: { - name: "String" + name: "Sequence", + element: { + type: { + name: "String" + } + } } }, - projectName: { - serializedName: "projectName", + files: { + serializedName: "files", type: { - name: "String" + name: "Sequence", + element: { + type: { + name: "String" + } + } } }, - repositoryName: { - serializedName: "repositoryName", + archives: { + serializedName: "archives", type: { - name: "String" + name: "Sequence", + element: { + type: { + name: "String" + } + } } }, - collaborationBranch: { - serializedName: "collaborationBranch", + configuration: { + serializedName: "conf", type: { - name: "String" + name: "Dictionary", + value: { type: { name: "String" } } } }, - rootFolder: { - serializedName: "rootFolder", + driverMemory: { + serializedName: "driverMemory", type: { name: "String" } }, - lastCommitId: { - serializedName: "lastCommitId", + driverCores: { + serializedName: "driverCores", type: { - name: "String" + name: "Number" } }, - tenantId: { - serializedName: "tenantId", + executorMemory: { + serializedName: "executorMemory", type: { - name: "Uuid" + name: "String" } }, - clientId: { - serializedName: "clientId", + executorCores: { + serializedName: "executorCores", type: { - name: "String" + name: "Number" } }, - clientSecret: { - serializedName: "clientSecret", + executorCount: { + serializedName: "numExecutors", type: { - name: "Composite", - className: "GitHubClientSecret" + name: "Number" } } } } }; -export const GitHubClientSecret: coreClient.CompositeMapper = { +export const SparkScheduler: coreClient.CompositeMapper = { type: { name: "Composite", - className: "GitHubClientSecret", + className: "SparkScheduler", modelProperties: { - byoaSecretAkvUrl: { - serializedName: "byoaSecretAkvUrl", + submittedAt: { + serializedName: "submittedAt", + nullable: true, type: { - name: "String" + name: "DateTime" } }, - byoaSecretName: { - serializedName: "byoaSecretName", + scheduledAt: { + serializedName: "scheduledAt", + nullable: true, type: { - name: "String" + name: "DateTime" } - } - } - } -}; - -export const PurviewConfiguration: coreClient.CompositeMapper = { - type: { - name: "Composite", - className: "PurviewConfiguration", - modelProperties: { - purviewResourceId: { - serializedName: "purviewResourceId", - type: { - name: "String" - } - } - } - } -}; - -export const ManagedIdentity: coreClient.CompositeMapper = { - type: { - 
name: "Composite", - className: "ManagedIdentity", - modelProperties: { - principalId: { - serializedName: "principalId", - readOnly: true, + }, + endedAt: { + serializedName: "endedAt", + nullable: true, type: { - name: "String" + name: "DateTime" } }, - tenantId: { - serializedName: "tenantId", - readOnly: true, + cancellationRequestedAt: { + serializedName: "cancellationRequestedAt", + nullable: true, type: { - name: "Uuid" + name: "DateTime" } }, - type: { - serializedName: "type", + currentState: { + serializedName: "currentState", type: { - name: "Enum", - allowedValues: ["None", "SystemAssigned"] + name: "String" } } } } }; -export const ErrorContract: coreClient.CompositeMapper = { +export const SparkServicePlugin: coreClient.CompositeMapper = { type: { name: "Composite", - className: "ErrorContract", + className: "SparkServicePlugin", modelProperties: { - error: { - serializedName: "error", + preparationStartedAt: { + serializedName: "preparationStartedAt", + nullable: true, type: { - name: "Composite", - className: "ErrorResponse" + name: "DateTime" } - } - } - } -}; - -export const ErrorResponse: coreClient.CompositeMapper = { - type: { - name: "Composite", - className: "ErrorResponse", - modelProperties: { - code: { - serializedName: "code", - readOnly: true, + }, + resourceAcquisitionStartedAt: { + serializedName: "resourceAcquisitionStartedAt", + nullable: true, type: { - name: "String" + name: "DateTime" } }, - message: { - serializedName: "message", - readOnly: true, + submissionStartedAt: { + serializedName: "submissionStartedAt", + nullable: true, type: { - name: "String" + name: "DateTime" } }, - target: { - serializedName: "target", - readOnly: true, + monitoringStartedAt: { + serializedName: "monitoringStartedAt", + nullable: true, type: { - name: "String" + name: "DateTime" } }, - details: { - serializedName: "details", - readOnly: true, + cleanupStartedAt: { + serializedName: "cleanupStartedAt", + nullable: true, type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "ErrorResponse" - } - } + name: "DateTime" } }, - additionalInfo: { - serializedName: "additionalInfo", - readOnly: true, + currentState: { + serializedName: "currentState", type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "ErrorAdditionalInfo" - } - } + name: "String" } } } } }; -export const ErrorAdditionalInfo: coreClient.CompositeMapper = { +export const SparkServiceError: coreClient.CompositeMapper = { type: { name: "Composite", - className: "ErrorAdditionalInfo", + className: "SparkServiceError", modelProperties: { - type: { - serializedName: "type", - readOnly: true, + message: { + serializedName: "message", type: { name: "String" } }, - info: { - serializedName: "info", - readOnly: true, + errorCode: { + serializedName: "errorCode", type: { - name: "any" + name: "String" + } + }, + source: { + serializedName: "source", + type: { + name: "String" } } } @@ -3298,170 +3422,177 @@ export const Sku: coreClient.CompositeMapper = { } }; -export const BigDataPoolResourceInfoListResult: coreClient.CompositeMapper = { +export const SqlScriptsListResponse: coreClient.CompositeMapper = { type: { name: "Composite", - className: "BigDataPoolResourceInfoListResult", + className: "SqlScriptsListResponse", modelProperties: { - nextLink: { - serializedName: "nextLink", - type: { - name: "String" - } - }, value: { serializedName: "value", + required: true, type: { name: "Sequence", element: { type: { name: "Composite", - className: 
"BigDataPoolResourceInfo" + className: "SqlScriptResource" } } } + }, + nextLink: { + serializedName: "nextLink", + type: { + name: "String" + } } } } }; -export const AutoScaleProperties: coreClient.CompositeMapper = { +export const SqlScriptResource: coreClient.CompositeMapper = { type: { name: "Composite", - className: "AutoScaleProperties", + className: "SqlScriptResource", modelProperties: { - minNodeCount: { - serializedName: "minNodeCount", + id: { + serializedName: "id", + readOnly: true, type: { - name: "Number" + name: "String" } }, - enabled: { - serializedName: "enabled", + name: { + serializedName: "name", + required: true, type: { - name: "Boolean" + name: "String" } }, - maxNodeCount: { - serializedName: "maxNodeCount", + type: { + serializedName: "type", + readOnly: true, type: { - name: "Number" + name: "String" } - } - } - } -}; - -export const AutoPauseProperties: coreClient.CompositeMapper = { - type: { - name: "Composite", - className: "AutoPauseProperties", - modelProperties: { - delayInMinutes: { - serializedName: "delayInMinutes", + }, + etag: { + serializedName: "etag", + readOnly: true, type: { - name: "Number" + name: "String" } }, - enabled: { - serializedName: "enabled", + properties: { + serializedName: "properties", type: { - name: "Boolean" + name: "Composite", + className: "SqlScript" } } } } }; -export const DynamicExecutorAllocation: coreClient.CompositeMapper = { +export const SqlScript: coreClient.CompositeMapper = { type: { name: "Composite", - className: "DynamicExecutorAllocation", + className: "SqlScript", + additionalProperties: { type: { name: "Object" } }, modelProperties: { - enabled: { - serializedName: "enabled", + description: { + serializedName: "description", type: { - name: "Boolean" + name: "String" } - } - } - } -}; - -export const LibraryRequirements: coreClient.CompositeMapper = { - type: { - name: "Composite", - className: "LibraryRequirements", - modelProperties: { - time: { - serializedName: "time", - readOnly: true, + }, + type: { + serializedName: "type", type: { - name: "DateTime" + name: "String" } }, content: { serializedName: "content", type: { - name: "String" + name: "Composite", + className: "SqlScriptContent" } }, - filename: { - serializedName: "filename", + folder: { + serializedName: "folder", type: { - name: "String" + name: "Composite", + className: "SqlScriptFolder" } } } } }; -export const LibraryInfo: coreClient.CompositeMapper = { +export const SqlScriptContent: coreClient.CompositeMapper = { type: { name: "Composite", - className: "LibraryInfo", - modelProperties: { - name: { - serializedName: "name", + className: "SqlScriptContent", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + query: { + serializedName: "query", + required: true, type: { name: "String" } }, - path: { - serializedName: "path", + currentConnection: { + serializedName: "currentConnection", type: { - name: "String" + name: "Composite", + className: "SqlConnection" } }, - containerName: { - serializedName: "containerName", + resultLimit: { + serializedName: "resultLimit", type: { - name: "String" + name: "Number" } }, - uploadedTimestamp: { - serializedName: "uploadedTimestamp", - readOnly: true, + metadata: { + serializedName: "metadata", type: { - name: "DateTime" + name: "Composite", + className: "SqlScriptMetadata" } - }, + } + } + } +}; + +export const SqlConnection: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "SqlConnection", + additionalProperties: { type: { name: "Object" } }, + 
modelProperties: { type: { serializedName: "type", type: { name: "String" } }, - provisioningStatus: { - serializedName: "provisioningStatus", - readOnly: true, + name: { + serializedName: "name", type: { name: "String" } }, - creatorId: { - serializedName: "creatorId", - readOnly: true, + poolName: { + serializedName: "poolName", + type: { + name: "String" + } + }, + databaseName: { + serializedName: "databaseName", type: { name: "String" } @@ -3470,26 +3601,14 @@ export const LibraryInfo: coreClient.CompositeMapper = { } }; -export const IntegrationRuntimeListResponse: coreClient.CompositeMapper = { +export const SqlScriptMetadata: coreClient.CompositeMapper = { type: { name: "Composite", - className: "IntegrationRuntimeListResponse", + className: "SqlScriptMetadata", + additionalProperties: { type: { name: "Object" } }, modelProperties: { - value: { - serializedName: "value", - required: true, - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "IntegrationRuntimeResource" - } - } - } - }, - nextLink: { - serializedName: "nextLink", + language: { + serializedName: "language", type: { name: "String" } @@ -3498,27 +3617,13 @@ export const IntegrationRuntimeListResponse: coreClient.CompositeMapper = { } }; -export const IntegrationRuntime: coreClient.CompositeMapper = { - serializedName: "IntegrationRuntime", +export const SqlScriptFolder: coreClient.CompositeMapper = { type: { name: "Composite", - className: "IntegrationRuntime", - uberParent: "IntegrationRuntime", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: { - serializedName: "type", - clientName: "type" - }, + className: "SqlScriptFolder", modelProperties: { - type: { - serializedName: "type", - required: true, - type: { - name: "String" - } - }, - description: { - serializedName: "description", + name: { + serializedName: "name", type: { name: "String" } @@ -3527,10 +3632,10 @@ export const IntegrationRuntime: coreClient.CompositeMapper = { } }; -export const LibraryListResponse: coreClient.CompositeMapper = { +export const TriggerListResponse: coreClient.CompositeMapper = { type: { name: "Composite", - className: "LibraryListResponse", + className: "TriggerListResponse", modelProperties: { value: { serializedName: "value", @@ -3540,7 +3645,7 @@ export const LibraryListResponse: coreClient.CompositeMapper = { element: { type: { name: "Composite", - className: "LibraryResource" + className: "TriggerResource" } } } @@ -3555,56 +3660,96 @@ export const LibraryListResponse: coreClient.CompositeMapper = { } }; -export const LibraryResourceProperties: coreClient.CompositeMapper = { +export const Trigger: coreClient.CompositeMapper = { + serializedName: "Trigger", type: { name: "Composite", - className: "LibraryResourceProperties", + className: "Trigger", + uberParent: "Trigger", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, modelProperties: { - name: { - serializedName: "name", - readOnly: true, + type: { + serializedName: "type", + required: true, type: { name: "String" } }, - path: { - serializedName: "path", - readOnly: true, + description: { + serializedName: "description", type: { name: "String" } }, - containerName: { - serializedName: "containerName", + runtimeState: { + serializedName: "runtimeState", readOnly: true, type: { name: "String" } }, - uploadedTimestamp: { - serializedName: "uploadedTimestamp", - readOnly: true, + annotations: { + serializedName: "annotations", 
type: { - name: "String" + name: "Sequence", + element: { + type: { + name: "any" + } + } } - }, - type: { - serializedName: "type", + } + } + } +}; + +export const TriggerSubscriptionOperationStatus: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "TriggerSubscriptionOperationStatus", + modelProperties: { + triggerName: { + serializedName: "triggerName", readOnly: true, type: { name: "String" } }, - provisioningStatus: { - serializedName: "provisioningStatus", + status: { + serializedName: "status", readOnly: true, type: { name: "String" } + } + } + } +}; + +export const TriggerRunsQueryResponse: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "TriggerRunsQueryResponse", + modelProperties: { + value: { + serializedName: "value", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "TriggerRun" + } + } + } }, - creatorId: { - serializedName: "creatorId", - readOnly: true, + continuationToken: { + serializedName: "continuationToken", type: { name: "String" } @@ -3613,153 +3758,102 @@ export const LibraryResourceProperties: coreClient.CompositeMapper = { } }; -export const LibraryResourceInfo: coreClient.CompositeMapper = { +export const TriggerRun: coreClient.CompositeMapper = { type: { name: "Composite", - className: "LibraryResourceInfo", + className: "TriggerRun", + additionalProperties: { type: { name: "Object" } }, modelProperties: { - id: { - serializedName: "id", + triggerRunId: { + serializedName: "triggerRunId", readOnly: true, type: { name: "String" } }, - recordId: { - serializedName: "recordId", - readOnly: true, - type: { - name: "Number" - } - }, - state: { - serializedName: "state", + triggerName: { + serializedName: "triggerName", readOnly: true, type: { name: "String" } }, - created: { - serializedName: "created", + triggerType: { + serializedName: "triggerType", readOnly: true, type: { name: "String" } }, - changed: { - serializedName: "changed", + triggerRunTimestamp: { + serializedName: "triggerRunTimestamp", readOnly: true, type: { - name: "String" + name: "DateTime" } }, - type: { - serializedName: "type", + status: { + serializedName: "status", readOnly: true, type: { name: "String" } }, - name: { - serializedName: "name", + message: { + serializedName: "message", readOnly: true, type: { name: "String" } }, - operationId: { - serializedName: "operationId", + properties: { + serializedName: "properties", readOnly: true, type: { - name: "String" + name: "Dictionary", + value: { type: { name: "String" } } } }, - artifactId: { - serializedName: "artifactId", + triggeredPipelines: { + serializedName: "triggeredPipelines", readOnly: true, type: { - name: "String" + name: "Dictionary", + value: { type: { name: "String" } } } } } } }; -export const OperationResult: coreClient.CompositeMapper = { +export const DataLakeStorageAccountDetails: coreClient.CompositeMapper = { type: { name: "Composite", - className: "OperationResult", + className: "DataLakeStorageAccountDetails", modelProperties: { - status: { - serializedName: "status", - readOnly: true, - type: { - name: "String" - } - }, - code: { - serializedName: "error.code", + accountUrl: { + serializedName: "accountUrl", type: { name: "String" } }, - message: { - serializedName: "error.message", + filesystem: { + serializedName: "filesystem", type: { name: "String" } - }, - target: { - serializedName: "error.target", - type: { - name: "String" - } - }, - details: { - serializedName: "error.details", - type: { - name: 
"Sequence", - element: { - type: { - name: "Composite", - className: "CloudError" - } - } - } } } } }; -export const GitHubAccessTokenRequest: coreClient.CompositeMapper = { +export const VirtualNetworkProfile: coreClient.CompositeMapper = { type: { name: "Composite", - className: "GitHubAccessTokenRequest", + className: "VirtualNetworkProfile", modelProperties: { - gitHubClientId: { - serializedName: "gitHubClientId", - required: true, - type: { - name: "String" - } - }, - gitHubClientSecret: { - serializedName: "gitHubClientSecret", - type: { - name: "Composite", - className: "GitHubClientSecret" - } - }, - gitHubAccessCode: { - serializedName: "gitHubAccessCode", - required: true, - type: { - name: "String" - } - }, - gitHubAccessTokenBaseUrl: { - serializedName: "gitHubAccessTokenBaseUrl", - required: true, + computeSubnetId: { + serializedName: "computeSubnetId", type: { name: "String" } @@ -3768,13 +3862,14 @@ export const GitHubAccessTokenRequest: coreClient.CompositeMapper = { } }; -export const GitHubAccessTokenResponse: coreClient.CompositeMapper = { +export const PrivateEndpoint: coreClient.CompositeMapper = { type: { name: "Composite", - className: "GitHubAccessTokenResponse", + className: "PrivateEndpoint", modelProperties: { - gitHubAccessToken: { - serializedName: "gitHubAccessToken", + id: { + serializedName: "id", + readOnly: true, type: { name: "String" } @@ -3783,42 +3878,26 @@ export const GitHubAccessTokenResponse: coreClient.CompositeMapper = { } }; -export const Expression: coreClient.CompositeMapper = { +export const PrivateLinkServiceConnectionState: coreClient.CompositeMapper = { type: { name: "Composite", - className: "Expression", + className: "PrivateLinkServiceConnectionState", modelProperties: { - type: { - serializedName: "type", - required: true, + status: { + serializedName: "status", type: { name: "String" } }, - value: { - serializedName: "value", - required: true, + description: { + serializedName: "description", type: { name: "String" } - } - } - } -}; - -export const SecretBase: coreClient.CompositeMapper = { - type: { - name: "Composite", - className: "SecretBase", - uberParent: "SecretBase", - polymorphicDiscriminator: { - serializedName: "type", - clientName: "type" - }, - modelProperties: { - type: { - serializedName: "type", - required: true, + }, + actionsRequired: { + serializedName: "actionsRequired", + readOnly: true, type: { name: "String" } @@ -3827,143 +3906,92 @@ export const SecretBase: coreClient.CompositeMapper = { } }; -export const StartDataFlowDebugSessionRequest: coreClient.CompositeMapper = { +export const EncryptionDetails: coreClient.CompositeMapper = { type: { name: "Composite", - className: "StartDataFlowDebugSessionRequest", + className: "EncryptionDetails", modelProperties: { - sessionId: { - serializedName: "sessionId", + doubleEncryptionEnabled: { + serializedName: "doubleEncryptionEnabled", + readOnly: true, type: { - name: "String" + name: "Boolean" } }, - dataFlow: { - serializedName: "dataFlow", + cmk: { + serializedName: "cmk", type: { name: "Composite", - className: "DataFlowResource" - } - }, - datasets: { - serializedName: "datasets", - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "DatasetResource" - } - } - } - }, - linkedServices: { - serializedName: "linkedServices", - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "LinkedServiceResource" - } - } - } - }, - staging: { - serializedName: "staging", - type: { - name: "any" - } - }, - 
debugSettings: { - serializedName: "debugSettings", - type: { - name: "any" - } - }, - incrementalDebug: { - serializedName: "incrementalDebug", - type: { - name: "Boolean" + className: "CustomerManagedKeyDetails" } } } } }; -export const StartDataFlowDebugSessionResponse: coreClient.CompositeMapper = { +export const CustomerManagedKeyDetails: coreClient.CompositeMapper = { type: { name: "Composite", - className: "StartDataFlowDebugSessionResponse", + className: "CustomerManagedKeyDetails", modelProperties: { - jobVersion: { - serializedName: "jobVersion", + status: { + serializedName: "status", + readOnly: true, type: { name: "String" } + }, + key: { + serializedName: "key", + type: { + name: "Composite", + className: "WorkspaceKeyDetails" + } } } } }; -export const DataFlowDebugPreviewDataRequest: coreClient.CompositeMapper = { +export const WorkspaceKeyDetails: coreClient.CompositeMapper = { type: { name: "Composite", - className: "DataFlowDebugPreviewDataRequest", + className: "WorkspaceKeyDetails", modelProperties: { - sessionId: { - serializedName: "sessionId", - type: { - name: "String" - } - }, - dataFlowName: { - serializedName: "dataFlowName", + name: { + serializedName: "name", type: { name: "String" } }, - streamName: { - serializedName: "streamName", + keyVaultUrl: { + serializedName: "keyVaultUrl", type: { name: "String" } - }, - rowLimits: { - serializedName: "rowLimits", - type: { - name: "Number" - } } } } }; -export const DataFlowDebugStatisticsRequest: coreClient.CompositeMapper = { +export const ManagedVirtualNetworkSettings: coreClient.CompositeMapper = { type: { name: "Composite", - className: "DataFlowDebugStatisticsRequest", + className: "ManagedVirtualNetworkSettings", modelProperties: { - sessionId: { - serializedName: "sessionId", - type: { - name: "String" - } - }, - dataFlowName: { - serializedName: "dataFlowName", + preventDataExfiltration: { + serializedName: "preventDataExfiltration", type: { - name: "String" + name: "Boolean" } }, - streamName: { - serializedName: "streamName", + linkedAccessCheckOnTargetResource: { + serializedName: "linkedAccessCheckOnTargetResource", type: { - name: "String" + name: "Boolean" } }, - columns: { - serializedName: "columns", + allowedAadTenantIdsForLinking: { + serializedName: "allowedAadTenantIdsForLinking", type: { name: "Sequence", element: { @@ -3977,73 +4005,184 @@ export const DataFlowDebugStatisticsRequest: coreClient.CompositeMapper = { } }; -export const EvaluateDataFlowExpressionRequest: coreClient.CompositeMapper = { +export const WorkspaceRepositoryConfiguration: coreClient.CompositeMapper = { type: { name: "Composite", - className: "EvaluateDataFlowExpressionRequest", + className: "WorkspaceRepositoryConfiguration", modelProperties: { - sessionId: { - serializedName: "sessionId", + type: { + serializedName: "type", type: { name: "String" } }, - dataFlowName: { - serializedName: "dataFlowName", + hostName: { + serializedName: "hostName", type: { name: "String" } }, - streamName: { - serializedName: "streamName", + accountName: { + serializedName: "accountName", type: { name: "String" } }, - rowLimits: { - serializedName: "rowLimits", + projectName: { + serializedName: "projectName", type: { - name: "Number" + name: "String" } }, - expression: { - serializedName: "expression", + repositoryName: { + serializedName: "repositoryName", type: { name: "String" } - } - } - } -}; - -export const DataFlowDebugQueryResponse: coreClient.CompositeMapper = { - type: { - name: "Composite", - className: 
"DataFlowDebugQueryResponse", - modelProperties: { - runId: { - serializedName: "runId", - type: { - name: "String" + }, + collaborationBranch: { + serializedName: "collaborationBranch", + type: { + name: "String" + } + }, + rootFolder: { + serializedName: "rootFolder", + type: { + name: "String" + } + }, + lastCommitId: { + serializedName: "lastCommitId", + type: { + name: "String" + } + }, + tenantId: { + serializedName: "tenantId", + type: { + name: "Uuid" + } + }, + clientId: { + serializedName: "clientId", + type: { + name: "String" + } + }, + clientSecret: { + serializedName: "clientSecret", + type: { + name: "Composite", + className: "GitHubClientSecret" } } } } }; -export const DataFlowDebugResultResponse: coreClient.CompositeMapper = { +export const GitHubClientSecret: coreClient.CompositeMapper = { type: { name: "Composite", - className: "DataFlowDebugResultResponse", + className: "GitHubClientSecret", modelProperties: { - status: { - serializedName: "status", + byoaSecretAkvUrl: { + serializedName: "byoaSecretAkvUrl", type: { name: "String" } }, - data: { - serializedName: "data", + byoaSecretName: { + serializedName: "byoaSecretName", + type: { + name: "String" + } + } + } + } +}; + +export const PurviewConfiguration: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "PurviewConfiguration", + modelProperties: { + purviewResourceId: { + serializedName: "purviewResourceId", + type: { + name: "String" + } + } + } + } +}; + +export const ManagedIdentity: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "ManagedIdentity", + modelProperties: { + principalId: { + serializedName: "principalId", + readOnly: true, + type: { + name: "String" + } + }, + tenantId: { + serializedName: "tenantId", + readOnly: true, + type: { + name: "Uuid" + } + }, + type: { + serializedName: "type", + type: { + name: "Enum", + allowedValues: ["None", "SystemAssigned"] + } + } + } + } +}; + +export const Expression: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "Expression", + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + value: { + serializedName: "value", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const SecretBase: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "SecretBase", + uberParent: "SecretBase", + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + type: { + serializedName: "type", + required: true, type: { name: "String" } @@ -4465,107 +4604,332 @@ export const SqlPoolReference: coreClient.CompositeMapper = { } }; -export const Transformation: coreClient.CompositeMapper = { +export const StartDataFlowDebugSessionRequest: coreClient.CompositeMapper = { type: { name: "Composite", - className: "Transformation", + className: "StartDataFlowDebugSessionRequest", modelProperties: { - name: { - serializedName: "name", - required: true, + sessionId: { + serializedName: "sessionId", type: { name: "String" } }, - description: { - serializedName: "description", + dataFlow: { + serializedName: "dataFlow", type: { - name: "String" + name: "Composite", + className: "DataFlowResource" } - } - } - } -}; - -export const DatasetLocation: coreClient.CompositeMapper = { - serializedName: "DatasetLocation", - type: { - name: "Composite", - className: "DatasetLocation", - uberParent: "DatasetLocation", - additionalProperties: { type: { name: "Object" } }, - 
polymorphicDiscriminator: { - serializedName: "type", - clientName: "type" - }, - modelProperties: { - type: { - serializedName: "type", - required: true, + }, + datasets: { + serializedName: "datasets", type: { - name: "String" + name: "Sequence", + element: { + type: { + name: "Composite", + className: "DatasetResource" + } + } } }, - folderPath: { - serializedName: "folderPath", + linkedServices: { + serializedName: "linkedServices", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "LinkedServiceResource" + } + } + } + }, + staging: { + serializedName: "staging", type: { name: "any" } }, - fileName: { - serializedName: "fileName", + debugSettings: { + serializedName: "debugSettings", type: { name: "any" } + }, + incrementalDebug: { + serializedName: "incrementalDebug", + type: { + name: "Boolean" + } } } } }; -export const DatasetDataElement: coreClient.CompositeMapper = { +export const StartDataFlowDebugSessionResponse: coreClient.CompositeMapper = { type: { name: "Composite", - className: "DatasetDataElement", + className: "StartDataFlowDebugSessionResponse", modelProperties: { - name: { - serializedName: "name", - type: { - name: "any" - } - }, - type: { - serializedName: "type", + jobVersion: { + serializedName: "jobVersion", type: { - name: "any" + name: "String" } } } } }; -export const DatasetSchemaDataElement: coreClient.CompositeMapper = { +export const DataFlowDebugPreviewDataRequest: coreClient.CompositeMapper = { type: { name: "Composite", - className: "DatasetSchemaDataElement", - additionalProperties: { type: { name: "Object" } }, + className: "DataFlowDebugPreviewDataRequest", modelProperties: { - name: { - serializedName: "name", + sessionId: { + serializedName: "sessionId", type: { - name: "any" + name: "String" } }, - type: { - serializedName: "type", + dataFlowName: { + serializedName: "dataFlowName", type: { - name: "any" + name: "String" + } + }, + streamName: { + serializedName: "streamName", + type: { + name: "String" + } + }, + rowLimits: { + serializedName: "rowLimits", + type: { + name: "Number" } } } } }; -export const DatasetStorageFormat: coreClient.CompositeMapper = { +export const DataFlowDebugStatisticsRequest: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowDebugStatisticsRequest", + modelProperties: { + sessionId: { + serializedName: "sessionId", + type: { + name: "String" + } + }, + dataFlowName: { + serializedName: "dataFlowName", + type: { + name: "String" + } + }, + streamName: { + serializedName: "streamName", + type: { + name: "String" + } + }, + columns: { + serializedName: "columns", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + } + } + } +}; + +export const EvaluateDataFlowExpressionRequest: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "EvaluateDataFlowExpressionRequest", + modelProperties: { + sessionId: { + serializedName: "sessionId", + type: { + name: "String" + } + }, + dataFlowName: { + serializedName: "dataFlowName", + type: { + name: "String" + } + }, + streamName: { + serializedName: "streamName", + type: { + name: "String" + } + }, + rowLimits: { + serializedName: "rowLimits", + type: { + name: "Number" + } + }, + expression: { + serializedName: "expression", + type: { + name: "String" + } + } + } + } +}; + +export const DataFlowDebugQueryResponse: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowDebugQueryResponse", + modelProperties: { + runId: { + 
serializedName: "runId", + type: { + name: "String" + } + } + } + } +}; + +export const DataFlowDebugResultResponse: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowDebugResultResponse", + modelProperties: { + status: { + serializedName: "status", + type: { + name: "String" + } + }, + data: { + serializedName: "data", + type: { + name: "String" + } + } + } + } +}; + +export const Transformation: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "Transformation", + modelProperties: { + name: { + serializedName: "name", + required: true, + type: { + name: "String" + } + }, + description: { + serializedName: "description", + type: { + name: "String" + } + } + } + } +}; + +export const DatasetLocation: coreClient.CompositeMapper = { + serializedName: "DatasetLocation", + type: { + name: "Composite", + className: "DatasetLocation", + uberParent: "DatasetLocation", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + folderPath: { + serializedName: "folderPath", + type: { + name: "any" + } + }, + fileName: { + serializedName: "fileName", + type: { + name: "any" + } + } + } + } +}; + +export const DatasetDataElement: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "DatasetDataElement", + modelProperties: { + name: { + serializedName: "name", + type: { + name: "any" + } + }, + type: { + serializedName: "type", + type: { + name: "any" + } + } + } + } +}; + +export const DatasetSchemaDataElement: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "DatasetSchemaDataElement", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + name: { + serializedName: "name", + type: { + name: "any" + } + }, + type: { + serializedName: "type", + type: { + name: "any" + } + } + } + } +}; + +export const DatasetStorageFormat: coreClient.CompositeMapper = { serializedName: "DatasetStorageFormat", type: { name: "Composite", @@ -4674,7 +5038,7 @@ export const ScriptAction: coreClient.CompositeMapper = { serializedName: "roles", required: true, type: { - name: "String" + name: "any" } }, parameters: { @@ -4934,27 +5298,6 @@ export const CopySource: coreClient.CompositeMapper = { } }; -export const AdditionalColumns: coreClient.CompositeMapper = { - type: { - name: "Composite", - className: "AdditionalColumns", - modelProperties: { - name: { - serializedName: "name", - type: { - name: "any" - } - }, - value: { - serializedName: "value", - type: { - name: "any" - } - } - } - } -}; - export const CopySink: coreClient.CompositeMapper = { serializedName: "CopySink", type: { @@ -5317,11 +5660,17 @@ export const OraclePartitionSettings: coreClient.CompositeMapper = { } }; -export const TeradataPartitionSettings: coreClient.CompositeMapper = { +export const AmazonRdsForOraclePartitionSettings: coreClient.CompositeMapper = { type: { name: "Composite", - className: "TeradataPartitionSettings", + className: "AmazonRdsForOraclePartitionSettings", modelProperties: { + partitionNames: { + serializedName: "partitionNames", + type: { + name: "any" + } + }, partitionColumnName: { serializedName: "partitionColumnName", type: { @@ -5344,20 +5693,47 @@ export const TeradataPartitionSettings: coreClient.CompositeMapper = { } }; -export const MongoDbCursorMethodsProperties: coreClient.CompositeMapper = { +export const 
TeradataPartitionSettings: coreClient.CompositeMapper = { type: { name: "Composite", - className: "MongoDbCursorMethodsProperties", - additionalProperties: { type: { name: "Object" } }, + className: "TeradataPartitionSettings", modelProperties: { - project: { - serializedName: "project", + partitionColumnName: { + serializedName: "partitionColumnName", type: { name: "any" } }, - sort: { - serializedName: "sort", + partitionUpperBound: { + serializedName: "partitionUpperBound", + type: { + name: "any" + } + }, + partitionLowerBound: { + serializedName: "partitionLowerBound", + type: { + name: "any" + } + } + } + } +}; + +export const MongoDbCursorMethodsProperties: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "MongoDbCursorMethodsProperties", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + project: { + serializedName: "project", + type: { + name: "any" + } + }, + sort: { + serializedName: "sort", type: { name: "any" } @@ -5557,6 +5933,27 @@ export const DWCopyCommandDefaultValue: coreClient.CompositeMapper = { } }; +export const AdditionalColumns: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "AdditionalColumns", + modelProperties: { + name: { + serializedName: "name", + type: { + name: "any" + } + }, + value: { + serializedName: "value", + type: { + name: "any" + } + } + } + } +}; + export const CopyTranslator: coreClient.CompositeMapper = { serializedName: "CopyTranslator", type: { @@ -6320,6 +6717,12 @@ export const IntegrationRuntimeDataFlowProperties: coreClient.CompositeMapper = type: { name: "Number" } + }, + cleanup: { + serializedName: "cleanup", + type: { + name: "Boolean" + } } } } @@ -6641,44 +7044,53 @@ export const ProxyResource: coreClient.CompositeMapper = { } }; -export const AzureStorageLinkedService: coreClient.CompositeMapper = { - serializedName: "AzureStorage", +export const MappingDataFlow: coreClient.CompositeMapper = { + serializedName: "MappingDataFlow", type: { name: "Composite", - className: "AzureStorageLinkedService", - uberParent: "LinkedService", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + className: "MappingDataFlow", + uberParent: "DataFlow", + polymorphicDiscriminator: DataFlow.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - connectionString: { - serializedName: "typeProperties.connectionString", - type: { - name: "any" - } - }, - accountKey: { - serializedName: "typeProperties.accountKey", + ...DataFlow.type.modelProperties, + sources: { + serializedName: "typeProperties.sources", type: { - name: "Composite", - className: "AzureKeyVaultSecretReference" + name: "Sequence", + element: { + type: { + name: "Composite", + className: "DataFlowSource" + } + } } }, - sasUri: { - serializedName: "typeProperties.sasUri", + sinks: { + serializedName: "typeProperties.sinks", type: { - name: "any" + name: "Sequence", + element: { + type: { + name: "Composite", + className: "DataFlowSink" + } + } } }, - sasToken: { - serializedName: "typeProperties.sasToken", + transformations: { + serializedName: "typeProperties.transformations", type: { - name: "Composite", - className: "AzureKeyVaultSecretReference" + name: "Sequence", + element: { + type: { + name: "Composite", + className: "Transformation" + } + } } }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + script: { + serializedName: "typeProperties.script", type: { name: 
"String" } @@ -6687,279 +7099,310 @@ export const AzureStorageLinkedService: coreClient.CompositeMapper = { } }; -export const AzureBlobStorageLinkedService: coreClient.CompositeMapper = { - serializedName: "AzureBlobStorage", +export const IntegrationRuntimeDebugResource: coreClient.CompositeMapper = { type: { name: "Composite", - className: "AzureBlobStorageLinkedService", - uberParent: "LinkedService", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + className: "IntegrationRuntimeDebugResource", modelProperties: { - ...LinkedService.type.modelProperties, - connectionString: { - serializedName: "typeProperties.connectionString", + ...SubResourceDebugResource.type.modelProperties, + properties: { + serializedName: "properties", type: { - name: "any" + name: "Composite", + className: "IntegrationRuntime" } - }, - accountKey: { - serializedName: "typeProperties.accountKey", + } + } + } +}; + +export const DataFlowDebugResource: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowDebugResource", + modelProperties: { + ...SubResourceDebugResource.type.modelProperties, + properties: { + serializedName: "properties", type: { name: "Composite", - className: "AzureKeyVaultSecretReference" + className: "DataFlow" } - }, - sasUri: { - serializedName: "typeProperties.sasUri", + } + } + } +}; + +export const DatasetDebugResource: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "DatasetDebugResource", + modelProperties: { + ...SubResourceDebugResource.type.modelProperties, + properties: { + serializedName: "properties", type: { - name: "any" + name: "Composite", + className: "Dataset" } - }, - sasToken: { - serializedName: "typeProperties.sasToken", + } + } + } +}; + +export const LinkedServiceDebugResource: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "LinkedServiceDebugResource", + modelProperties: { + ...SubResourceDebugResource.type.modelProperties, + properties: { + serializedName: "properties", type: { name: "Composite", - className: "AzureKeyVaultSecretReference" + className: "LinkedService" } - }, - serviceEndpoint: { - serializedName: "typeProperties.serviceEndpoint", + } + } + } +}; + +export const ManagedIntegrationRuntime: coreClient.CompositeMapper = { + serializedName: "Managed", + type: { + name: "Composite", + className: "ManagedIntegrationRuntime", + uberParent: "IntegrationRuntime", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: IntegrationRuntime.type.polymorphicDiscriminator, + modelProperties: { + ...IntegrationRuntime.type.modelProperties, + state: { + serializedName: "state", + readOnly: true, type: { name: "String" } }, - servicePrincipalId: { - serializedName: "typeProperties.servicePrincipalId", - type: { - name: "any" - } - }, - servicePrincipalKey: { - serializedName: "typeProperties.servicePrincipalKey", + managedVirtualNetwork: { + serializedName: "managedVirtualNetwork", type: { name: "Composite", - className: "SecretBase" - } - }, - tenant: { - serializedName: "typeProperties.tenant", - type: { - name: "any" + className: "ManagedVirtualNetworkReference" } }, - azureCloudType: { - serializedName: "typeProperties.azureCloudType", + computeProperties: { + serializedName: "typeProperties.computeProperties", type: { - name: "any" + name: "Composite", + className: "IntegrationRuntimeComputeProperties" } }, - encryptedCredential: { - serializedName: 
"typeProperties.encryptedCredential", + ssisProperties: { + serializedName: "typeProperties.ssisProperties", type: { - name: "String" + name: "Composite", + className: "IntegrationRuntimeSsisProperties" } } } } }; -export const AzureTableStorageLinkedService: coreClient.CompositeMapper = { - serializedName: "AzureTableStorage", +export const SelfHostedIntegrationRuntime: coreClient.CompositeMapper = { + serializedName: "SelfHosted", type: { name: "Composite", - className: "AzureTableStorageLinkedService", - uberParent: "LinkedService", + className: "SelfHostedIntegrationRuntime", + uberParent: "IntegrationRuntime", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: IntegrationRuntime.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - connectionString: { - serializedName: "typeProperties.connectionString", - type: { - name: "any" - } - }, - accountKey: { - serializedName: "typeProperties.accountKey", - type: { - name: "Composite", - className: "AzureKeyVaultSecretReference" - } - }, - sasUri: { - serializedName: "typeProperties.sasUri", - type: { - name: "any" - } - }, - sasToken: { - serializedName: "typeProperties.sasToken", + ...IntegrationRuntime.type.modelProperties, + linkedInfo: { + serializedName: "typeProperties.linkedInfo", type: { name: "Composite", - className: "AzureKeyVaultSecretReference" - } - }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", - type: { - name: "String" + className: "LinkedIntegrationRuntimeType" } } } } }; -export const AzureSqlDWLinkedService: coreClient.CompositeMapper = { - serializedName: "AzureSqlDW", +export const AmazonS3Dataset: coreClient.CompositeMapper = { + serializedName: "AmazonS3Object", type: { name: "Composite", - className: "AzureSqlDWLinkedService", - uberParent: "LinkedService", + className: "AmazonS3Dataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - connectionString: { - serializedName: "typeProperties.connectionString", + ...Dataset.type.modelProperties, + bucketName: { + serializedName: "typeProperties.bucketName", required: true, type: { name: "any" } }, - password: { - serializedName: "typeProperties.password", + key: { + serializedName: "typeProperties.key", type: { - name: "Composite", - className: "AzureKeyVaultSecretReference" + name: "any" } }, - servicePrincipalId: { - serializedName: "typeProperties.servicePrincipalId", + prefix: { + serializedName: "typeProperties.prefix", type: { name: "any" } }, - servicePrincipalKey: { - serializedName: "typeProperties.servicePrincipalKey", + version: { + serializedName: "typeProperties.version", type: { - name: "Composite", - className: "SecretBase" + name: "any" } }, - tenant: { - serializedName: "typeProperties.tenant", + modifiedDatetimeStart: { + serializedName: "typeProperties.modifiedDatetimeStart", type: { name: "any" } }, - azureCloudType: { - serializedName: "typeProperties.azureCloudType", + modifiedDatetimeEnd: { + serializedName: "typeProperties.modifiedDatetimeEnd", type: { name: "any" } }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + format: { + serializedName: "typeProperties.format", type: { - name: "any" + name: "Composite", + 
className: "DatasetStorageFormat" + } + }, + compression: { + serializedName: "typeProperties.compression", + type: { + name: "Composite", + className: "DatasetCompression" } } } } }; -export const SqlServerLinkedService: coreClient.CompositeMapper = { - serializedName: "SqlServer", +export const AvroDataset: coreClient.CompositeMapper = { + serializedName: "Avro", type: { name: "Composite", - className: "SqlServerLinkedService", - uberParent: "LinkedService", + className: "AvroDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - connectionString: { - serializedName: "typeProperties.connectionString", - required: true, + ...Dataset.type.modelProperties, + location: { + serializedName: "typeProperties.location", type: { - name: "any" + name: "Composite", + className: "DatasetLocation" } }, - userName: { - serializedName: "typeProperties.userName", + avroCompressionCodec: { + serializedName: "typeProperties.avroCompressionCodec", type: { name: "any" } }, - password: { - serializedName: "typeProperties.password", - type: { - name: "Composite", - className: "SecretBase" - } - }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + avroCompressionLevel: { + constraints: { + InclusiveMaximum: 9, + InclusiveMinimum: 1 + }, + serializedName: "typeProperties.avroCompressionLevel", type: { - name: "any" + name: "Number" } } } } }; -export const AzureSqlDatabaseLinkedService: coreClient.CompositeMapper = { - serializedName: "AzureSqlDatabase", +export const ExcelDataset: coreClient.CompositeMapper = { + serializedName: "Excel", type: { name: "Composite", - className: "AzureSqlDatabaseLinkedService", - uberParent: "LinkedService", + className: "ExcelDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - connectionString: { - serializedName: "typeProperties.connectionString", - required: true, + ...Dataset.type.modelProperties, + location: { + serializedName: "typeProperties.location", + type: { + name: "Composite", + className: "DatasetLocation" + } + }, + sheetName: { + serializedName: "typeProperties.sheetName", type: { name: "any" } }, - password: { - serializedName: "typeProperties.password", + range: { + serializedName: "typeProperties.range", type: { - name: "Composite", - className: "AzureKeyVaultSecretReference" + name: "any" } }, - servicePrincipalId: { - serializedName: "typeProperties.servicePrincipalId", + firstRowAsHeader: { + serializedName: "typeProperties.firstRowAsHeader", type: { name: "any" } }, - servicePrincipalKey: { - serializedName: "typeProperties.servicePrincipalKey", + compression: { + serializedName: "typeProperties.compression", type: { name: "Composite", - className: "SecretBase" + className: "DatasetCompression" } }, - tenant: { - serializedName: "typeProperties.tenant", + nullValue: { + serializedName: "typeProperties.nullValue", type: { name: "any" } - }, - azureCloudType: { - serializedName: "typeProperties.azureCloudType", + } + } + } +}; + +export const ParquetDataset: coreClient.CompositeMapper = { + serializedName: "Parquet", + type: { + name: "Composite", + className: 
"ParquetDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + location: { + serializedName: "typeProperties.location", type: { - name: "any" + name: "Composite", + className: "DatasetLocation" } }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + compressionCodec: { + serializedName: "typeProperties.compressionCodec", type: { name: "any" } @@ -6968,112 +7411,73 @@ export const AzureSqlDatabaseLinkedService: coreClient.CompositeMapper = { } }; -export const AzureSqlMILinkedService: coreClient.CompositeMapper = { - serializedName: "AzureSqlMI", +export const DelimitedTextDataset: coreClient.CompositeMapper = { + serializedName: "DelimitedText", type: { name: "Composite", - className: "AzureSqlMILinkedService", - uberParent: "LinkedService", + className: "DelimitedTextDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - connectionString: { - serializedName: "typeProperties.connectionString", - required: true, + ...Dataset.type.modelProperties, + location: { + serializedName: "typeProperties.location", + type: { + name: "Composite", + className: "DatasetLocation" + } + }, + columnDelimiter: { + serializedName: "typeProperties.columnDelimiter", type: { name: "any" } }, - password: { - serializedName: "typeProperties.password", + rowDelimiter: { + serializedName: "typeProperties.rowDelimiter", type: { - name: "Composite", - className: "AzureKeyVaultSecretReference" + name: "any" } }, - servicePrincipalId: { - serializedName: "typeProperties.servicePrincipalId", + encodingName: { + serializedName: "typeProperties.encodingName", type: { name: "any" } }, - servicePrincipalKey: { - serializedName: "typeProperties.servicePrincipalKey", + compressionCodec: { + serializedName: "typeProperties.compressionCodec", type: { - name: "Composite", - className: "SecretBase" + name: "any" } }, - tenant: { - serializedName: "typeProperties.tenant", + compressionLevel: { + serializedName: "typeProperties.compressionLevel", type: { name: "any" } }, - azureCloudType: { - serializedName: "typeProperties.azureCloudType", + quoteChar: { + serializedName: "typeProperties.quoteChar", type: { name: "any" } }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", - type: { - name: "any" - } - } - } - } -}; - -export const AzureBatchLinkedService: coreClient.CompositeMapper = { - serializedName: "AzureBatch", - type: { - name: "Composite", - className: "AzureBatchLinkedService", - uberParent: "LinkedService", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, - modelProperties: { - ...LinkedService.type.modelProperties, - accountName: { - serializedName: "typeProperties.accountName", - required: true, - type: { - name: "any" - } - }, - accessKey: { - serializedName: "typeProperties.accessKey", - type: { - name: "Composite", - className: "SecretBase" - } - }, - batchUri: { - serializedName: "typeProperties.batchUri", - required: true, + escapeChar: { + serializedName: "typeProperties.escapeChar", type: { name: "any" } }, - poolName: { - serializedName: "typeProperties.poolName", - required: true, + 
firstRowAsHeader: { + serializedName: "typeProperties.firstRowAsHeader", type: { name: "any" } }, - linkedServiceName: { - serializedName: "typeProperties.linkedServiceName", - type: { - name: "Composite", - className: "LinkedServiceReference" - } - }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + nullValue: { + serializedName: "typeProperties.nullValue", type: { name: "any" } @@ -7082,244 +7486,206 @@ export const AzureBatchLinkedService: coreClient.CompositeMapper = { } }; -export const AzureKeyVaultLinkedService: coreClient.CompositeMapper = { - serializedName: "AzureKeyVault", +export const JsonDataset: coreClient.CompositeMapper = { + serializedName: "Json", type: { name: "Composite", - className: "AzureKeyVaultLinkedService", - uberParent: "LinkedService", + className: "JsonDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - baseUrl: { - serializedName: "typeProperties.baseUrl", - required: true, + ...Dataset.type.modelProperties, + location: { + serializedName: "typeProperties.location", + type: { + name: "Composite", + className: "DatasetLocation" + } + }, + encodingName: { + serializedName: "typeProperties.encodingName", type: { name: "any" } + }, + compression: { + serializedName: "typeProperties.compression", + type: { + name: "Composite", + className: "DatasetCompression" + } } } } }; -export const CosmosDbLinkedService: coreClient.CompositeMapper = { - serializedName: "CosmosDb", +export const XmlDataset: coreClient.CompositeMapper = { + serializedName: "Xml", type: { name: "Composite", - className: "CosmosDbLinkedService", - uberParent: "LinkedService", + className: "XmlDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - connectionString: { - serializedName: "typeProperties.connectionString", + ...Dataset.type.modelProperties, + location: { + serializedName: "typeProperties.location", type: { - name: "any" + name: "Composite", + className: "DatasetLocation" } }, - accountEndpoint: { - serializedName: "typeProperties.accountEndpoint", + encodingName: { + serializedName: "typeProperties.encodingName", type: { name: "any" } }, - database: { - serializedName: "typeProperties.database", + nullValue: { + serializedName: "typeProperties.nullValue", type: { name: "any" } }, - accountKey: { - serializedName: "typeProperties.accountKey", + compression: { + serializedName: "typeProperties.compression", type: { name: "Composite", - className: "SecretBase" - } - }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", - type: { - name: "any" + className: "DatasetCompression" } } } } }; -export const DynamicsLinkedService: coreClient.CompositeMapper = { - serializedName: "Dynamics", +export const OrcDataset: coreClient.CompositeMapper = { + serializedName: "Orc", type: { name: "Composite", - className: "DynamicsLinkedService", - uberParent: "LinkedService", + className: "OrcDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: 
Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - deploymentType: { - serializedName: "typeProperties.deploymentType", - required: true, + ...Dataset.type.modelProperties, + location: { + serializedName: "typeProperties.location", type: { - name: "String" + name: "Composite", + className: "DatasetLocation" } }, - hostName: { - serializedName: "typeProperties.hostName", + orcCompressionCodec: { + serializedName: "typeProperties.orcCompressionCodec", type: { name: "any" } - }, - port: { - serializedName: "typeProperties.port", + } + } + } +}; + +export const BinaryDataset: coreClient.CompositeMapper = { + serializedName: "Binary", + type: { + name: "Composite", + className: "BinaryDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + location: { + serializedName: "typeProperties.location", type: { - name: "any" + name: "Composite", + className: "DatasetLocation" } }, - serviceUri: { - serializedName: "typeProperties.serviceUri", + compression: { + serializedName: "typeProperties.compression", type: { - name: "any" + name: "Composite", + className: "DatasetCompression" } - }, - organizationName: { - serializedName: "typeProperties.organizationName", + } + } + } +}; + +export const AzureBlobDataset: coreClient.CompositeMapper = { + serializedName: "AzureBlob", + type: { + name: "Composite", + className: "AzureBlobDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + folderPath: { + serializedName: "typeProperties.folderPath", type: { name: "any" } }, - authenticationType: { - serializedName: "typeProperties.authenticationType", - required: true, - type: { - name: "String" - } - }, - username: { - serializedName: "typeProperties.username", + tableRootLocation: { + serializedName: "typeProperties.tableRootLocation", type: { name: "any" } }, - password: { - serializedName: "typeProperties.password", + fileName: { + serializedName: "typeProperties.fileName", type: { - name: "Composite", - className: "SecretBase" + name: "any" } }, - servicePrincipalId: { - serializedName: "typeProperties.servicePrincipalId", + modifiedDatetimeStart: { + serializedName: "typeProperties.modifiedDatetimeStart", type: { name: "any" } }, - servicePrincipalCredentialType: { - serializedName: "typeProperties.servicePrincipalCredentialType", + modifiedDatetimeEnd: { + serializedName: "typeProperties.modifiedDatetimeEnd", type: { - name: "String" + name: "any" } }, - servicePrincipalCredential: { - serializedName: "typeProperties.servicePrincipalCredential", + format: { + serializedName: "typeProperties.format", type: { name: "Composite", - className: "SecretBase" + className: "DatasetStorageFormat" } }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + compression: { + serializedName: "typeProperties.compression", type: { - name: "any" + name: "Composite", + className: "DatasetCompression" } } } } }; -export const DynamicsCrmLinkedService: coreClient.CompositeMapper = { - serializedName: "DynamicsCrm", +export const AzureTableDataset: coreClient.CompositeMapper = { + serializedName: "AzureTable", type: { name: "Composite", - className: "DynamicsCrmLinkedService", - uberParent: "LinkedService", + className: 
"AzureTableDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - deploymentType: { - serializedName: "typeProperties.deploymentType", + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", required: true, - type: { - name: "String" - } - }, - hostName: { - serializedName: "typeProperties.hostName", - type: { - name: "any" - } - }, - port: { - serializedName: "typeProperties.port", - type: { - name: "any" - } - }, - serviceUri: { - serializedName: "typeProperties.serviceUri", - type: { - name: "any" - } - }, - organizationName: { - serializedName: "typeProperties.organizationName", - type: { - name: "any" - } - }, - authenticationType: { - serializedName: "typeProperties.authenticationType", - required: true, - type: { - name: "String" - } - }, - username: { - serializedName: "typeProperties.username", - type: { - name: "any" - } - }, - password: { - serializedName: "typeProperties.password", - type: { - name: "Composite", - className: "SecretBase" - } - }, - servicePrincipalId: { - serializedName: "typeProperties.servicePrincipalId", - type: { - name: "any" - } - }, - servicePrincipalCredentialType: { - serializedName: "typeProperties.servicePrincipalCredentialType", - type: { - name: "any" - } - }, - servicePrincipalCredential: { - serializedName: "typeProperties.servicePrincipalCredential", - type: { - name: "Composite", - className: "SecretBase" - } - }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", type: { name: "any" } @@ -7328,88 +7694,30 @@ export const DynamicsCrmLinkedService: coreClient.CompositeMapper = { } }; -export const CommonDataServiceForAppsLinkedService: coreClient.CompositeMapper = { - serializedName: "CommonDataServiceForApps", +export const AzureSqlTableDataset: coreClient.CompositeMapper = { + serializedName: "AzureSqlTable", type: { name: "Composite", - className: "CommonDataServiceForAppsLinkedService", - uberParent: "LinkedService", + className: "AzureSqlTableDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - deploymentType: { - serializedName: "typeProperties.deploymentType", - required: true, - type: { - name: "String" - } - }, - hostName: { - serializedName: "typeProperties.hostName", - type: { - name: "any" - } - }, - port: { - serializedName: "typeProperties.port", - type: { - name: "any" - } - }, - serviceUri: { - serializedName: "typeProperties.serviceUri", - type: { - name: "any" - } - }, - organizationName: { - serializedName: "typeProperties.organizationName", - type: { - name: "any" - } - }, - authenticationType: { - serializedName: "typeProperties.authenticationType", - required: true, - type: { - name: "String" - } - }, - username: { - serializedName: "typeProperties.username", - type: { - name: "any" - } - }, - password: { - serializedName: "typeProperties.password", - type: { - name: "Composite", - className: "SecretBase" - } - }, - servicePrincipalId: { - serializedName: "typeProperties.servicePrincipalId", + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { 
name: "any" } }, - servicePrincipalCredentialType: { - serializedName: "typeProperties.servicePrincipalCredentialType", + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", type: { name: "any" } }, - servicePrincipalCredential: { - serializedName: "typeProperties.servicePrincipalCredential", - type: { - name: "Composite", - className: "SecretBase" - } - }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + table: { + serializedName: "typeProperties.table", type: { name: "any" } @@ -7418,64 +7726,30 @@ export const CommonDataServiceForAppsLinkedService: coreClient.CompositeMapper = } }; -export const HDInsightLinkedService: coreClient.CompositeMapper = { - serializedName: "HDInsight", +export const AzureSqlMITableDataset: coreClient.CompositeMapper = { + serializedName: "AzureSqlMITable", type: { name: "Composite", - className: "HDInsightLinkedService", - uberParent: "LinkedService", + className: "AzureSqlMITableDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - clusterUri: { - serializedName: "typeProperties.clusterUri", - required: true, - type: { - name: "any" - } - }, - userName: { - serializedName: "typeProperties.userName", - type: { - name: "any" - } - }, - password: { - serializedName: "typeProperties.password", - type: { - name: "Composite", - className: "SecretBase" - } - }, - linkedServiceName: { - serializedName: "typeProperties.linkedServiceName", - type: { - name: "Composite", - className: "LinkedServiceReference" - } - }, - hcatalogLinkedServiceName: { - serializedName: "typeProperties.hcatalogLinkedServiceName", - type: { - name: "Composite", - className: "LinkedServiceReference" - } - }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { name: "any" } }, - isEspEnabled: { - serializedName: "typeProperties.isEspEnabled", + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", type: { name: "any" } }, - fileSystem: { - serializedName: "typeProperties.fileSystem", + table: { + serializedName: "typeProperties.table", type: { name: "any" } @@ -7484,38 +7758,30 @@ export const HDInsightLinkedService: coreClient.CompositeMapper = { } }; -export const FileServerLinkedService: coreClient.CompositeMapper = { - serializedName: "FileServer", +export const AzureSqlDWTableDataset: coreClient.CompositeMapper = { + serializedName: "AzureSqlDWTable", type: { name: "Composite", - className: "FileServerLinkedService", - uberParent: "LinkedService", + className: "AzureSqlDWTableDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - host: { - serializedName: "typeProperties.host", - required: true, + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { name: "any" } }, - userId: { - serializedName: "typeProperties.userId", + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", type: { name: "any" } }, - password: { - serializedName: "typeProperties.password", - 
type: { - name: "Composite", - className: "SecretBase" - } - }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + table: { + serializedName: "typeProperties.table", type: { name: "any" } @@ -7524,76 +7790,24 @@ export const FileServerLinkedService: coreClient.CompositeMapper = { } }; -export const AzureFileStorageLinkedService: coreClient.CompositeMapper = { - serializedName: "AzureFileStorage", +export const CassandraTableDataset: coreClient.CompositeMapper = { + serializedName: "CassandraTable", type: { name: "Composite", - className: "AzureFileStorageLinkedService", - uberParent: "LinkedService", + className: "CassandraTableDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - host: { - serializedName: "typeProperties.host", - required: true, + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { name: "any" } }, - userId: { - serializedName: "typeProperties.userId", - type: { - name: "any" - } - }, - password: { - serializedName: "typeProperties.password", - type: { - name: "Composite", - className: "SecretBase" - } - }, - connectionString: { - serializedName: "typeProperties.connectionString", - type: { - name: "any" - } - }, - accountKey: { - serializedName: "typeProperties.accountKey", - type: { - name: "Composite", - className: "AzureKeyVaultSecretReference" - } - }, - sasUri: { - serializedName: "typeProperties.sasUri", - type: { - name: "any" - } - }, - sasToken: { - serializedName: "typeProperties.sasToken", - type: { - name: "Composite", - className: "AzureKeyVaultSecretReference" - } - }, - fileShare: { - serializedName: "typeProperties.fileShare", - type: { - name: "any" - } - }, - snapshot: { - serializedName: "typeProperties.snapshot", - type: { - name: "any" - } - }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + keyspace: { + serializedName: "typeProperties.keyspace", type: { name: "any" } @@ -7602,37 +7816,18 @@ export const AzureFileStorageLinkedService: coreClient.CompositeMapper = { } }; -export const GoogleCloudStorageLinkedService: coreClient.CompositeMapper = { - serializedName: "GoogleCloudStorage", +export const CustomDataset: coreClient.CompositeMapper = { + serializedName: "CustomDataset", type: { name: "Composite", - className: "GoogleCloudStorageLinkedService", - uberParent: "LinkedService", + className: "CustomDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - accessKeyId: { - serializedName: "typeProperties.accessKeyId", - type: { - name: "any" - } - }, - secretAccessKey: { - serializedName: "typeProperties.secretAccessKey", - type: { - name: "Composite", - className: "SecretBase" - } - }, - serviceUrl: { - serializedName: "typeProperties.serviceUrl", - type: { - name: "any" - } - }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + ...Dataset.type.modelProperties, + typeProperties: { + serializedName: "typeProperties", type: { name: "any" } @@ -7641,66 +7836,60 @@ export const GoogleCloudStorageLinkedService: coreClient.CompositeMapper = { } }; 
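+// A minimal usage sketch (an editorial illustration, not generated output):
+// mappers like CustomDataset above are consumed by the @azure/core-client
+// serializer, which walks modelProperties/serializedName metadata to flatten
+// wire names such as "typeProperties.tableName" onto client properties, and
+// resolves polymorphic payloads through uberParent/polymorphicDiscriminator
+// and a discriminators map exported alongside these mappers. Assuming this
+// file is imported as Mappers (as the generated operation specs do):
+//
+//   import * as coreClient from "@azure/core-client";
+//   import * as Mappers from "./mappers";
+//
+//   const serializer = coreClient.createSerializer(Mappers, /* isXml */ false);
+//
+//   // The "type" discriminator on the wire selects the concrete Dataset
+//   // mapper; keys outside modelProperties survive via additionalProperties.
+//   const dataset = serializer.deserialize(
+//     Mappers.CustomDataset,
+//     { type: "CustomDataset", typeProperties: { source: "example" } },
+//     "dataset"
+//   );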
-export const OracleLinkedService: coreClient.CompositeMapper = { - serializedName: "Oracle", +export const CosmosDbSqlApiCollectionDataset: coreClient.CompositeMapper = { + serializedName: "CosmosDbSqlApiCollection", type: { name: "Composite", - className: "OracleLinkedService", - uberParent: "LinkedService", + className: "CosmosDbSqlApiCollectionDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - connectionString: { - serializedName: "typeProperties.connectionString", + ...Dataset.type.modelProperties, + collectionName: { + serializedName: "typeProperties.collectionName", required: true, type: { name: "any" } - }, - password: { - serializedName: "typeProperties.password", - type: { - name: "Composite", - className: "AzureKeyVaultSecretReference" - } - }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", - type: { - name: "any" - } } } } }; -export const AzureMySqlLinkedService: coreClient.CompositeMapper = { - serializedName: "AzureMySql", +export const DocumentDbCollectionDataset: coreClient.CompositeMapper = { + serializedName: "DocumentDbCollection", type: { name: "Composite", - className: "AzureMySqlLinkedService", - uberParent: "LinkedService", + className: "DocumentDbCollectionDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - connectionString: { - serializedName: "typeProperties.connectionString", + ...Dataset.type.modelProperties, + collectionName: { + serializedName: "typeProperties.collectionName", required: true, type: { name: "any" } - }, - password: { - serializedName: "typeProperties.password", - type: { - name: "Composite", - className: "AzureKeyVaultSecretReference" - } - }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + } + } + } +}; + +export const DynamicsEntityDataset: coreClient.CompositeMapper = { + serializedName: "DynamicsEntity", + type: { + name: "Composite", + className: "DynamicsEntityDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + entityName: { + serializedName: "typeProperties.entityName", type: { name: "any" } @@ -7709,32 +7898,38 @@ export const AzureMySqlLinkedService: coreClient.CompositeMapper = { } }; -export const MySqlLinkedService: coreClient.CompositeMapper = { - serializedName: "MySql", +export const DynamicsCrmEntityDataset: coreClient.CompositeMapper = { + serializedName: "DynamicsCrmEntity", type: { name: "Composite", - className: "MySqlLinkedService", - uberParent: "LinkedService", + className: "DynamicsCrmEntityDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - connectionString: { - serializedName: "typeProperties.connectionString", - required: true, + ...Dataset.type.modelProperties, + entityName: { + 
serializedName: "typeProperties.entityName", type: { name: "any" } - }, - password: { - serializedName: "typeProperties.password", - type: { - name: "Composite", - className: "AzureKeyVaultSecretReference" - } - }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + } + } + } +}; + +export const CommonDataServiceForAppsEntityDataset: coreClient.CompositeMapper = { + serializedName: "CommonDataServiceForAppsEntity", + type: { + name: "Composite", + className: "CommonDataServiceForAppsEntityDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + entityName: { + serializedName: "typeProperties.entityName", type: { name: "any" } @@ -7743,91 +7938,105 @@ export const MySqlLinkedService: coreClient.CompositeMapper = { } }; -export const PostgreSqlLinkedService: coreClient.CompositeMapper = { - serializedName: "PostgreSql", +export const AzureDataLakeStoreDataset: coreClient.CompositeMapper = { + serializedName: "AzureDataLakeStoreFile", type: { name: "Composite", - className: "PostgreSqlLinkedService", - uberParent: "LinkedService", + className: "AzureDataLakeStoreDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - connectionString: { - serializedName: "typeProperties.connectionString", - required: true, + ...Dataset.type.modelProperties, + folderPath: { + serializedName: "typeProperties.folderPath", type: { name: "any" } }, - password: { - serializedName: "typeProperties.password", + fileName: { + serializedName: "typeProperties.fileName", + type: { + name: "any" + } + }, + format: { + serializedName: "typeProperties.format", type: { name: "Composite", - className: "AzureKeyVaultSecretReference" + className: "DatasetStorageFormat" } }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + compression: { + serializedName: "typeProperties.compression", type: { - name: "any" + name: "Composite", + className: "DatasetCompression" } } } } }; -export const SybaseLinkedService: coreClient.CompositeMapper = { - serializedName: "Sybase", +export const AzureBlobFSDataset: coreClient.CompositeMapper = { + serializedName: "AzureBlobFSFile", type: { name: "Composite", - className: "SybaseLinkedService", - uberParent: "LinkedService", + className: "AzureBlobFSDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - server: { - serializedName: "typeProperties.server", - required: true, + ...Dataset.type.modelProperties, + folderPath: { + serializedName: "typeProperties.folderPath", type: { name: "any" } }, - database: { - serializedName: "typeProperties.database", - required: true, + fileName: { + serializedName: "typeProperties.fileName", type: { name: "any" } }, - schema: { - serializedName: "typeProperties.schema", + format: { + serializedName: "typeProperties.format", type: { - name: "any" + name: "Composite", + className: "DatasetStorageFormat" } }, - authenticationType: { - serializedName: "typeProperties.authenticationType", + 
compression: { + serializedName: "typeProperties.compression", type: { - name: "String" + name: "Composite", + className: "DatasetCompression" } - }, - username: { - serializedName: "typeProperties.username", + } + } + } +}; + +export const Office365Dataset: coreClient.CompositeMapper = { + serializedName: "Office365Table", + type: { + name: "Composite", + className: "Office365Dataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + required: true, type: { name: "any" } }, - password: { - serializedName: "typeProperties.password", - type: { - name: "Composite", - className: "SecretBase" - } - }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + predicate: { + serializedName: "typeProperties.predicate", type: { name: "any" } @@ -7836,69 +8045,77 @@ export const SybaseLinkedService: coreClient.CompositeMapper = { } }; -export const Db2LinkedService: coreClient.CompositeMapper = { - serializedName: "Db2", +export const FileShareDataset: coreClient.CompositeMapper = { + serializedName: "FileShare", type: { name: "Composite", - className: "Db2LinkedService", - uberParent: "LinkedService", + className: "FileShareDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - connectionString: { - serializedName: "typeProperties.connectionString", + ...Dataset.type.modelProperties, + folderPath: { + serializedName: "typeProperties.folderPath", type: { name: "any" } }, - server: { - serializedName: "typeProperties.server", - required: true, + fileName: { + serializedName: "typeProperties.fileName", type: { name: "any" } }, - database: { - serializedName: "typeProperties.database", - required: true, + modifiedDatetimeStart: { + serializedName: "typeProperties.modifiedDatetimeStart", type: { name: "any" } }, - authenticationType: { - serializedName: "typeProperties.authenticationType", - type: { - name: "String" - } - }, - username: { - serializedName: "typeProperties.username", + modifiedDatetimeEnd: { + serializedName: "typeProperties.modifiedDatetimeEnd", type: { name: "any" } }, - password: { - serializedName: "typeProperties.password", + format: { + serializedName: "typeProperties.format", type: { name: "Composite", - className: "SecretBase" + className: "DatasetStorageFormat" } }, - packageCollection: { - serializedName: "typeProperties.packageCollection", + fileFilter: { + serializedName: "typeProperties.fileFilter", type: { name: "any" } }, - certificateCommonName: { - serializedName: "typeProperties.certificateCommonName", + compression: { + serializedName: "typeProperties.compression", type: { - name: "any" + name: "Composite", + className: "DatasetCompression" } - }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + } + } + } +}; + +export const MongoDbCollectionDataset: coreClient.CompositeMapper = { + serializedName: "MongoDbCollection", + type: { + name: "Composite", + className: "MongoDbCollectionDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + 
...Dataset.type.modelProperties, + collectionName: { + serializedName: "typeProperties.collectionName", + required: true, type: { name: "any" } @@ -7907,49 +8124,81 @@ export const Db2LinkedService: coreClient.CompositeMapper = { } }; -export const TeradataLinkedService: coreClient.CompositeMapper = { - serializedName: "Teradata", +export const MongoDbAtlasCollectionDataset: coreClient.CompositeMapper = { + serializedName: "MongoDbAtlasCollection", type: { name: "Composite", - className: "TeradataLinkedService", - uberParent: "LinkedService", + className: "MongoDbAtlasCollectionDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - connectionString: { - serializedName: "typeProperties.connectionString", + ...Dataset.type.modelProperties, + collection: { + serializedName: "typeProperties.collection", + required: true, type: { name: "any" } - }, - server: { - serializedName: "typeProperties.server", + } + } + } +}; + +export const MongoDbV2CollectionDataset: coreClient.CompositeMapper = { + serializedName: "MongoDbV2Collection", + type: { + name: "Composite", + className: "MongoDbV2CollectionDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + collection: { + serializedName: "typeProperties.collection", + required: true, type: { name: "any" } - }, - authenticationType: { - serializedName: "typeProperties.authenticationType", - type: { - name: "String" - } - }, - username: { - serializedName: "typeProperties.username", + } + } + } +}; + +export const CosmosDbMongoDbApiCollectionDataset: coreClient.CompositeMapper = { + serializedName: "CosmosDbMongoDbApiCollection", + type: { + name: "Composite", + className: "CosmosDbMongoDbApiCollectionDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + collection: { + serializedName: "typeProperties.collection", + required: true, type: { name: "any" } - }, - password: { - serializedName: "typeProperties.password", - type: { - name: "Composite", - className: "SecretBase" - } - }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + } + } + } +}; + +export const ODataResourceDataset: coreClient.CompositeMapper = { + serializedName: "ODataResource", + type: { + name: "Composite", + className: "ODataResourceDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + path: { + serializedName: "typeProperties.path", type: { name: "any" } @@ -7958,57 +8207,56 @@ export const TeradataLinkedService: coreClient.CompositeMapper = { } }; -export const AzureMLLinkedService: coreClient.CompositeMapper = { - serializedName: "AzureML", +export const OracleTableDataset: coreClient.CompositeMapper = { + serializedName: "OracleTable", type: { name: "Composite", - className: "AzureMLLinkedService", - uberParent: "LinkedService", + className: "OracleTableDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - 
polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - mlEndpoint: { - serializedName: "typeProperties.mlEndpoint", - required: true, + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { name: "any" } }, - apiKey: { - serializedName: "typeProperties.apiKey", + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", type: { - name: "Composite", - className: "SecretBase" + name: "any" } }, - updateResourceEndpoint: { - serializedName: "typeProperties.updateResourceEndpoint", + table: { + serializedName: "typeProperties.table", type: { name: "any" } - }, - servicePrincipalId: { - serializedName: "typeProperties.servicePrincipalId", + } + } + } +}; + +export const AmazonRdsForOracleTableDataset: coreClient.CompositeMapper = { + serializedName: "AmazonRdsForOracleTable", + type: { + name: "Composite", + className: "AmazonRdsForOracleTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", type: { name: "any" } }, - servicePrincipalKey: { - serializedName: "typeProperties.servicePrincipalKey", - type: { - name: "Composite", - className: "SecretBase" - } - }, - tenant: { - serializedName: "typeProperties.tenant", - type: { - name: "any" - } - }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + table: { + serializedName: "typeProperties.table", type: { name: "any" } @@ -8017,58 +8265,82 @@ export const AzureMLLinkedService: coreClient.CompositeMapper = { } }; -export const AzureMLServiceLinkedService: coreClient.CompositeMapper = { - serializedName: "AzureMLService", +export const TeradataTableDataset: coreClient.CompositeMapper = { + serializedName: "TeradataTable", type: { name: "Composite", - className: "AzureMLServiceLinkedService", - uberParent: "LinkedService", + className: "TeradataTableDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - subscriptionId: { - serializedName: "typeProperties.subscriptionId", - required: true, + ...Dataset.type.modelProperties, + database: { + serializedName: "typeProperties.database", type: { name: "any" } }, - resourceGroupName: { - serializedName: "typeProperties.resourceGroupName", - required: true, + table: { + serializedName: "typeProperties.table", type: { name: "any" } - }, - mlWorkspaceName: { - serializedName: "typeProperties.mlWorkspaceName", - required: true, + } + } + } +}; + +export const AzureMySqlTableDataset: coreClient.CompositeMapper = { + serializedName: "AzureMySqlTable", + type: { + name: "Composite", + className: "AzureMySqlTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { name: "any" } }, - servicePrincipalId: { - serializedName: "typeProperties.servicePrincipalId", + table: { + serializedName: 
"typeProperties.table", type: { name: "any" } - }, - servicePrincipalKey: { - serializedName: "typeProperties.servicePrincipalKey", + } + } + } +}; + +export const AmazonRedshiftTableDataset: coreClient.CompositeMapper = { + serializedName: "AmazonRedshiftTable", + type: { + name: "Composite", + className: "AmazonRedshiftTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { - name: "Composite", - className: "SecretBase" + name: "any" } }, - tenant: { - serializedName: "typeProperties.tenant", + table: { + serializedName: "typeProperties.table", type: { name: "any" } }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", type: { name: "any" } @@ -8077,51 +8349,30 @@ export const AzureMLServiceLinkedService: coreClient.CompositeMapper = { } }; -export const OdbcLinkedService: coreClient.CompositeMapper = { - serializedName: "Odbc", +export const Db2TableDataset: coreClient.CompositeMapper = { + serializedName: "Db2Table", type: { name: "Composite", - className: "OdbcLinkedService", - uberParent: "LinkedService", + className: "Db2TableDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - connectionString: { - serializedName: "typeProperties.connectionString", - required: true, - type: { - name: "any" - } - }, - authenticationType: { - serializedName: "typeProperties.authenticationType", + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { name: "any" } }, - credential: { - serializedName: "typeProperties.credential", - type: { - name: "Composite", - className: "SecretBase" - } - }, - userName: { - serializedName: "typeProperties.userName", + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", type: { name: "any" } }, - password: { - serializedName: "typeProperties.password", - type: { - name: "Composite", - className: "SecretBase" - } - }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + table: { + serializedName: "typeProperties.table", type: { name: "any" } @@ -8130,51 +8381,78 @@ export const OdbcLinkedService: coreClient.CompositeMapper = { } }; -export const InformixLinkedService: coreClient.CompositeMapper = { - serializedName: "Informix", +export const RelationalTableDataset: coreClient.CompositeMapper = { + serializedName: "RelationalTable", type: { name: "Composite", - className: "InformixLinkedService", - uberParent: "LinkedService", + className: "RelationalTableDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - connectionString: { - serializedName: "typeProperties.connectionString", - required: true, + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { name: "any" } - }, - authenticationType: { - serializedName: 
"typeProperties.authenticationType", + } + } + } +}; + +export const InformixTableDataset: coreClient.CompositeMapper = { + serializedName: "InformixTable", + type: { + name: "Composite", + className: "InformixTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { name: "any" } - }, - credential: { - serializedName: "typeProperties.credential", - type: { - name: "Composite", - className: "SecretBase" - } - }, - userName: { - serializedName: "typeProperties.userName", + } + } + } +}; + +export const OdbcTableDataset: coreClient.CompositeMapper = { + serializedName: "OdbcTable", + type: { + name: "Composite", + className: "OdbcTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { name: "any" } - }, - password: { - serializedName: "typeProperties.password", - type: { - name: "Composite", - className: "SecretBase" - } - }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + } + } + } +}; + +export const MySqlTableDataset: coreClient.CompositeMapper = { + serializedName: "MySqlTable", + type: { + name: "Composite", + className: "MySqlTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { name: "any" } @@ -8183,51 +8461,90 @@ export const InformixLinkedService: coreClient.CompositeMapper = { } }; -export const MicrosoftAccessLinkedService: coreClient.CompositeMapper = { - serializedName: "MicrosoftAccess", +export const PostgreSqlTableDataset: coreClient.CompositeMapper = { + serializedName: "PostgreSqlTable", type: { name: "Composite", - className: "MicrosoftAccessLinkedService", - uberParent: "LinkedService", + className: "PostgreSqlTableDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - connectionString: { - serializedName: "typeProperties.connectionString", - required: true, + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { name: "any" } }, - authenticationType: { - serializedName: "typeProperties.authenticationType", + table: { + serializedName: "typeProperties.table", type: { name: "any" } }, - credential: { - serializedName: "typeProperties.credential", + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", type: { - name: "Composite", - className: "SecretBase" + name: "any" } - }, - userName: { - serializedName: "typeProperties.userName", + } + } + } +}; + +export const MicrosoftAccessTableDataset: coreClient.CompositeMapper = { + serializedName: "MicrosoftAccessTable", + type: { + name: "Composite", + className: "MicrosoftAccessTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: 
Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { name: "any" } - }, - password: { - serializedName: "typeProperties.password", + } + } + } +}; + +export const SalesforceObjectDataset: coreClient.CompositeMapper = { + serializedName: "SalesforceObject", + type: { + name: "Composite", + className: "SalesforceObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + objectApiName: { + serializedName: "typeProperties.objectApiName", type: { - name: "Composite", - className: "SecretBase" + name: "any" } - }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + } + } + } +}; + +export const SalesforceServiceCloudObjectDataset: coreClient.CompositeMapper = { + serializedName: "SalesforceServiceCloudObject", + type: { + name: "Composite", + className: "SalesforceServiceCloudObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + objectApiName: { + serializedName: "typeProperties.objectApiName", type: { name: "any" } @@ -8236,141 +8553,235 @@ export const MicrosoftAccessLinkedService: coreClient.CompositeMapper = { } }; -export const HdfsLinkedService: coreClient.CompositeMapper = { - serializedName: "Hdfs", +export const SybaseTableDataset: coreClient.CompositeMapper = { + serializedName: "SybaseTable", type: { name: "Composite", - className: "HdfsLinkedService", - uberParent: "LinkedService", + className: "SybaseTableDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - url: { - serializedName: "typeProperties.url", - required: true, + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { name: "any" } - }, - authenticationType: { - serializedName: "typeProperties.authenticationType", + } + } + } +}; + +export const SapBwCubeDataset: coreClient.CompositeMapper = { + serializedName: "SapBwCube", + type: { + name: "Composite", + className: "SapBwCubeDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties + } + } +}; + +export const SapCloudForCustomerResourceDataset: coreClient.CompositeMapper = { + serializedName: "SapCloudForCustomerResource", + type: { + name: "Composite", + className: "SapCloudForCustomerResourceDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + path: { + serializedName: "typeProperties.path", + required: true, type: { name: "any" } - }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + } + } + } +}; + +export const SapEccResourceDataset: coreClient.CompositeMapper = { + serializedName: "SapEccResource", + type: { + name: "Composite", + className: "SapEccResourceDataset", + uberParent: 
"Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + path: { + serializedName: "typeProperties.path", + required: true, type: { name: "any" } - }, - userName: { - serializedName: "typeProperties.userName", + } + } + } +}; + +export const SapHanaTableDataset: coreClient.CompositeMapper = { + serializedName: "SapHanaTable", + type: { + name: "Composite", + className: "SapHanaTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", type: { name: "any" } }, - password: { - serializedName: "typeProperties.password", + table: { + serializedName: "typeProperties.table", type: { - name: "Composite", - className: "SecretBase" + name: "any" } } } } }; -export const ODataLinkedService: coreClient.CompositeMapper = { - serializedName: "OData", +export const SapOpenHubTableDataset: coreClient.CompositeMapper = { + serializedName: "SapOpenHubTable", type: { name: "Composite", - className: "ODataLinkedService", - uberParent: "LinkedService", + className: "SapOpenHubTableDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - url: { - serializedName: "typeProperties.url", + ...Dataset.type.modelProperties, + openHubDestinationName: { + serializedName: "typeProperties.openHubDestinationName", required: true, type: { name: "any" } }, - authenticationType: { - serializedName: "typeProperties.authenticationType", - type: { - name: "String" - } - }, - userName: { - serializedName: "typeProperties.userName", + excludeLastRequest: { + serializedName: "typeProperties.excludeLastRequest", type: { name: "any" } }, - password: { - serializedName: "typeProperties.password", + baseRequestId: { + serializedName: "typeProperties.baseRequestId", type: { - name: "Composite", - className: "SecretBase" + name: "any" } - }, - tenant: { - serializedName: "typeProperties.tenant", + } + } + } +}; + +export const SqlServerTableDataset: coreClient.CompositeMapper = { + serializedName: "SqlServerTable", + type: { + name: "Composite", + className: "SqlServerTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { name: "any" } }, - servicePrincipalId: { - serializedName: "typeProperties.servicePrincipalId", + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", type: { name: "any" } }, - azureCloudType: { - serializedName: "typeProperties.azureCloudType", + table: { + serializedName: "typeProperties.table", type: { name: "any" } - }, - aadResourceId: { - serializedName: "typeProperties.aadResourceId", + } + } + } +}; + +export const AmazonRdsForSqlServerTableDataset: coreClient.CompositeMapper = { + serializedName: "AmazonRdsForSqlServerTable", + type: { + name: "Composite", + className: "AmazonRdsForSqlServerTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: 
"Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", type: { name: "any" } }, - aadServicePrincipalCredentialType: { - serializedName: "typeProperties.aadServicePrincipalCredentialType", + table: { + serializedName: "typeProperties.table", type: { - name: "String" + name: "any" } - }, - servicePrincipalKey: { - serializedName: "typeProperties.servicePrincipalKey", - type: { - name: "Composite", - className: "SecretBase" + } + } + } +}; + +export const RestResourceDataset: coreClient.CompositeMapper = { + serializedName: "RestResource", + type: { + name: "Composite", + className: "RestResourceDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + relativeUrl: { + serializedName: "typeProperties.relativeUrl", + type: { + name: "any" } }, - servicePrincipalEmbeddedCert: { - serializedName: "typeProperties.servicePrincipalEmbeddedCert", + requestMethod: { + serializedName: "typeProperties.requestMethod", type: { - name: "Composite", - className: "SecretBase" + name: "any" } }, - servicePrincipalEmbeddedCertPassword: { - serializedName: "typeProperties.servicePrincipalEmbeddedCertPassword", + requestBody: { + serializedName: "typeProperties.requestBody", type: { - name: "Composite", - className: "SecretBase" + name: "any" } }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + additionalHeaders: { + serializedName: "typeProperties.additionalHeaders", + type: { + name: "any" + } + }, + paginationRules: { + serializedName: "typeProperties.paginationRules", type: { name: "any" } @@ -8379,71 +8790,46 @@ export const ODataLinkedService: coreClient.CompositeMapper = { } }; -export const WebLinkedService: coreClient.CompositeMapper = { - serializedName: "Web", +export const SapTableResourceDataset: coreClient.CompositeMapper = { + serializedName: "SapTableResource", type: { name: "Composite", - className: "WebLinkedService", - uberParent: "LinkedService", + className: "SapTableResourceDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - typeProperties: { - serializedName: "typeProperties", + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + required: true, type: { - name: "Composite", - className: "WebLinkedServiceTypeProperties" + name: "any" } } } } }; -export const CassandraLinkedService: coreClient.CompositeMapper = { - serializedName: "Cassandra", +export const WebTableDataset: coreClient.CompositeMapper = { + serializedName: "WebTable", type: { name: "Composite", - className: "CassandraLinkedService", - uberParent: "LinkedService", + className: "WebTableDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - host: { - serializedName: "typeProperties.host", + ...Dataset.type.modelProperties, + index: { + serializedName: "typeProperties.index", 
required: true, type: { name: "any" } }, - authenticationType: { - serializedName: "typeProperties.authenticationType", - type: { - name: "any" - } - }, - port: { - serializedName: "typeProperties.port", - type: { - name: "any" - } - }, - username: { - serializedName: "typeProperties.username", - type: { - name: "any" - } - }, - password: { - serializedName: "typeProperties.password", - type: { - name: "Composite", - className: "SecretBase" - } - }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + path: { + serializedName: "typeProperties.path", type: { name: "any" } @@ -8452,103 +8838,91 @@ export const CassandraLinkedService: coreClient.CompositeMapper = { } }; -export const MongoDbLinkedService: coreClient.CompositeMapper = { - serializedName: "MongoDb", +export const AzureSearchIndexDataset: coreClient.CompositeMapper = { + serializedName: "AzureSearchIndex", type: { name: "Composite", - className: "MongoDbLinkedService", - uberParent: "LinkedService", + className: "AzureSearchIndexDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - server: { - serializedName: "typeProperties.server", - required: true, - type: { - name: "any" - } - }, - authenticationType: { - serializedName: "typeProperties.authenticationType", - type: { - name: "String" - } - }, - databaseName: { - serializedName: "typeProperties.databaseName", + ...Dataset.type.modelProperties, + indexName: { + serializedName: "typeProperties.indexName", required: true, type: { name: "any" } - }, - username: { - serializedName: "typeProperties.username", + } + } + } +}; + +export const HttpDataset: coreClient.CompositeMapper = { + serializedName: "HttpFile", + type: { + name: "Composite", + className: "HttpDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + relativeUrl: { + serializedName: "typeProperties.relativeUrl", type: { name: "any" } }, - password: { - serializedName: "typeProperties.password", - type: { - name: "Composite", - className: "SecretBase" - } - }, - authSource: { - serializedName: "typeProperties.authSource", + requestMethod: { + serializedName: "typeProperties.requestMethod", type: { name: "any" } }, - port: { - serializedName: "typeProperties.port", + requestBody: { + serializedName: "typeProperties.requestBody", type: { name: "any" } }, - enableSsl: { - serializedName: "typeProperties.enableSsl", + additionalHeaders: { + serializedName: "typeProperties.additionalHeaders", type: { name: "any" } }, - allowSelfSignedServerCert: { - serializedName: "typeProperties.allowSelfSignedServerCert", + format: { + serializedName: "typeProperties.format", type: { - name: "any" + name: "Composite", + className: "DatasetStorageFormat" } }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + compression: { + serializedName: "typeProperties.compression", type: { - name: "any" + name: "Composite", + className: "DatasetCompression" } } } } }; -export const MongoDbAtlasLinkedService: coreClient.CompositeMapper = { - serializedName: "MongoDbAtlas", +export const AmazonMWSObjectDataset: coreClient.CompositeMapper = { + serializedName: "AmazonMWSObject", type: { name: 
"Composite", - className: "MongoDbAtlasLinkedService", - uberParent: "LinkedService", + className: "AmazonMWSObjectDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - connectionString: { - serializedName: "typeProperties.connectionString", - required: true, - type: { - name: "any" - } - }, - database: { - serializedName: "typeProperties.database", - required: true, + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { name: "any" } @@ -8557,26 +8931,30 @@ export const MongoDbAtlasLinkedService: coreClient.CompositeMapper = { } }; -export const MongoDbV2LinkedService: coreClient.CompositeMapper = { - serializedName: "MongoDbV2", +export const AzurePostgreSqlTableDataset: coreClient.CompositeMapper = { + serializedName: "AzurePostgreSqlTable", type: { name: "Composite", - className: "MongoDbV2LinkedService", - uberParent: "LinkedService", + className: "AzurePostgreSqlTableDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - connectionString: { - serializedName: "typeProperties.connectionString", - required: true, + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { name: "any" } }, - database: { - serializedName: "typeProperties.database", - required: true, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + }, + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", type: { name: "any" } @@ -8585,26 +8963,18 @@ export const MongoDbV2LinkedService: coreClient.CompositeMapper = { } }; -export const CosmosDbMongoDbApiLinkedService: coreClient.CompositeMapper = { - serializedName: "CosmosDbMongoDbApi", +export const ConcurObjectDataset: coreClient.CompositeMapper = { + serializedName: "ConcurObject", type: { name: "Composite", - className: "CosmosDbMongoDbApiLinkedService", - uberParent: "LinkedService", + className: "ConcurObjectDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - connectionString: { - serializedName: "typeProperties.connectionString", - required: true, - type: { - name: "any" - } - }, - database: { - serializedName: "typeProperties.database", - required: true, + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { name: "any" } @@ -8613,68 +8983,102 @@ export const CosmosDbMongoDbApiLinkedService: coreClient.CompositeMapper = { } }; -export const AzureDataLakeStoreLinkedService: coreClient.CompositeMapper = { - serializedName: "AzureDataLakeStore", +export const CouchbaseTableDataset: coreClient.CompositeMapper = { + serializedName: "CouchbaseTable", type: { name: "Composite", - className: "AzureDataLakeStoreLinkedService", - uberParent: "LinkedService", + className: "CouchbaseTableDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - 
polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - dataLakeStoreUri: { - serializedName: "typeProperties.dataLakeStoreUri", - required: true, + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { name: "any" } - }, - servicePrincipalId: { - serializedName: "typeProperties.servicePrincipalId", + } + } + } +}; + +export const DrillTableDataset: coreClient.CompositeMapper = { + serializedName: "DrillTable", + type: { + name: "Composite", + className: "DrillTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { name: "any" } }, - servicePrincipalKey: { - serializedName: "typeProperties.servicePrincipalKey", - type: { - name: "Composite", - className: "SecretBase" - } - }, - tenant: { - serializedName: "typeProperties.tenant", + table: { + serializedName: "typeProperties.table", type: { name: "any" } }, - azureCloudType: { - serializedName: "typeProperties.azureCloudType", + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", type: { name: "any" } - }, - accountName: { - serializedName: "typeProperties.accountName", + } + } + } +}; + +export const EloquaObjectDataset: coreClient.CompositeMapper = { + serializedName: "EloquaObject", + type: { + name: "Composite", + className: "EloquaObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { name: "any" } - }, - subscriptionId: { - serializedName: "typeProperties.subscriptionId", + } + } + } +}; + +export const GoogleBigQueryObjectDataset: coreClient.CompositeMapper = { + serializedName: "GoogleBigQueryObject", + type: { + name: "Composite", + className: "GoogleBigQueryObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { name: "any" } }, - resourceGroupName: { - serializedName: "typeProperties.resourceGroupName", + table: { + serializedName: "typeProperties.table", type: { name: "any" } }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + dataset: { + serializedName: "typeProperties.dataset", type: { name: "any" } @@ -8683,56 +9087,82 @@ export const AzureDataLakeStoreLinkedService: coreClient.CompositeMapper = { } }; -export const AzureBlobFSLinkedService: coreClient.CompositeMapper = { - serializedName: "AzureBlobFS", +export const GreenplumTableDataset: coreClient.CompositeMapper = { + serializedName: "GreenplumTable", type: { name: "Composite", - className: "AzureBlobFSLinkedService", - uberParent: "LinkedService", + className: "GreenplumTableDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - 
...LinkedService.type.modelProperties, - url: { - serializedName: "typeProperties.url", - required: true, + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { name: "any" } }, - accountKey: { - serializedName: "typeProperties.accountKey", + table: { + serializedName: "typeProperties.table", type: { name: "any" } }, - servicePrincipalId: { - serializedName: "typeProperties.servicePrincipalId", + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", type: { name: "any" } - }, - servicePrincipalKey: { - serializedName: "typeProperties.servicePrincipalKey", + } + } + } +}; + +export const HBaseObjectDataset: coreClient.CompositeMapper = { + serializedName: "HBaseObject", + type: { + name: "Composite", + className: "HBaseObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { - name: "Composite", - className: "SecretBase" + name: "any" } - }, - tenant: { - serializedName: "typeProperties.tenant", + } + } + } +}; + +export const HiveObjectDataset: coreClient.CompositeMapper = { + serializedName: "HiveObject", + type: { + name: "Composite", + className: "HiveObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { name: "any" } }, - azureCloudType: { - serializedName: "typeProperties.azureCloudType", + table: { + serializedName: "typeProperties.table", type: { name: "any" } }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", type: { name: "any" } @@ -8741,46 +9171,50 @@ export const AzureBlobFSLinkedService: coreClient.CompositeMapper = { } }; -export const Office365LinkedService: coreClient.CompositeMapper = { - serializedName: "Office365", +export const HubspotObjectDataset: coreClient.CompositeMapper = { + serializedName: "HubspotObject", type: { name: "Composite", - className: "Office365LinkedService", - uberParent: "LinkedService", + className: "HubspotObjectDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - office365TenantId: { - serializedName: "typeProperties.office365TenantId", - required: true, + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { name: "any" } - }, - servicePrincipalTenantId: { - serializedName: "typeProperties.servicePrincipalTenantId", - required: true, + } + } + } +}; + +export const ImpalaObjectDataset: coreClient.CompositeMapper = { + serializedName: "ImpalaObject", + type: { + name: "Composite", + className: "ImpalaObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { name: "any" } }, - servicePrincipalId: { - serializedName: 
"typeProperties.servicePrincipalId", - required: true, + table: { + serializedName: "typeProperties.table", type: { name: "any" } }, - servicePrincipalKey: { - serializedName: "typeProperties.servicePrincipalKey", - type: { - name: "Composite", - className: "SecretBase" - } - }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", type: { name: "any" } @@ -8789,50 +9223,78 @@ export const Office365LinkedService: coreClient.CompositeMapper = { } }; -export const SalesforceLinkedService: coreClient.CompositeMapper = { - serializedName: "Salesforce", +export const JiraObjectDataset: coreClient.CompositeMapper = { + serializedName: "JiraObject", type: { name: "Composite", - className: "SalesforceLinkedService", - uberParent: "LinkedService", + className: "JiraObjectDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - environmentUrl: { - serializedName: "typeProperties.environmentUrl", + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { name: "any" } - }, - username: { - serializedName: "typeProperties.username", + } + } + } +}; + +export const MagentoObjectDataset: coreClient.CompositeMapper = { + serializedName: "MagentoObject", + type: { + name: "Composite", + className: "MagentoObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { name: "any" } - }, - password: { - serializedName: "typeProperties.password", - type: { - name: "Composite", - className: "SecretBase" - } - }, - securityToken: { - serializedName: "typeProperties.securityToken", - type: { - name: "Composite", - className: "SecretBase" - } - }, - apiVersion: { - serializedName: "typeProperties.apiVersion", + } + } + } +}; + +export const MariaDBTableDataset: coreClient.CompositeMapper = { + serializedName: "MariaDBTable", + type: { + name: "Composite", + className: "MariaDBTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { name: "any" } - }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + } + } + } +}; + +export const AzureMariaDBTableDataset: coreClient.CompositeMapper = { + serializedName: "AzureMariaDBTable", + type: { + name: "Composite", + className: "AzureMariaDBTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { name: "any" } @@ -8841,56 +9303,70 @@ export const SalesforceLinkedService: coreClient.CompositeMapper = { } }; -export const SalesforceServiceCloudLinkedService: coreClient.CompositeMapper = { - serializedName: "SalesforceServiceCloud", +export const MarketoObjectDataset: coreClient.CompositeMapper = { + 
serializedName: "MarketoObject", type: { name: "Composite", - className: "SalesforceServiceCloudLinkedService", - uberParent: "LinkedService", + className: "MarketoObjectDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - environmentUrl: { - serializedName: "typeProperties.environmentUrl", + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { name: "any" } - }, - username: { - serializedName: "typeProperties.username", + } + } + } +}; + +export const PaypalObjectDataset: coreClient.CompositeMapper = { + serializedName: "PaypalObject", + type: { + name: "Composite", + className: "PaypalObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { name: "any" } - }, - password: { - serializedName: "typeProperties.password", - type: { - name: "Composite", - className: "SecretBase" - } - }, - securityToken: { - serializedName: "typeProperties.securityToken", - type: { - name: "Composite", - className: "SecretBase" - } - }, - apiVersion: { - serializedName: "typeProperties.apiVersion", + } + } + } +}; + +export const PhoenixObjectDataset: coreClient.CompositeMapper = { + serializedName: "PhoenixObject", + type: { + name: "Composite", + className: "PhoenixObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { name: "any" } }, - extendedProperties: { - serializedName: "typeProperties.extendedProperties", + table: { + serializedName: "typeProperties.table", type: { name: "any" } }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", type: { name: "any" } @@ -8899,38 +9375,30 @@ export const SalesforceServiceCloudLinkedService: coreClient.CompositeMapper = { } }; -export const SapCloudForCustomerLinkedService: coreClient.CompositeMapper = { - serializedName: "SapCloudForCustomer", +export const PrestoObjectDataset: coreClient.CompositeMapper = { + serializedName: "PrestoObject", type: { name: "Composite", - className: "SapCloudForCustomerLinkedService", - uberParent: "LinkedService", + className: "PrestoObjectDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - url: { - serializedName: "typeProperties.url", - required: true, + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { name: "any" } }, - username: { - serializedName: "typeProperties.username", + table: { + serializedName: "typeProperties.table", type: { name: "any" } }, - password: { - serializedName: "typeProperties.password", - type: { - name: "Composite", - className: "SecretBase" - } - }, - encryptedCredential: { - 
serializedName: "typeProperties.encryptedCredential", + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", type: { name: "any" } @@ -8939,122 +9407,90 @@ export const SapCloudForCustomerLinkedService: coreClient.CompositeMapper = { } }; -export const SapEccLinkedService: coreClient.CompositeMapper = { - serializedName: "SapEcc", +export const QuickBooksObjectDataset: coreClient.CompositeMapper = { + serializedName: "QuickBooksObject", type: { name: "Composite", - className: "SapEccLinkedService", - uberParent: "LinkedService", + className: "QuickBooksObjectDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - url: { - serializedName: "typeProperties.url", - required: true, - type: { - name: "String" - } - }, - username: { - serializedName: "typeProperties.username", - type: { - name: "String" - } - }, - password: { - serializedName: "typeProperties.password", + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { - name: "Composite", - className: "SecretBase" + name: "any" } - }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + } + } + } +}; + +export const ServiceNowObjectDataset: coreClient.CompositeMapper = { + serializedName: "ServiceNowObject", + type: { + name: "Composite", + className: "ServiceNowObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { - name: "String" + name: "any" } } } } }; -export const SapOpenHubLinkedService: coreClient.CompositeMapper = { - serializedName: "SapOpenHub", +export const ShopifyObjectDataset: coreClient.CompositeMapper = { + serializedName: "ShopifyObject", type: { name: "Composite", - className: "SapOpenHubLinkedService", - uberParent: "LinkedService", + className: "ShopifyObjectDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - server: { - serializedName: "typeProperties.server", - required: true, + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { name: "any" } - }, - systemNumber: { - serializedName: "typeProperties.systemNumber", - required: true, - type: { - name: "any" - } - }, - clientId: { - serializedName: "typeProperties.clientId", - required: true, - type: { - name: "any" - } - }, - language: { - serializedName: "typeProperties.language", - type: { - name: "any" - } - }, - systemId: { - serializedName: "typeProperties.systemId", - type: { - name: "any" - } - }, - userName: { - serializedName: "typeProperties.userName", - type: { - name: "any" - } - }, - password: { - serializedName: "typeProperties.password", - type: { - name: "Composite", - className: "SecretBase" - } - }, - messageServer: { - serializedName: "typeProperties.messageServer", - type: { - name: "any" - } - }, - messageServerService: { - serializedName: "typeProperties.messageServerService", + } + } + } +}; + +export const 
SparkObjectDataset: coreClient.CompositeMapper = { + serializedName: "SparkObject", + type: { + name: "Composite", + className: "SparkObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { name: "any" } }, - logonGroup: { - serializedName: "typeProperties.logonGroup", + table: { + serializedName: "typeProperties.table", type: { name: "any" } }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", type: { name: "any" } @@ -9063,82 +9499,90 @@ export const SapOpenHubLinkedService: coreClient.CompositeMapper = { } }; -export const RestServiceLinkedService: coreClient.CompositeMapper = { - serializedName: "RestService", +export const SquareObjectDataset: coreClient.CompositeMapper = { + serializedName: "SquareObject", type: { name: "Composite", - className: "RestServiceLinkedService", - uberParent: "LinkedService", + className: "SquareObjectDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - url: { - serializedName: "typeProperties.url", - required: true, - type: { - name: "any" - } - }, - enableServerCertificateValidation: { - serializedName: "typeProperties.enableServerCertificateValidation", - type: { - name: "any" - } - }, - authenticationType: { - serializedName: "typeProperties.authenticationType", - required: true, - type: { - name: "String" - } - }, - userName: { - serializedName: "typeProperties.userName", + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { name: "any" } - }, - password: { - serializedName: "typeProperties.password", - type: { - name: "Composite", - className: "SecretBase" - } - }, - servicePrincipalId: { - serializedName: "typeProperties.servicePrincipalId", + } + } + } +}; + +export const XeroObjectDataset: coreClient.CompositeMapper = { + serializedName: "XeroObject", + type: { + name: "Composite", + className: "XeroObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { name: "any" } - }, - servicePrincipalKey: { - serializedName: "typeProperties.servicePrincipalKey", - type: { - name: "Composite", - className: "SecretBase" - } - }, - tenant: { - serializedName: "typeProperties.tenant", + } + } + } +}; + +export const ZohoObjectDataset: coreClient.CompositeMapper = { + serializedName: "ZohoObject", + type: { + name: "Composite", + className: "ZohoObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { name: "any" } - }, - azureCloudType: { - serializedName: "typeProperties.azureCloudType", + } + } + } +}; + +export const NetezzaTableDataset: coreClient.CompositeMapper = { + serializedName: 
"NetezzaTable", + type: { + name: "Composite", + className: "NetezzaTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { name: "any" } }, - aadResourceId: { - serializedName: "typeProperties.aadResourceId", + table: { + serializedName: "typeProperties.table", type: { name: "any" } }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", type: { name: "any" } @@ -9147,50 +9591,30 @@ export const RestServiceLinkedService: coreClient.CompositeMapper = { } }; -export const AmazonS3LinkedService: coreClient.CompositeMapper = { - serializedName: "AmazonS3", +export const VerticaTableDataset: coreClient.CompositeMapper = { + serializedName: "VerticaTable", type: { name: "Composite", - className: "AmazonS3LinkedService", - uberParent: "LinkedService", + className: "VerticaTableDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - authenticationType: { - serializedName: "typeProperties.authenticationType", - type: { - name: "any" - } - }, - accessKeyId: { - serializedName: "typeProperties.accessKeyId", + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { name: "any" } }, - secretAccessKey: { - serializedName: "typeProperties.secretAccessKey", - type: { - name: "Composite", - className: "SecretBase" - } - }, - serviceUrl: { - serializedName: "typeProperties.serviceUrl", + table: { + serializedName: "typeProperties.table", type: { name: "any" } }, - sessionToken: { - serializedName: "typeProperties.sessionToken", - type: { - name: "Composite", - className: "SecretBase" - } - }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", type: { name: "any" } @@ -9199,51 +9623,99 @@ export const AmazonS3LinkedService: coreClient.CompositeMapper = { } }; -export const AmazonRedshiftLinkedService: coreClient.CompositeMapper = { - serializedName: "AmazonRedshift", +export const SalesforceMarketingCloudObjectDataset: coreClient.CompositeMapper = { + serializedName: "SalesforceMarketingCloudObject", type: { name: "Composite", - className: "AmazonRedshiftLinkedService", - uberParent: "LinkedService", + className: "SalesforceMarketingCloudObjectDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - server: { - serializedName: "typeProperties.server", - required: true, + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { name: "any" } - }, - username: { - serializedName: "typeProperties.username", - type: { - name: "any" - } - }, - password: { - serializedName: "typeProperties.password", + } + } + } +}; + +export const ResponsysObjectDataset: coreClient.CompositeMapper = { + serializedName: "ResponsysObject", + type: { + name: 
"Composite", + className: "ResponsysObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { - name: "Composite", - className: "SecretBase" + name: "any" } - }, - database: { - serializedName: "typeProperties.database", + } + } + } +}; + +export const DynamicsAXResourceDataset: coreClient.CompositeMapper = { + serializedName: "DynamicsAXResource", + type: { + name: "Composite", + className: "DynamicsAXResourceDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + path: { + serializedName: "typeProperties.path", required: true, type: { name: "any" } - }, - port: { - serializedName: "typeProperties.port", + } + } + } +}; + +export const OracleServiceCloudObjectDataset: coreClient.CompositeMapper = { + serializedName: "OracleServiceCloudObject", + type: { + name: "Composite", + className: "OracleServiceCloudObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { name: "any" } - }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + } + } + } +}; + +export const AzureDataExplorerTableDataset: coreClient.CompositeMapper = { + serializedName: "AzureDataExplorerTable", + type: { + name: "Composite", + className: "AzureDataExplorerTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + table: { + serializedName: "typeProperties.table", type: { name: "any" } @@ -9252,19 +9724,18 @@ export const AmazonRedshiftLinkedService: coreClient.CompositeMapper = { } }; -export const CustomDataSourceLinkedService: coreClient.CompositeMapper = { - serializedName: "CustomDataSource", +export const GoogleAdWordsObjectDataset: coreClient.CompositeMapper = { + serializedName: "GoogleAdWordsObject", type: { name: "Composite", - className: "CustomDataSourceLinkedService", - uberParent: "LinkedService", + className: "GoogleAdWordsObjectDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - typeProperties: { - serializedName: "typeProperties", - required: true, + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", type: { name: "any" } @@ -9273,32 +9744,44 @@ export const CustomDataSourceLinkedService: coreClient.CompositeMapper = { } }; -export const AzureSearchLinkedService: coreClient.CompositeMapper = { - serializedName: "AzureSearch", +export const SnowflakeDataset: coreClient.CompositeMapper = { + serializedName: "SnowflakeTable", type: { name: "Composite", - className: "AzureSearchLinkedService", - uberParent: "LinkedService", + className: "SnowflakeDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - 
polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - url: { - serializedName: "typeProperties.url", - required: true, + ...Dataset.type.modelProperties, + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", type: { name: "any" } }, - key: { - serializedName: "typeProperties.key", + table: { + serializedName: "typeProperties.table", type: { - name: "Composite", - className: "SecretBase" + name: "any" } - }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + } + } + } +}; + +export const SharePointOnlineListResourceDataset: coreClient.CompositeMapper = { + serializedName: "SharePointOnlineListResource", + type: { + name: "Composite", + className: "SharePointOnlineListResourceDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + listName: { + serializedName: "typeProperties.listName", type: { name: "any" } @@ -9307,210 +9790,258 @@ export const AzureSearchLinkedService: coreClient.CompositeMapper = { } }; -export const HttpLinkedService: coreClient.CompositeMapper = { - serializedName: "HttpServer", +export const AzureDatabricksDeltaLakeDataset: coreClient.CompositeMapper = { + serializedName: "AzureDatabricksDeltaLakeDataset", type: { name: "Composite", - className: "HttpLinkedService", - uberParent: "LinkedService", + className: "AzureDatabricksDeltaLakeDataset", + uberParent: "Dataset", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, modelProperties: { - ...LinkedService.type.modelProperties, - url: { - serializedName: "typeProperties.url", - required: true, + ...Dataset.type.modelProperties, + table: { + serializedName: "typeProperties.table", type: { name: "any" } }, - authenticationType: { - serializedName: "typeProperties.authenticationType", + database: { + serializedName: "typeProperties.database", type: { - name: "String" + name: "any" } - }, - userName: { - serializedName: "typeProperties.userName", + } + } + } +}; + +export const AzureStorageLinkedService: coreClient.CompositeMapper = { + serializedName: "AzureStorage", + type: { + name: "Composite", + className: "AzureStorageLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", type: { name: "any" } }, - password: { - serializedName: "typeProperties.password", + accountKey: { + serializedName: "typeProperties.accountKey", type: { name: "Composite", - className: "SecretBase" + className: "AzureKeyVaultSecretReference" } }, - embeddedCertData: { - serializedName: "typeProperties.embeddedCertData", + sasUri: { + serializedName: "typeProperties.sasUri", type: { name: "any" } }, - certThumbprint: { - serializedName: "typeProperties.certThumbprint", + sasToken: { + serializedName: "typeProperties.sasToken", type: { - name: "any" + name: "Composite", + className: "AzureKeyVaultSecretReference" } }, encryptedCredential: { serializedName: "typeProperties.encryptedCredential", type: { - name: 
"any" - } - }, - enableServerCertificateValidation: { - serializedName: "typeProperties.enableServerCertificateValidation", - type: { - name: "any" + name: "String" } } } } }; -export const FtpServerLinkedService: coreClient.CompositeMapper = { - serializedName: "FtpServer", +export const AzureBlobStorageLinkedService: coreClient.CompositeMapper = { + serializedName: "AzureBlobStorage", type: { name: "Composite", - className: "FtpServerLinkedService", + className: "AzureBlobStorageLinkedService", uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { ...LinkedService.type.modelProperties, - host: { - serializedName: "typeProperties.host", - required: true, + connectionString: { + serializedName: "typeProperties.connectionString", type: { name: "any" } }, - port: { - serializedName: "typeProperties.port", + accountKey: { + serializedName: "typeProperties.accountKey", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + sasUri: { + serializedName: "typeProperties.sasUri", type: { name: "any" } }, - authenticationType: { - serializedName: "typeProperties.authenticationType", + sasToken: { + serializedName: "typeProperties.sasToken", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + serviceEndpoint: { + serializedName: "typeProperties.serviceEndpoint", type: { name: "String" } }, - userName: { - serializedName: "typeProperties.userName", + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", type: { name: "any" } }, - password: { - serializedName: "typeProperties.password", + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", type: { name: "Composite", className: "SecretBase" } }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + tenant: { + serializedName: "typeProperties.tenant", type: { name: "any" } }, - enableSsl: { - serializedName: "typeProperties.enableSsl", + azureCloudType: { + serializedName: "typeProperties.azureCloudType", type: { name: "any" } }, - enableServerCertificateValidation: { - serializedName: "typeProperties.enableServerCertificateValidation", + accountKind: { + serializedName: "typeProperties.accountKind", type: { - name: "any" + name: "String" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "String" } } } } }; -export const SftpServerLinkedService: coreClient.CompositeMapper = { - serializedName: "Sftp", +export const AzureTableStorageLinkedService: coreClient.CompositeMapper = { + serializedName: "AzureTableStorage", type: { name: "Composite", - className: "SftpServerLinkedService", + className: "AzureTableStorageLinkedService", uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { ...LinkedService.type.modelProperties, - host: { - serializedName: "typeProperties.host", - required: true, - type: { - name: "any" - } - }, - port: { - serializedName: "typeProperties.port", + connectionString: { + serializedName: "typeProperties.connectionString", type: { name: "any" } }, - authenticationType: { - serializedName: "typeProperties.authenticationType", + accountKey: { + serializedName: "typeProperties.accountKey", type: { - name: "String" + name: "Composite", + className: "AzureKeyVaultSecretReference" } }, - userName: { - 
serializedName: "typeProperties.userName", + sasUri: { + serializedName: "typeProperties.sasUri", type: { name: "any" } }, - password: { - serializedName: "typeProperties.password", + sasToken: { + serializedName: "typeProperties.sasToken", type: { name: "Composite", - className: "SecretBase" + className: "AzureKeyVaultSecretReference" } }, encryptedCredential: { serializedName: "typeProperties.encryptedCredential", + type: { + name: "String" + } + } + } + } +}; + +export const AzureSqlDWLinkedService: coreClient.CompositeMapper = { + serializedName: "AzureSqlDW", + type: { + name: "Composite", + className: "AzureSqlDWLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + required: true, type: { name: "any" } }, - privateKeyPath: { - serializedName: "typeProperties.privateKeyPath", + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", type: { name: "any" } }, - privateKeyContent: { - serializedName: "typeProperties.privateKeyContent", + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", type: { name: "Composite", className: "SecretBase" } }, - passPhrase: { - serializedName: "typeProperties.passPhrase", + tenant: { + serializedName: "typeProperties.tenant", type: { - name: "Composite", - className: "SecretBase" + name: "any" } }, - skipHostKeyValidation: { - serializedName: "typeProperties.skipHostKeyValidation", + azureCloudType: { + serializedName: "typeProperties.azureCloudType", type: { name: "any" } }, - hostKeyFingerprint: { - serializedName: "typeProperties.hostKeyFingerprint", + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: "any" } @@ -9519,32 +10050,18 @@ export const SftpServerLinkedService: coreClient.CompositeMapper = { } }; -export const SapBWLinkedService: coreClient.CompositeMapper = { - serializedName: "SapBW", +export const SqlServerLinkedService: coreClient.CompositeMapper = { + serializedName: "SqlServer", type: { name: "Composite", - className: "SapBWLinkedService", + className: "SqlServerLinkedService", uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { ...LinkedService.type.modelProperties, - server: { - serializedName: "typeProperties.server", - required: true, - type: { - name: "any" - } - }, - systemNumber: { - serializedName: "typeProperties.systemNumber", - required: true, - type: { - name: "any" - } - }, - clientId: { - serializedName: "typeProperties.clientId", + connectionString: { + serializedName: "typeProperties.connectionString", required: true, type: { name: "any" @@ -9573,11 +10090,11 @@ export const SapBWLinkedService: coreClient.CompositeMapper = { } }; -export const SapHanaLinkedService: coreClient.CompositeMapper = { - serializedName: "SapHana", +export const AmazonRdsForSqlServerLinkedService: coreClient.CompositeMapper = { + serializedName: "AmazonRdsForSqlServer", type: { name: "Composite", - className: "SapHanaLinkedService", + className: "AmazonRdsForSqlServerLinkedService", uberParent: "LinkedService", additionalProperties: { type: { 
name: "Object" } }, polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, @@ -9585,23 +10102,11 @@ export const SapHanaLinkedService: coreClient.CompositeMapper = { ...LinkedService.type.modelProperties, connectionString: { serializedName: "typeProperties.connectionString", - type: { - name: "any" - } - }, - server: { - serializedName: "typeProperties.server", required: true, type: { name: "any" } }, - authenticationType: { - serializedName: "typeProperties.authenticationType", - type: { - name: "String" - } - }, userName: { serializedName: "typeProperties.userName", type: { @@ -9625,72 +10130,51 @@ export const SapHanaLinkedService: coreClient.CompositeMapper = { } }; -export const AmazonMWSLinkedService: coreClient.CompositeMapper = { - serializedName: "AmazonMWS", +export const AzureSqlDatabaseLinkedService: coreClient.CompositeMapper = { + serializedName: "AzureSqlDatabase", type: { name: "Composite", - className: "AmazonMWSLinkedService", + className: "AzureSqlDatabaseLinkedService", uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { ...LinkedService.type.modelProperties, - endpoint: { - serializedName: "typeProperties.endpoint", - required: true, - type: { - name: "any" - } - }, - marketplaceID: { - serializedName: "typeProperties.marketplaceID", - required: true, - type: { - name: "any" - } - }, - sellerID: { - serializedName: "typeProperties.sellerID", + connectionString: { + serializedName: "typeProperties.connectionString", required: true, type: { name: "any" } }, - mwsAuthToken: { - serializedName: "typeProperties.mwsAuthToken", + password: { + serializedName: "typeProperties.password", type: { name: "Composite", - className: "SecretBase" + className: "AzureKeyVaultSecretReference" } }, - accessKeyId: { - serializedName: "typeProperties.accessKeyId", - required: true, + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", type: { name: "any" } }, - secretKey: { - serializedName: "typeProperties.secretKey", + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", type: { name: "Composite", className: "SecretBase" } }, - useEncryptedEndpoints: { - serializedName: "typeProperties.useEncryptedEndpoints", - type: { - name: "any" - } - }, - useHostVerification: { - serializedName: "typeProperties.useHostVerification", + tenant: { + serializedName: "typeProperties.tenant", type: { name: "any" } }, - usePeerVerification: { - serializedName: "typeProperties.usePeerVerification", + azureCloudType: { + serializedName: "typeProperties.azureCloudType", type: { name: "any" } @@ -9705,11 +10189,11 @@ export const AmazonMWSLinkedService: coreClient.CompositeMapper = { } }; -export const AzurePostgreSqlLinkedService: coreClient.CompositeMapper = { - serializedName: "AzurePostgreSql", +export const AzureSqlMILinkedService: coreClient.CompositeMapper = { + serializedName: "AzureSqlMI", type: { name: "Composite", - className: "AzurePostgreSqlLinkedService", + className: "AzureSqlMILinkedService", uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, @@ -9717,6 +10201,7 @@ export const AzurePostgreSqlLinkedService: coreClient.CompositeMapper = { ...LinkedService.type.modelProperties, connectionString: { serializedName: "typeProperties.connectionString", + required: true, type: { name: "any" } @@ -9728,6 +10213,31 @@ 
export const AzurePostgreSqlLinkedService: coreClient.CompositeMapper = { className: "AzureKeyVaultSecretReference" } }, + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", + type: { + name: "any" + } + }, + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + tenant: { + serializedName: "typeProperties.tenant", + type: { + name: "any" + } + }, + azureCloudType: { + serializedName: "typeProperties.azureCloudType", + type: { + name: "any" + } + }, encryptedCredential: { serializedName: "typeProperties.encryptedCredential", type: { @@ -9738,59 +10248,49 @@ export const AzurePostgreSqlLinkedService: coreClient.CompositeMapper = { } }; -export const ConcurLinkedService: coreClient.CompositeMapper = { - serializedName: "Concur", +export const AzureBatchLinkedService: coreClient.CompositeMapper = { + serializedName: "AzureBatch", type: { name: "Composite", - className: "ConcurLinkedService", + className: "AzureBatchLinkedService", uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { ...LinkedService.type.modelProperties, - connectionProperties: { - serializedName: "typeProperties.connectionProperties", + accountName: { + serializedName: "typeProperties.accountName", + required: true, type: { name: "any" } }, - clientId: { - serializedName: "typeProperties.clientId", + accessKey: { + serializedName: "typeProperties.accessKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + batchUri: { + serializedName: "typeProperties.batchUri", required: true, type: { name: "any" } }, - username: { - serializedName: "typeProperties.username", + poolName: { + serializedName: "typeProperties.poolName", required: true, type: { name: "any" } }, - password: { - serializedName: "typeProperties.password", + linkedServiceName: { + serializedName: "typeProperties.linkedServiceName", type: { name: "Composite", - className: "SecretBase" - } - }, - useEncryptedEndpoints: { - serializedName: "typeProperties.useEncryptedEndpoints", - type: { - name: "any" - } - }, - useHostVerification: { - serializedName: "typeProperties.useHostVerification", - type: { - name: "any" - } - }, - usePeerVerification: { - serializedName: "typeProperties.usePeerVerification", - type: { - name: "any" + className: "LinkedServiceReference" } }, encryptedCredential: { @@ -9803,31 +10303,19 @@ export const ConcurLinkedService: coreClient.CompositeMapper = { } }; -export const CouchbaseLinkedService: coreClient.CompositeMapper = { - serializedName: "Couchbase", +export const AzureKeyVaultLinkedService: coreClient.CompositeMapper = { + serializedName: "AzureKeyVault", type: { name: "Composite", - className: "CouchbaseLinkedService", + className: "AzureKeyVaultLinkedService", uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { ...LinkedService.type.modelProperties, - connectionString: { - serializedName: "typeProperties.connectionString", - type: { - name: "any" - } - }, - credString: { - serializedName: "typeProperties.credString", - type: { - name: "Composite", - className: "AzureKeyVaultSecretReference" - } - }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + baseUrl: { + serializedName: "typeProperties.baseUrl", + required: true, type: { name: 
"any" } @@ -9836,11 +10324,11 @@ export const CouchbaseLinkedService: coreClient.CompositeMapper = { } }; -export const DrillLinkedService: coreClient.CompositeMapper = { - serializedName: "Drill", +export const CosmosDbLinkedService: coreClient.CompositeMapper = { + serializedName: "CosmosDb", type: { name: "Composite", - className: "DrillLinkedService", + className: "CosmosDbLinkedService", uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, @@ -9852,11 +10340,23 @@ export const DrillLinkedService: coreClient.CompositeMapper = { name: "any" } }, - pwd: { - serializedName: "typeProperties.pwd", + accountEndpoint: { + serializedName: "typeProperties.accountEndpoint", + type: { + name: "any" + } + }, + database: { + serializedName: "typeProperties.database", + type: { + name: "any" + } + }, + accountKey: { + serializedName: "typeProperties.accountKey", type: { name: "Composite", - className: "AzureKeyVaultSecretReference" + className: "SecretBase" } }, encryptedCredential: { @@ -9869,18 +10369,49 @@ export const DrillLinkedService: coreClient.CompositeMapper = { } }; -export const EloquaLinkedService: coreClient.CompositeMapper = { - serializedName: "Eloqua", +export const DynamicsLinkedService: coreClient.CompositeMapper = { + serializedName: "Dynamics", type: { name: "Composite", - className: "EloquaLinkedService", + className: "DynamicsLinkedService", uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { ...LinkedService.type.modelProperties, - endpoint: { - serializedName: "typeProperties.endpoint", + deploymentType: { + serializedName: "typeProperties.deploymentType", + required: true, + type: { + name: "any" + } + }, + hostName: { + serializedName: "typeProperties.hostName", + type: { + name: "any" + } + }, + port: { + serializedName: "typeProperties.port", + type: { + name: "any" + } + }, + serviceUri: { + serializedName: "typeProperties.serviceUri", + type: { + name: "any" + } + }, + organizationName: { + serializedName: "typeProperties.organizationName", + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", required: true, type: { name: "any" @@ -9888,7 +10419,6 @@ export const EloquaLinkedService: coreClient.CompositeMapper = { }, username: { serializedName: "typeProperties.username", - required: true, type: { name: "any" } @@ -9900,22 +10430,23 @@ export const EloquaLinkedService: coreClient.CompositeMapper = { className: "SecretBase" } }, - useEncryptedEndpoints: { - serializedName: "typeProperties.useEncryptedEndpoints", + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", type: { name: "any" } }, - useHostVerification: { - serializedName: "typeProperties.useHostVerification", + servicePrincipalCredentialType: { + serializedName: "typeProperties.servicePrincipalCredentialType", type: { name: "any" } }, - usePeerVerification: { - serializedName: "typeProperties.usePeerVerification", + servicePrincipalCredential: { + serializedName: "typeProperties.servicePrincipalCredential", type: { - name: "any" + name: "Composite", + className: "SecretBase" } }, encryptedCredential: { @@ -9928,84 +10459,84 @@ export const EloquaLinkedService: coreClient.CompositeMapper = { } }; -export const GoogleBigQueryLinkedService: coreClient.CompositeMapper = { - serializedName: "GoogleBigQuery", +export 
const DynamicsCrmLinkedService: coreClient.CompositeMapper = { + serializedName: "DynamicsCrm", type: { name: "Composite", - className: "GoogleBigQueryLinkedService", + className: "DynamicsCrmLinkedService", uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { ...LinkedService.type.modelProperties, - project: { - serializedName: "typeProperties.project", + deploymentType: { + serializedName: "typeProperties.deploymentType", required: true, type: { name: "any" } }, - additionalProjects: { - serializedName: "typeProperties.additionalProjects", + hostName: { + serializedName: "typeProperties.hostName", type: { name: "any" } }, - requestGoogleDriveScope: { - serializedName: "typeProperties.requestGoogleDriveScope", + port: { + serializedName: "typeProperties.port", type: { name: "any" } }, - authenticationType: { - serializedName: "typeProperties.authenticationType", - required: true, + serviceUri: { + serializedName: "typeProperties.serviceUri", type: { - name: "String" + name: "any" } }, - refreshToken: { - serializedName: "typeProperties.refreshToken", + organizationName: { + serializedName: "typeProperties.organizationName", type: { - name: "Composite", - className: "SecretBase" + name: "any" } }, - clientId: { - serializedName: "typeProperties.clientId", + authenticationType: { + serializedName: "typeProperties.authenticationType", + required: true, type: { name: "any" } }, - clientSecret: { - serializedName: "typeProperties.clientSecret", + username: { + serializedName: "typeProperties.username", type: { - name: "Composite", - className: "SecretBase" + name: "any" } }, - email: { - serializedName: "typeProperties.email", + password: { + serializedName: "typeProperties.password", type: { - name: "any" + name: "Composite", + className: "SecretBase" } }, - keyFilePath: { - serializedName: "typeProperties.keyFilePath", + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", type: { name: "any" } }, - trustedCertPath: { - serializedName: "typeProperties.trustedCertPath", + servicePrincipalCredentialType: { + serializedName: "typeProperties.servicePrincipalCredentialType", type: { name: "any" } }, - useSystemTrustStore: { - serializedName: "typeProperties.useSystemTrustStore", + servicePrincipalCredential: { + serializedName: "typeProperties.servicePrincipalCredential", type: { - name: "any" + name: "Composite", + className: "SecretBase" } }, encryptedCredential: { @@ -10018,64 +10549,43 @@ export const GoogleBigQueryLinkedService: coreClient.CompositeMapper = { } }; -export const GreenplumLinkedService: coreClient.CompositeMapper = { - serializedName: "Greenplum", +export const CommonDataServiceForAppsLinkedService: coreClient.CompositeMapper = { + serializedName: "CommonDataServiceForApps", type: { name: "Composite", - className: "GreenplumLinkedService", + className: "CommonDataServiceForAppsLinkedService", uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { ...LinkedService.type.modelProperties, - connectionString: { - serializedName: "typeProperties.connectionString", + deploymentType: { + serializedName: "typeProperties.deploymentType", + required: true, type: { name: "any" } }, - pwd: { - serializedName: "typeProperties.pwd", - type: { - name: "Composite", - className: "AzureKeyVaultSecretReference" - } - }, - encryptedCredential: 
{ - serializedName: "typeProperties.encryptedCredential", + hostName: { + serializedName: "typeProperties.hostName", type: { name: "any" } - } - } - } -}; - -export const HBaseLinkedService: coreClient.CompositeMapper = { - serializedName: "HBase", - type: { - name: "Composite", - className: "HBaseLinkedService", - uberParent: "LinkedService", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, - modelProperties: { - ...LinkedService.type.modelProperties, - host: { - serializedName: "typeProperties.host", - required: true, + }, + port: { + serializedName: "typeProperties.port", type: { name: "any" } }, - port: { - serializedName: "typeProperties.port", + serviceUri: { + serializedName: "typeProperties.serviceUri", type: { name: "any" } }, - httpPath: { - serializedName: "typeProperties.httpPath", + organizationName: { + serializedName: "typeProperties.organizationName", type: { name: "any" } @@ -10084,7 +10594,7 @@ export const HBaseLinkedService: coreClient.CompositeMapper = { serializedName: "typeProperties.authenticationType", required: true, type: { - name: "String" + name: "any" } }, username: { @@ -10100,28 +10610,23 @@ export const HBaseLinkedService: coreClient.CompositeMapper = { className: "SecretBase" } }, - enableSsl: { - serializedName: "typeProperties.enableSsl", + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", type: { name: "any" } }, - trustedCertPath: { - serializedName: "typeProperties.trustedCertPath", + servicePrincipalCredentialType: { + serializedName: "typeProperties.servicePrincipalCredentialType", type: { name: "any" } }, - allowHostNameCNMismatch: { - serializedName: "typeProperties.allowHostNameCNMismatch", + servicePrincipalCredential: { + serializedName: "typeProperties.servicePrincipalCredential", type: { - name: "any" - } - }, - allowSelfSignedServerCert: { - serializedName: "typeProperties.allowSelfSignedServerCert", - type: { - name: "any" + name: "Composite", + className: "SecretBase" } }, encryptedCredential: { @@ -10134,68 +10639,25 @@ export const HBaseLinkedService: coreClient.CompositeMapper = { } }; -export const HiveLinkedService: coreClient.CompositeMapper = { - serializedName: "Hive", +export const HDInsightLinkedService: coreClient.CompositeMapper = { + serializedName: "HDInsight", type: { name: "Composite", - className: "HiveLinkedService", + className: "HDInsightLinkedService", uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { ...LinkedService.type.modelProperties, - host: { - serializedName: "typeProperties.host", - required: true, - type: { - name: "any" - } - }, - port: { - serializedName: "typeProperties.port", - type: { - name: "any" - } - }, - serverType: { - serializedName: "typeProperties.serverType", - type: { - name: "String" - } - }, - thriftTransportProtocol: { - serializedName: "typeProperties.thriftTransportProtocol", - type: { - name: "String" - } - }, - authenticationType: { - serializedName: "typeProperties.authenticationType", + clusterUri: { + serializedName: "typeProperties.clusterUri", required: true, - type: { - name: "String" - } - }, - serviceDiscoveryMode: { - serializedName: "typeProperties.serviceDiscoveryMode", - type: { - name: "any" - } - }, - zooKeeperNameSpace: { - serializedName: "typeProperties.zooKeeperNameSpace", - type: { - name: "any" - } - }, - useNativeQuery: { - serializedName: 
"typeProperties.useNativeQuery", type: { name: "any" } }, - username: { - serializedName: "typeProperties.username", + userName: { + serializedName: "typeProperties.userName", type: { name: "any" } @@ -10207,44 +10669,34 @@ export const HiveLinkedService: coreClient.CompositeMapper = { className: "SecretBase" } }, - httpPath: { - serializedName: "typeProperties.httpPath", - type: { - name: "any" - } - }, - enableSsl: { - serializedName: "typeProperties.enableSsl", - type: { - name: "any" - } - }, - trustedCertPath: { - serializedName: "typeProperties.trustedCertPath", + linkedServiceName: { + serializedName: "typeProperties.linkedServiceName", type: { - name: "any" + name: "Composite", + className: "LinkedServiceReference" } }, - useSystemTrustStore: { - serializedName: "typeProperties.useSystemTrustStore", + hcatalogLinkedServiceName: { + serializedName: "typeProperties.hcatalogLinkedServiceName", type: { - name: "any" + name: "Composite", + className: "LinkedServiceReference" } }, - allowHostNameCNMismatch: { - serializedName: "typeProperties.allowHostNameCNMismatch", + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: "any" } }, - allowSelfSignedServerCert: { - serializedName: "typeProperties.allowSelfSignedServerCert", + isEspEnabled: { + serializedName: "typeProperties.isEspEnabled", type: { name: "any" } }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + fileSystem: { + serializedName: "typeProperties.fileSystem", type: { name: "any" } @@ -10253,62 +10705,36 @@ export const HiveLinkedService: coreClient.CompositeMapper = { } }; -export const HubspotLinkedService: coreClient.CompositeMapper = { - serializedName: "Hubspot", +export const FileServerLinkedService: coreClient.CompositeMapper = { + serializedName: "FileServer", type: { name: "Composite", - className: "HubspotLinkedService", + className: "FileServerLinkedService", uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { ...LinkedService.type.modelProperties, - clientId: { - serializedName: "typeProperties.clientId", + host: { + serializedName: "typeProperties.host", required: true, type: { name: "any" } }, - clientSecret: { - serializedName: "typeProperties.clientSecret", - type: { - name: "Composite", - className: "SecretBase" - } - }, - accessToken: { - serializedName: "typeProperties.accessToken", + userId: { + serializedName: "typeProperties.userId", type: { - name: "Composite", - className: "SecretBase" + name: "any" } }, - refreshToken: { - serializedName: "typeProperties.refreshToken", + password: { + serializedName: "typeProperties.password", type: { name: "Composite", className: "SecretBase" } }, - useEncryptedEndpoints: { - serializedName: "typeProperties.useEncryptedEndpoints", - type: { - name: "any" - } - }, - useHostVerification: { - serializedName: "typeProperties.useHostVerification", - type: { - name: "any" - } - }, - usePeerVerification: { - serializedName: "typeProperties.usePeerVerification", - type: { - name: "any" - } - }, encryptedCredential: { serializedName: "typeProperties.encryptedCredential", type: { @@ -10319,11 +10745,11 @@ export const HubspotLinkedService: coreClient.CompositeMapper = { } }; -export const ImpalaLinkedService: coreClient.CompositeMapper = { - serializedName: "Impala", +export const AzureFileStorageLinkedService: coreClient.CompositeMapper = { + serializedName: "AzureFileStorage", type: { 
name: "Composite", - className: "ImpalaLinkedService", + className: "AzureFileStorageLinkedService", uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, @@ -10336,21 +10762,8 @@ export const ImpalaLinkedService: coreClient.CompositeMapper = { name: "any" } }, - port: { - serializedName: "typeProperties.port", - type: { - name: "any" - } - }, - authenticationType: { - serializedName: "typeProperties.authenticationType", - required: true, - type: { - name: "String" - } - }, - username: { - serializedName: "typeProperties.username", + userId: { + serializedName: "typeProperties.userId", type: { name: "any" } @@ -10362,32 +10775,40 @@ export const ImpalaLinkedService: coreClient.CompositeMapper = { className: "SecretBase" } }, - enableSsl: { - serializedName: "typeProperties.enableSsl", + connectionString: { + serializedName: "typeProperties.connectionString", type: { name: "any" } }, - trustedCertPath: { - serializedName: "typeProperties.trustedCertPath", + accountKey: { + serializedName: "typeProperties.accountKey", type: { - name: "any" + name: "Composite", + className: "AzureKeyVaultSecretReference" } }, - useSystemTrustStore: { - serializedName: "typeProperties.useSystemTrustStore", + sasUri: { + serializedName: "typeProperties.sasUri", type: { name: "any" } }, - allowHostNameCNMismatch: { - serializedName: "typeProperties.allowHostNameCNMismatch", + sasToken: { + serializedName: "typeProperties.sasToken", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + fileShare: { + serializedName: "typeProperties.fileShare", type: { name: "any" } }, - allowSelfSignedServerCert: { - serializedName: "typeProperties.allowSelfSignedServerCert", + snapshot: { + serializedName: "typeProperties.snapshot", type: { name: "any" } @@ -10402,57 +10823,31 @@ export const ImpalaLinkedService: coreClient.CompositeMapper = { } }; -export const JiraLinkedService: coreClient.CompositeMapper = { - serializedName: "Jira", +export const GoogleCloudStorageLinkedService: coreClient.CompositeMapper = { + serializedName: "GoogleCloudStorage", type: { name: "Composite", - className: "JiraLinkedService", + className: "GoogleCloudStorageLinkedService", uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { ...LinkedService.type.modelProperties, - host: { - serializedName: "typeProperties.host", - required: true, - type: { - name: "any" - } - }, - port: { - serializedName: "typeProperties.port", - type: { - name: "any" - } - }, - username: { - serializedName: "typeProperties.username", - required: true, + accessKeyId: { + serializedName: "typeProperties.accessKeyId", type: { name: "any" } }, - password: { - serializedName: "typeProperties.password", + secretAccessKey: { + serializedName: "typeProperties.secretAccessKey", type: { name: "Composite", className: "SecretBase" } }, - useEncryptedEndpoints: { - serializedName: "typeProperties.useEncryptedEndpoints", - type: { - name: "any" - } - }, - useHostVerification: { - serializedName: "typeProperties.useHostVerification", - type: { - name: "any" - } - }, - usePeerVerification: { - serializedName: "typeProperties.usePeerVerification", + serviceUrl: { + serializedName: "typeProperties.serviceUrl", type: { name: "any" } @@ -10467,46 +10862,62 @@ export const JiraLinkedService: coreClient.CompositeMapper = { } }; -export const 
MagentoLinkedService: coreClient.CompositeMapper = { - serializedName: "Magento", +export const OracleLinkedService: coreClient.CompositeMapper = { + serializedName: "Oracle", type: { name: "Composite", - className: "MagentoLinkedService", + className: "OracleLinkedService", uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { ...LinkedService.type.modelProperties, - host: { - serializedName: "typeProperties.host", + connectionString: { + serializedName: "typeProperties.connectionString", required: true, type: { name: "any" } }, - accessToken: { - serializedName: "typeProperties.accessToken", + password: { + serializedName: "typeProperties.password", type: { name: "Composite", - className: "SecretBase" + className: "AzureKeyVaultSecretReference" } }, - useEncryptedEndpoints: { - serializedName: "typeProperties.useEncryptedEndpoints", + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: "any" } - }, - useHostVerification: { - serializedName: "typeProperties.useHostVerification", + } + } + } +}; + +export const AmazonRdsForOracleLinkedService: coreClient.CompositeMapper = { + serializedName: "AmazonRdsForOracle", + type: { + name: "Composite", + className: "AmazonRdsForOracleLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + required: true, type: { name: "any" } }, - usePeerVerification: { - serializedName: "typeProperties.usePeerVerification", + password: { + serializedName: "typeProperties.password", type: { - name: "any" + name: "Composite", + className: "SecretBase" } }, encryptedCredential: { @@ -10519,11 +10930,11 @@ export const MagentoLinkedService: coreClient.CompositeMapper = { } }; -export const MariaDBLinkedService: coreClient.CompositeMapper = { - serializedName: "MariaDB", +export const AzureMySqlLinkedService: coreClient.CompositeMapper = { + serializedName: "AzureMySql", type: { name: "Composite", - className: "MariaDBLinkedService", + className: "AzureMySqlLinkedService", uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, @@ -10531,12 +10942,13 @@ export const MariaDBLinkedService: coreClient.CompositeMapper = { ...LinkedService.type.modelProperties, connectionString: { serializedName: "typeProperties.connectionString", + required: true, type: { name: "any" } }, - pwd: { - serializedName: "typeProperties.pwd", + password: { + serializedName: "typeProperties.password", type: { name: "Composite", className: "AzureKeyVaultSecretReference" @@ -10552,11 +10964,11 @@ export const MariaDBLinkedService: coreClient.CompositeMapper = { } }; -export const AzureMariaDBLinkedService: coreClient.CompositeMapper = { - serializedName: "AzureMariaDB", +export const MySqlLinkedService: coreClient.CompositeMapper = { + serializedName: "MySql", type: { name: "Composite", - className: "AzureMariaDBLinkedService", + className: "MySqlLinkedService", uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, @@ -10564,12 +10976,13 @@ export const AzureMariaDBLinkedService: 
coreClient.CompositeMapper = { ...LinkedService.type.modelProperties, connectionString: { serializedName: "typeProperties.connectionString", + required: true, type: { name: "any" } }, - pwd: { - serializedName: "typeProperties.pwd", + password: { + serializedName: "typeProperties.password", type: { name: "Composite", className: "AzureKeyVaultSecretReference" @@ -10585,53 +10998,28 @@ export const AzureMariaDBLinkedService: coreClient.CompositeMapper = { } }; -export const MarketoLinkedService: coreClient.CompositeMapper = { - serializedName: "Marketo", +export const PostgreSqlLinkedService: coreClient.CompositeMapper = { + serializedName: "PostgreSql", type: { name: "Composite", - className: "MarketoLinkedService", + className: "PostgreSqlLinkedService", uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { ...LinkedService.type.modelProperties, - endpoint: { - serializedName: "typeProperties.endpoint", - required: true, - type: { - name: "any" - } - }, - clientId: { - serializedName: "typeProperties.clientId", + connectionString: { + serializedName: "typeProperties.connectionString", required: true, type: { name: "any" } }, - clientSecret: { - serializedName: "typeProperties.clientSecret", + password: { + serializedName: "typeProperties.password", type: { name: "Composite", - className: "SecretBase" - } - }, - useEncryptedEndpoints: { - serializedName: "typeProperties.useEncryptedEndpoints", - type: { - name: "any" - } - }, - useHostVerification: { - serializedName: "typeProperties.useHostVerification", - type: { - name: "any" - } - }, - usePeerVerification: { - serializedName: "typeProperties.usePeerVerification", - type: { - name: "any" + className: "AzureKeyVaultSecretReference" } }, encryptedCredential: { @@ -10644,53 +11032,53 @@ export const MarketoLinkedService: coreClient.CompositeMapper = { } }; -export const PaypalLinkedService: coreClient.CompositeMapper = { - serializedName: "Paypal", +export const SybaseLinkedService: coreClient.CompositeMapper = { + serializedName: "Sybase", type: { name: "Composite", - className: "PaypalLinkedService", + className: "SybaseLinkedService", uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { ...LinkedService.type.modelProperties, - host: { - serializedName: "typeProperties.host", + server: { + serializedName: "typeProperties.server", required: true, type: { name: "any" } }, - clientId: { - serializedName: "typeProperties.clientId", + database: { + serializedName: "typeProperties.database", required: true, type: { name: "any" } }, - clientSecret: { - serializedName: "typeProperties.clientSecret", + schema: { + serializedName: "typeProperties.schema", type: { - name: "Composite", - className: "SecretBase" + name: "any" } }, - useEncryptedEndpoints: { - serializedName: "typeProperties.useEncryptedEndpoints", + authenticationType: { + serializedName: "typeProperties.authenticationType", type: { - name: "any" + name: "String" } }, - useHostVerification: { - serializedName: "typeProperties.useHostVerification", + username: { + serializedName: "typeProperties.username", type: { name: "any" } }, - usePeerVerification: { - serializedName: "typeProperties.usePeerVerification", + password: { + serializedName: "typeProperties.password", type: { - name: "any" + name: "Composite", + className: "SecretBase" } }, 
encryptedCredential: { @@ -10703,38 +11091,38 @@ export const PaypalLinkedService: coreClient.CompositeMapper = { } }; -export const PhoenixLinkedService: coreClient.CompositeMapper = { - serializedName: "Phoenix", +export const Db2LinkedService: coreClient.CompositeMapper = { + serializedName: "Db2", type: { name: "Composite", - className: "PhoenixLinkedService", + className: "Db2LinkedService", uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { ...LinkedService.type.modelProperties, - host: { - serializedName: "typeProperties.host", - required: true, + connectionString: { + serializedName: "typeProperties.connectionString", type: { name: "any" } }, - port: { - serializedName: "typeProperties.port", + server: { + serializedName: "typeProperties.server", + required: true, type: { name: "any" } }, - httpPath: { - serializedName: "typeProperties.httpPath", + database: { + serializedName: "typeProperties.database", + required: true, type: { name: "any" } }, authenticationType: { serializedName: "typeProperties.authenticationType", - required: true, type: { name: "String" } @@ -10752,32 +11140,14 @@ export const PhoenixLinkedService: coreClient.CompositeMapper = { className: "SecretBase" } }, - enableSsl: { - serializedName: "typeProperties.enableSsl", - type: { - name: "any" - } - }, - trustedCertPath: { - serializedName: "typeProperties.trustedCertPath", - type: { - name: "any" - } - }, - useSystemTrustStore: { - serializedName: "typeProperties.useSystemTrustStore", - type: { - name: "any" - } - }, - allowHostNameCNMismatch: { - serializedName: "typeProperties.allowHostNameCNMismatch", + packageCollection: { + serializedName: "typeProperties.packageCollection", type: { name: "any" } }, - allowSelfSignedServerCert: { - serializedName: "typeProperties.allowSelfSignedServerCert", + certificateCommonName: { + serializedName: "typeProperties.certificateCommonName", type: { name: "any" } @@ -10792,46 +11162,30 @@ export const PhoenixLinkedService: coreClient.CompositeMapper = { } }; -export const PrestoLinkedService: coreClient.CompositeMapper = { - serializedName: "Presto", +export const TeradataLinkedService: coreClient.CompositeMapper = { + serializedName: "Teradata", type: { name: "Composite", - className: "PrestoLinkedService", + className: "TeradataLinkedService", uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { ...LinkedService.type.modelProperties, - host: { - serializedName: "typeProperties.host", - required: true, - type: { - name: "any" - } - }, - serverVersion: { - serializedName: "typeProperties.serverVersion", - required: true, - type: { - name: "any" - } - }, - catalog: { - serializedName: "typeProperties.catalog", - required: true, + connectionString: { + serializedName: "typeProperties.connectionString", type: { name: "any" } }, - port: { - serializedName: "typeProperties.port", + server: { + serializedName: "typeProperties.server", type: { name: "any" } }, authenticationType: { serializedName: "typeProperties.authenticationType", - required: true, type: { name: "String" } @@ -10849,38 +11203,61 @@ export const PrestoLinkedService: coreClient.CompositeMapper = { className: "SecretBase" } }, - enableSsl: { - serializedName: "typeProperties.enableSsl", + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: 
"any" } - }, - trustedCertPath: { - serializedName: "typeProperties.trustedCertPath", + } + } + } +}; + +export const AzureMLLinkedService: coreClient.CompositeMapper = { + serializedName: "AzureML", + type: { + name: "Composite", + className: "AzureMLLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + mlEndpoint: { + serializedName: "typeProperties.mlEndpoint", + required: true, type: { name: "any" } }, - useSystemTrustStore: { - serializedName: "typeProperties.useSystemTrustStore", + apiKey: { + serializedName: "typeProperties.apiKey", type: { - name: "any" + name: "Composite", + className: "SecretBase" } }, - allowHostNameCNMismatch: { - serializedName: "typeProperties.allowHostNameCNMismatch", + updateResourceEndpoint: { + serializedName: "typeProperties.updateResourceEndpoint", type: { name: "any" } }, - allowSelfSignedServerCert: { - serializedName: "typeProperties.allowSelfSignedServerCert", + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", type: { name: "any" } }, - timeZoneID: { - serializedName: "typeProperties.timeZoneID", + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + tenant: { + serializedName: "typeProperties.tenant", type: { name: "any" } @@ -10895,66 +11272,52 @@ export const PrestoLinkedService: coreClient.CompositeMapper = { } }; -export const QuickBooksLinkedService: coreClient.CompositeMapper = { - serializedName: "QuickBooks", +export const AzureMLServiceLinkedService: coreClient.CompositeMapper = { + serializedName: "AzureMLService", type: { name: "Composite", - className: "QuickBooksLinkedService", + className: "AzureMLServiceLinkedService", uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { ...LinkedService.type.modelProperties, - connectionProperties: { - serializedName: "typeProperties.connectionProperties", - type: { - name: "any" - } - }, - endpoint: { - serializedName: "typeProperties.endpoint", + subscriptionId: { + serializedName: "typeProperties.subscriptionId", required: true, type: { name: "any" } }, - companyId: { - serializedName: "typeProperties.companyId", + resourceGroupName: { + serializedName: "typeProperties.resourceGroupName", required: true, type: { name: "any" } }, - consumerKey: { - serializedName: "typeProperties.consumerKey", + mlWorkspaceName: { + serializedName: "typeProperties.mlWorkspaceName", required: true, type: { name: "any" } }, - consumerSecret: { - serializedName: "typeProperties.consumerSecret", - type: { - name: "Composite", - className: "SecretBase" - } - }, - accessToken: { - serializedName: "typeProperties.accessToken", + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", type: { - name: "Composite", - className: "SecretBase" + name: "any" } }, - accessTokenSecret: { - serializedName: "typeProperties.accessTokenSecret", + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", type: { name: "Composite", className: "SecretBase" } }, - useEncryptedEndpoints: { - serializedName: "typeProperties.useEncryptedEndpoints", + tenant: { + serializedName: "typeProperties.tenant", type: { name: "any" } @@ -10969,18 +11332,18 @@ export const 
QuickBooksLinkedService: coreClient.CompositeMapper = { } }; -export const ServiceNowLinkedService: coreClient.CompositeMapper = { - serializedName: "ServiceNow", +export const OdbcLinkedService: coreClient.CompositeMapper = { + serializedName: "Odbc", type: { name: "Composite", - className: "ServiceNowLinkedService", + className: "OdbcLinkedService", uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { ...LinkedService.type.modelProperties, - endpoint: { - serializedName: "typeProperties.endpoint", + connectionString: { + serializedName: "typeProperties.connectionString", required: true, type: { name: "any" @@ -10988,55 +11351,30 @@ export const ServiceNowLinkedService: coreClient.CompositeMapper = { }, authenticationType: { serializedName: "typeProperties.authenticationType", - required: true, - type: { - name: "String" - } - }, - username: { - serializedName: "typeProperties.username", type: { name: "any" } }, - password: { - serializedName: "typeProperties.password", + credential: { + serializedName: "typeProperties.credential", type: { name: "Composite", className: "SecretBase" } }, - clientId: { - serializedName: "typeProperties.clientId", + userName: { + serializedName: "typeProperties.userName", type: { name: "any" } }, - clientSecret: { - serializedName: "typeProperties.clientSecret", + password: { + serializedName: "typeProperties.password", type: { name: "Composite", className: "SecretBase" } }, - useEncryptedEndpoints: { - serializedName: "typeProperties.useEncryptedEndpoints", - type: { - name: "any" - } - }, - useHostVerification: { - serializedName: "typeProperties.useHostVerification", - type: { - name: "any" - } - }, - usePeerVerification: { - serializedName: "typeProperties.usePeerVerification", - type: { - name: "any" - } - }, encryptedCredential: { serializedName: "typeProperties.encryptedCredential", type: { @@ -11047,46 +11385,47 @@ export const ServiceNowLinkedService: coreClient.CompositeMapper = { } }; -export const ShopifyLinkedService: coreClient.CompositeMapper = { - serializedName: "Shopify", +export const InformixLinkedService: coreClient.CompositeMapper = { + serializedName: "Informix", type: { name: "Composite", - className: "ShopifyLinkedService", + className: "InformixLinkedService", uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { ...LinkedService.type.modelProperties, - host: { - serializedName: "typeProperties.host", + connectionString: { + serializedName: "typeProperties.connectionString", required: true, type: { name: "any" } }, - accessToken: { - serializedName: "typeProperties.accessToken", + authenticationType: { + serializedName: "typeProperties.authenticationType", type: { - name: "Composite", - className: "SecretBase" + name: "any" } }, - useEncryptedEndpoints: { - serializedName: "typeProperties.useEncryptedEndpoints", + credential: { + serializedName: "typeProperties.credential", type: { - name: "any" + name: "Composite", + className: "SecretBase" } }, - useHostVerification: { - serializedName: "typeProperties.useHostVerification", + userName: { + serializedName: "typeProperties.userName", type: { name: "any" } }, - usePeerVerification: { - serializedName: "typeProperties.usePeerVerification", + password: { + serializedName: "typeProperties.password", type: { - name: "any" + name: "Composite", + className: 
"SecretBase" } }, encryptedCredential: { @@ -11099,51 +11438,38 @@ export const ShopifyLinkedService: coreClient.CompositeMapper = { } }; -export const SparkLinkedService: coreClient.CompositeMapper = { - serializedName: "Spark", +export const MicrosoftAccessLinkedService: coreClient.CompositeMapper = { + serializedName: "MicrosoftAccess", type: { name: "Composite", - className: "SparkLinkedService", + className: "MicrosoftAccessLinkedService", uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { ...LinkedService.type.modelProperties, - host: { - serializedName: "typeProperties.host", + connectionString: { + serializedName: "typeProperties.connectionString", required: true, type: { name: "any" } }, - port: { - serializedName: "typeProperties.port", - required: true, + authenticationType: { + serializedName: "typeProperties.authenticationType", type: { name: "any" } }, - serverType: { - serializedName: "typeProperties.serverType", - type: { - name: "String" - } - }, - thriftTransportProtocol: { - serializedName: "typeProperties.thriftTransportProtocol", - type: { - name: "String" - } - }, - authenticationType: { - serializedName: "typeProperties.authenticationType", - required: true, + credential: { + serializedName: "typeProperties.credential", type: { - name: "String" + name: "Composite", + className: "SecretBase" } }, - username: { - serializedName: "typeProperties.username", + userName: { + serializedName: "typeProperties.userName", type: { name: "any" } @@ -11155,114 +11481,149 @@ export const SparkLinkedService: coreClient.CompositeMapper = { className: "SecretBase" } }, - httpPath: { - serializedName: "typeProperties.httpPath", - type: { - name: "any" - } - }, - enableSsl: { - serializedName: "typeProperties.enableSsl", + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: "any" } - }, - trustedCertPath: { - serializedName: "typeProperties.trustedCertPath", + } + } + } +}; + +export const HdfsLinkedService: coreClient.CompositeMapper = { + serializedName: "Hdfs", + type: { + name: "Composite", + className: "HdfsLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + url: { + serializedName: "typeProperties.url", + required: true, type: { name: "any" } }, - useSystemTrustStore: { - serializedName: "typeProperties.useSystemTrustStore", + authenticationType: { + serializedName: "typeProperties.authenticationType", type: { name: "any" } }, - allowHostNameCNMismatch: { - serializedName: "typeProperties.allowHostNameCNMismatch", + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: "any" } }, - allowSelfSignedServerCert: { - serializedName: "typeProperties.allowSelfSignedServerCert", + userName: { + serializedName: "typeProperties.userName", type: { name: "any" } }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + password: { + serializedName: "typeProperties.password", type: { - name: "any" + name: "Composite", + className: "SecretBase" } } } } }; -export const SquareLinkedService: coreClient.CompositeMapper = { - serializedName: "Square", +export const ODataLinkedService: coreClient.CompositeMapper = { + serializedName: "OData", type: { name: "Composite", - className: 
"SquareLinkedService", + className: "ODataLinkedService", uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { ...LinkedService.type.modelProperties, - connectionProperties: { - serializedName: "typeProperties.connectionProperties", + url: { + serializedName: "typeProperties.url", + required: true, type: { name: "any" } }, - host: { - serializedName: "typeProperties.host", - required: true, + authenticationType: { + serializedName: "typeProperties.authenticationType", type: { - name: "any" + name: "String" } }, - clientId: { - serializedName: "typeProperties.clientId", - required: true, + userName: { + serializedName: "typeProperties.userName", type: { name: "any" } }, - clientSecret: { - serializedName: "typeProperties.clientSecret", + password: { + serializedName: "typeProperties.password", type: { name: "Composite", className: "SecretBase" } }, - redirectUri: { - serializedName: "typeProperties.redirectUri", - required: true, + tenant: { + serializedName: "typeProperties.tenant", type: { name: "any" } }, - useEncryptedEndpoints: { - serializedName: "typeProperties.useEncryptedEndpoints", + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", type: { name: "any" } }, - useHostVerification: { - serializedName: "typeProperties.useHostVerification", + azureCloudType: { + serializedName: "typeProperties.azureCloudType", type: { name: "any" } }, - usePeerVerification: { - serializedName: "typeProperties.usePeerVerification", + aadResourceId: { + serializedName: "typeProperties.aadResourceId", type: { name: "any" } }, + aadServicePrincipalCredentialType: { + serializedName: "typeProperties.aadServicePrincipalCredentialType", + type: { + name: "String" + } + }, + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + servicePrincipalEmbeddedCert: { + serializedName: "typeProperties.servicePrincipalEmbeddedCert", + type: { + name: "Composite", + className: "SecretBase" + } + }, + servicePrincipalEmbeddedCertPassword: { + serializedName: "typeProperties.servicePrincipalEmbeddedCertPassword", + type: { + name: "Composite", + className: "SecretBase" + } + }, encryptedCredential: { serializedName: "typeProperties.encryptedCredential", type: { @@ -11273,22 +11634,37 @@ export const SquareLinkedService: coreClient.CompositeMapper = { } }; -export const XeroLinkedService: coreClient.CompositeMapper = { - serializedName: "Xero", +export const WebLinkedService: coreClient.CompositeMapper = { + serializedName: "Web", type: { name: "Composite", - className: "XeroLinkedService", + className: "WebLinkedService", uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { ...LinkedService.type.modelProperties, - connectionProperties: { - serializedName: "typeProperties.connectionProperties", + typeProperties: { + serializedName: "typeProperties", type: { - name: "any" + name: "Composite", + className: "WebLinkedServiceTypeProperties" } - }, + } + } + } +}; + +export const CassandraLinkedService: coreClient.CompositeMapper = { + serializedName: "Cassandra", + type: { + name: "Composite", + className: "CassandraLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: 
LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, host: { serializedName: "typeProperties.host", required: true, @@ -11296,36 +11672,29 @@ export const XeroLinkedService: coreClient.CompositeMapper = { name: "any" } }, - consumerKey: { - serializedName: "typeProperties.consumerKey", - type: { - name: "Composite", - className: "SecretBase" - } - }, - privateKey: { - serializedName: "typeProperties.privateKey", + authenticationType: { + serializedName: "typeProperties.authenticationType", type: { - name: "Composite", - className: "SecretBase" + name: "any" } }, - useEncryptedEndpoints: { - serializedName: "typeProperties.useEncryptedEndpoints", + port: { + serializedName: "typeProperties.port", type: { name: "any" } }, - useHostVerification: { - serializedName: "typeProperties.useHostVerification", + username: { + serializedName: "typeProperties.username", type: { name: "any" } }, - usePeerVerification: { - serializedName: "typeProperties.usePeerVerification", + password: { + serializedName: "typeProperties.password", type: { - name: "any" + name: "Composite", + className: "SecretBase" } }, encryptedCredential: { @@ -11338,50 +11707,69 @@ export const XeroLinkedService: coreClient.CompositeMapper = { } }; -export const ZohoLinkedService: coreClient.CompositeMapper = { - serializedName: "Zoho", +export const MongoDbLinkedService: coreClient.CompositeMapper = { + serializedName: "MongoDb", type: { name: "Composite", - className: "ZohoLinkedService", + className: "MongoDbLinkedService", uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { ...LinkedService.type.modelProperties, - connectionProperties: { - serializedName: "typeProperties.connectionProperties", + server: { + serializedName: "typeProperties.server", + required: true, type: { name: "any" } }, - endpoint: { - serializedName: "typeProperties.endpoint", + authenticationType: { + serializedName: "typeProperties.authenticationType", + type: { + name: "String" + } + }, + databaseName: { + serializedName: "typeProperties.databaseName", required: true, type: { name: "any" } }, - accessToken: { - serializedName: "typeProperties.accessToken", + username: { + serializedName: "typeProperties.username", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", type: { name: "Composite", className: "SecretBase" } }, - useEncryptedEndpoints: { - serializedName: "typeProperties.useEncryptedEndpoints", + authSource: { + serializedName: "typeProperties.authSource", type: { name: "any" } }, - useHostVerification: { - serializedName: "typeProperties.useHostVerification", + port: { + serializedName: "typeProperties.port", type: { name: "any" } }, - usePeerVerification: { - serializedName: "typeProperties.usePeerVerification", + enableSsl: { + serializedName: "typeProperties.enableSsl", + type: { + name: "any" + } + }, + allowSelfSignedServerCert: { + serializedName: "typeProperties.allowSelfSignedServerCert", type: { name: "any" } @@ -11396,11 +11784,11 @@ export const ZohoLinkedService: coreClient.CompositeMapper = { } }; -export const VerticaLinkedService: coreClient.CompositeMapper = { - serializedName: "Vertica", +export const MongoDbAtlasLinkedService: coreClient.CompositeMapper = { + serializedName: "MongoDbAtlas", type: { name: "Composite", - className: "VerticaLinkedService", + className: "MongoDbAtlasLinkedService", uberParent: 
"LinkedService", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, @@ -11408,19 +11796,14 @@ export const VerticaLinkedService: coreClient.CompositeMapper = { ...LinkedService.type.modelProperties, connectionString: { serializedName: "typeProperties.connectionString", + required: true, type: { name: "any" } }, - pwd: { - serializedName: "typeProperties.pwd", - type: { - name: "Composite", - className: "AzureKeyVaultSecretReference" - } - }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + database: { + serializedName: "typeProperties.database", + required: true, type: { name: "any" } @@ -11429,11 +11812,11 @@ export const VerticaLinkedService: coreClient.CompositeMapper = { } }; -export const NetezzaLinkedService: coreClient.CompositeMapper = { - serializedName: "Netezza", +export const MongoDbV2LinkedService: coreClient.CompositeMapper = { + serializedName: "MongoDbV2", type: { name: "Composite", - className: "NetezzaLinkedService", + className: "MongoDbV2LinkedService", uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, @@ -11441,19 +11824,42 @@ export const NetezzaLinkedService: coreClient.CompositeMapper = { ...LinkedService.type.modelProperties, connectionString: { serializedName: "typeProperties.connectionString", + required: true, type: { name: "any" } }, - pwd: { - serializedName: "typeProperties.pwd", + database: { + serializedName: "typeProperties.database", + required: true, type: { - name: "Composite", - className: "AzureKeyVaultSecretReference" + name: "any" + } + } + } + } +}; + +export const CosmosDbMongoDbApiLinkedService: coreClient.CompositeMapper = { + serializedName: "CosmosDbMongoDbApi", + type: { + name: "Composite", + className: "CosmosDbMongoDbApiLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + required: true, + type: { + name: "any" } }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + database: { + serializedName: "typeProperties.database", + required: true, type: { name: "any" } @@ -11462,50 +11868,62 @@ export const NetezzaLinkedService: coreClient.CompositeMapper = { } }; -export const SalesforceMarketingCloudLinkedService: coreClient.CompositeMapper = { - serializedName: "SalesforceMarketingCloud", +export const AzureDataLakeStoreLinkedService: coreClient.CompositeMapper = { + serializedName: "AzureDataLakeStore", type: { name: "Composite", - className: "SalesforceMarketingCloudLinkedService", + className: "AzureDataLakeStoreLinkedService", uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { ...LinkedService.type.modelProperties, - connectionProperties: { - serializedName: "typeProperties.connectionProperties", + dataLakeStoreUri: { + serializedName: "typeProperties.dataLakeStoreUri", + required: true, type: { name: "any" } }, - clientId: { - serializedName: "typeProperties.clientId", - required: true, + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", type: { name: "any" } }, - clientSecret: { - serializedName: 
"typeProperties.clientSecret", + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", type: { name: "Composite", className: "SecretBase" } }, - useEncryptedEndpoints: { - serializedName: "typeProperties.useEncryptedEndpoints", + tenant: { + serializedName: "typeProperties.tenant", type: { name: "any" } }, - useHostVerification: { - serializedName: "typeProperties.useHostVerification", + azureCloudType: { + serializedName: "typeProperties.azureCloudType", type: { name: "any" } }, - usePeerVerification: { - serializedName: "typeProperties.usePeerVerification", + accountName: { + serializedName: "typeProperties.accountName", + type: { + name: "any" + } + }, + subscriptionId: { + serializedName: "typeProperties.subscriptionId", + type: { + name: "any" + } + }, + resourceGroupName: { + serializedName: "typeProperties.resourceGroupName", type: { name: "any" } @@ -11520,47 +11938,25 @@ export const SalesforceMarketingCloudLinkedService: coreClient.CompositeMapper = } }; -export const HDInsightOnDemandLinkedService: coreClient.CompositeMapper = { - serializedName: "HDInsightOnDemand", +export const AzureBlobFSLinkedService: coreClient.CompositeMapper = { + serializedName: "AzureBlobFS", type: { name: "Composite", - className: "HDInsightOnDemandLinkedService", + className: "AzureBlobFSLinkedService", uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { ...LinkedService.type.modelProperties, - clusterSize: { - serializedName: "typeProperties.clusterSize", - required: true, - type: { - name: "any" - } - }, - timeToLive: { - serializedName: "typeProperties.timeToLive", - required: true, - type: { - name: "any" - } - }, - version: { - serializedName: "typeProperties.version", + url: { + serializedName: "typeProperties.url", required: true, type: { name: "any" } }, - linkedServiceName: { - serializedName: "typeProperties.linkedServiceName", - type: { - name: "Composite", - className: "LinkedServiceReference" - } - }, - hostSubscriptionId: { - serializedName: "typeProperties.hostSubscriptionId", - required: true, + accountKey: { + serializedName: "typeProperties.accountKey", type: { name: "any" } @@ -11580,173 +11976,176 @@ export const HDInsightOnDemandLinkedService: coreClient.CompositeMapper = { }, tenant: { serializedName: "typeProperties.tenant", - required: true, type: { name: "any" } }, - clusterResourceGroup: { - serializedName: "typeProperties.clusterResourceGroup", - required: true, + azureCloudType: { + serializedName: "typeProperties.azureCloudType", type: { name: "any" } }, - clusterNamePrefix: { - serializedName: "typeProperties.clusterNamePrefix", + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: "any" } - }, - clusterUserName: { - serializedName: "typeProperties.clusterUserName", + } + } + } +}; + +export const Office365LinkedService: coreClient.CompositeMapper = { + serializedName: "Office365", + type: { + name: "Composite", + className: "Office365LinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + office365TenantId: { + serializedName: "typeProperties.office365TenantId", + required: true, type: { name: "any" } }, - clusterPassword: { - serializedName: "typeProperties.clusterPassword", + servicePrincipalTenantId: { + 
serializedName: "typeProperties.servicePrincipalTenantId", + required: true, type: { - name: "Composite", - className: "SecretBase" + name: "any" } }, - clusterSshUserName: { - serializedName: "typeProperties.clusterSshUserName", + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", + required: true, type: { name: "any" } }, - clusterSshPassword: { - serializedName: "typeProperties.clusterSshPassword", + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", type: { name: "Composite", className: "SecretBase" } }, - additionalLinkedServiceNames: { - serializedName: "typeProperties.additionalLinkedServiceNames", - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "LinkedServiceReference" - } - } - } - }, - hcatalogLinkedServiceName: { - serializedName: "typeProperties.hcatalogLinkedServiceName", - type: { - name: "Composite", - className: "LinkedServiceReference" - } - }, - clusterType: { - serializedName: "typeProperties.clusterType", - type: { - name: "any" - } - }, - sparkVersion: { - serializedName: "typeProperties.sparkVersion", - type: { - name: "any" - } - }, - coreConfiguration: { - serializedName: "typeProperties.coreConfiguration", + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: "any" } - }, - hBaseConfiguration: { - serializedName: "typeProperties.hBaseConfiguration", + } + } + } +}; + +export const SalesforceLinkedService: coreClient.CompositeMapper = { + serializedName: "Salesforce", + type: { + name: "Composite", + className: "SalesforceLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + environmentUrl: { + serializedName: "typeProperties.environmentUrl", type: { name: "any" } }, - hdfsConfiguration: { - serializedName: "typeProperties.hdfsConfiguration", + username: { + serializedName: "typeProperties.username", type: { name: "any" } }, - hiveConfiguration: { - serializedName: "typeProperties.hiveConfiguration", + password: { + serializedName: "typeProperties.password", type: { - name: "any" + name: "Composite", + className: "SecretBase" } }, - mapReduceConfiguration: { - serializedName: "typeProperties.mapReduceConfiguration", + securityToken: { + serializedName: "typeProperties.securityToken", type: { - name: "any" + name: "Composite", + className: "SecretBase" } }, - oozieConfiguration: { - serializedName: "typeProperties.oozieConfiguration", + apiVersion: { + serializedName: "typeProperties.apiVersion", type: { name: "any" } }, - stormConfiguration: { - serializedName: "typeProperties.stormConfiguration", + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: "any" } - }, - yarnConfiguration: { - serializedName: "typeProperties.yarnConfiguration", + } + } + } +}; + +export const SalesforceServiceCloudLinkedService: coreClient.CompositeMapper = { + serializedName: "SalesforceServiceCloud", + type: { + name: "Composite", + className: "SalesforceServiceCloudLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + environmentUrl: { + serializedName: "typeProperties.environmentUrl", type: { name: "any" } }, - encryptedCredential: { - serializedName: 
"typeProperties.encryptedCredential", + username: { + serializedName: "typeProperties.username", type: { name: "any" } }, - headNodeSize: { - serializedName: "typeProperties.headNodeSize", + password: { + serializedName: "typeProperties.password", type: { - name: "any" + name: "Composite", + className: "SecretBase" } }, - dataNodeSize: { - serializedName: "typeProperties.dataNodeSize", + securityToken: { + serializedName: "typeProperties.securityToken", type: { - name: "any" + name: "Composite", + className: "SecretBase" } }, - zookeeperNodeSize: { - serializedName: "typeProperties.zookeeperNodeSize", + apiVersion: { + serializedName: "typeProperties.apiVersion", type: { name: "any" } }, - scriptActions: { - serializedName: "typeProperties.scriptActions", - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "ScriptAction" - } - } - } - }, - virtualNetworkId: { - serializedName: "typeProperties.virtualNetworkId", + extendedProperties: { + serializedName: "typeProperties.extendedProperties", type: { name: "any" } }, - subnetName: { - serializedName: "typeProperties.subnetName", + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: "any" } @@ -11755,63 +12154,38 @@ export const HDInsightOnDemandLinkedService: coreClient.CompositeMapper = { } }; -export const AzureDataLakeAnalyticsLinkedService: coreClient.CompositeMapper = { - serializedName: "AzureDataLakeAnalytics", +export const SapCloudForCustomerLinkedService: coreClient.CompositeMapper = { + serializedName: "SapCloudForCustomer", type: { name: "Composite", - className: "AzureDataLakeAnalyticsLinkedService", + className: "SapCloudForCustomerLinkedService", uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { ...LinkedService.type.modelProperties, - accountName: { - serializedName: "typeProperties.accountName", + url: { + serializedName: "typeProperties.url", required: true, type: { name: "any" } }, - servicePrincipalId: { - serializedName: "typeProperties.servicePrincipalId", + username: { + serializedName: "typeProperties.username", type: { name: "any" } }, - servicePrincipalKey: { - serializedName: "typeProperties.servicePrincipalKey", + password: { + serializedName: "typeProperties.password", type: { name: "Composite", className: "SecretBase" } }, - tenant: { - serializedName: "typeProperties.tenant", - required: true, - type: { - name: "any" - } - }, - subscriptionId: { - serializedName: "typeProperties.subscriptionId", - type: { - name: "any" - } - }, - resourceGroupName: { - serializedName: "typeProperties.resourceGroupName", - type: { - name: "any" - } - }, - dataLakeAnalyticsUri: { - serializedName: "typeProperties.dataLakeAnalyticsUri", - type: { - name: "any" - } - }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: "any" } @@ -11820,113 +12194,116 @@ export const AzureDataLakeAnalyticsLinkedService: coreClient.CompositeMapper = { } }; -export const AzureDatabricksLinkedService: coreClient.CompositeMapper = { - serializedName: "AzureDatabricks", +export const SapEccLinkedService: coreClient.CompositeMapper = { + serializedName: "SapEcc", type: { name: "Composite", - className: "AzureDatabricksLinkedService", + className: "SapEccLinkedService", uberParent: "LinkedService", additionalProperties: { type: { name: 
"Object" } }, polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { ...LinkedService.type.modelProperties, - domain: { - serializedName: "typeProperties.domain", + url: { + serializedName: "typeProperties.url", required: true, type: { - name: "any" + name: "String" } }, - accessToken: { - serializedName: "typeProperties.accessToken", + username: { + serializedName: "typeProperties.username", type: { - name: "Composite", - className: "SecretBase" + name: "String" } }, - authentication: { - serializedName: "typeProperties.authentication", + password: { + serializedName: "typeProperties.password", type: { - name: "any" + name: "Composite", + className: "SecretBase" } }, - workspaceResourceId: { - serializedName: "typeProperties.workspaceResourceId", + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { - name: "any" + name: "String" } - }, - existingClusterId: { - serializedName: "typeProperties.existingClusterId", + } + } + } +}; + +export const SapOpenHubLinkedService: coreClient.CompositeMapper = { + serializedName: "SapOpenHub", + type: { + name: "Composite", + className: "SapOpenHubLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + server: { + serializedName: "typeProperties.server", + required: true, type: { name: "any" } }, - instancePoolId: { - serializedName: "typeProperties.instancePoolId", + systemNumber: { + serializedName: "typeProperties.systemNumber", + required: true, type: { name: "any" } }, - newClusterVersion: { - serializedName: "typeProperties.newClusterVersion", + clientId: { + serializedName: "typeProperties.clientId", + required: true, type: { name: "any" } }, - newClusterNumOfWorker: { - serializedName: "typeProperties.newClusterNumOfWorker", + language: { + serializedName: "typeProperties.language", type: { name: "any" } }, - newClusterNodeType: { - serializedName: "typeProperties.newClusterNodeType", + systemId: { + serializedName: "typeProperties.systemId", type: { name: "any" } }, - newClusterSparkConf: { - serializedName: "typeProperties.newClusterSparkConf", - type: { - name: "Dictionary", - value: { type: { name: "any" } } - } - }, - newClusterSparkEnvVars: { - serializedName: "typeProperties.newClusterSparkEnvVars", - type: { - name: "Dictionary", - value: { type: { name: "any" } } - } - }, - newClusterCustomTags: { - serializedName: "typeProperties.newClusterCustomTags", + userName: { + serializedName: "typeProperties.userName", type: { - name: "Dictionary", - value: { type: { name: "any" } } + name: "any" } }, - newClusterLogDestination: { - serializedName: "typeProperties.newClusterLogDestination", + password: { + serializedName: "typeProperties.password", type: { - name: "any" + name: "Composite", + className: "SecretBase" } }, - newClusterDriverNodeType: { - serializedName: "typeProperties.newClusterDriverNodeType", + messageServer: { + serializedName: "typeProperties.messageServer", type: { name: "any" } }, - newClusterInitScripts: { - serializedName: "typeProperties.newClusterInitScripts", + messageServerService: { + serializedName: "typeProperties.messageServerService", type: { name: "any" } }, - newClusterEnableElasticDisk: { - serializedName: "typeProperties.newClusterEnableElasticDisk", + logonGroup: { + serializedName: "typeProperties.logonGroup", type: { name: "any" } @@ -11936,102 +12313,81 @@ 
export const AzureDatabricksLinkedService: coreClient.CompositeMapper = { type: { name: "any" } - }, - policyId: { - serializedName: "typeProperties.policyId", - type: { - name: "any" - } } } } }; -export const AzureDatabricksDeltaLakeLinkedService: coreClient.CompositeMapper = { - serializedName: "AzureDatabricksDeltaLake", +export const RestServiceLinkedService: coreClient.CompositeMapper = { + serializedName: "RestService", type: { name: "Composite", - className: "AzureDatabricksDeltaLakeLinkedService", + className: "RestServiceLinkedService", uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { ...LinkedService.type.modelProperties, - domain: { - serializedName: "typeProperties.domain", + url: { + serializedName: "typeProperties.url", required: true, type: { name: "any" } }, - accessToken: { - serializedName: "typeProperties.accessToken", + enableServerCertificateValidation: { + serializedName: "typeProperties.enableServerCertificateValidation", type: { - name: "Composite", - className: "SecretBase" + name: "any" } }, - clusterId: { - serializedName: "typeProperties.clusterId", + authenticationType: { + serializedName: "typeProperties.authenticationType", + required: true, type: { - name: "any" + name: "String" } }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + userName: { + serializedName: "typeProperties.userName", type: { name: "any" } - } - } - } -}; - -export const ResponsysLinkedService: coreClient.CompositeMapper = { - serializedName: "Responsys", - type: { - name: "Composite", - className: "ResponsysLinkedService", - uberParent: "LinkedService", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, - modelProperties: { - ...LinkedService.type.modelProperties, - endpoint: { - serializedName: "typeProperties.endpoint", - required: true, + }, + password: { + serializedName: "typeProperties.password", type: { - name: "any" + name: "Composite", + className: "SecretBase" } }, - clientId: { - serializedName: "typeProperties.clientId", - required: true, + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", type: { name: "any" } }, - clientSecret: { - serializedName: "typeProperties.clientSecret", + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", type: { name: "Composite", className: "SecretBase" } }, - useEncryptedEndpoints: { - serializedName: "typeProperties.useEncryptedEndpoints", + tenant: { + serializedName: "typeProperties.tenant", type: { name: "any" } }, - useHostVerification: { - serializedName: "typeProperties.useHostVerification", + azureCloudType: { + serializedName: "typeProperties.azureCloudType", type: { name: "any" } }, - usePeerVerification: { - serializedName: "typeProperties.usePeerVerification", + aadResourceId: { + serializedName: "typeProperties.aadResourceId", type: { name: "any" } @@ -12046,49 +12402,46 @@ export const ResponsysLinkedService: coreClient.CompositeMapper = { } }; -export const DynamicsAXLinkedService: coreClient.CompositeMapper = { - serializedName: "DynamicsAX", +export const AmazonS3LinkedService: coreClient.CompositeMapper = { + serializedName: "AmazonS3", type: { name: "Composite", - className: "DynamicsAXLinkedService", + className: "AmazonS3LinkedService", uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: 
LinkedService.type.polymorphicDiscriminator, modelProperties: { ...LinkedService.type.modelProperties, - url: { - serializedName: "typeProperties.url", - required: true, + authenticationType: { + serializedName: "typeProperties.authenticationType", type: { name: "any" } }, - servicePrincipalId: { - serializedName: "typeProperties.servicePrincipalId", - required: true, + accessKeyId: { + serializedName: "typeProperties.accessKeyId", type: { name: "any" } }, - servicePrincipalKey: { - serializedName: "typeProperties.servicePrincipalKey", + secretAccessKey: { + serializedName: "typeProperties.secretAccessKey", type: { name: "Composite", className: "SecretBase" } }, - tenant: { - serializedName: "typeProperties.tenant", - required: true, + serviceUrl: { + serializedName: "typeProperties.serviceUrl", type: { name: "any" } }, - aadResourceId: { - serializedName: "typeProperties.aadResourceId", - required: true, + sessionToken: { + serializedName: "typeProperties.sessionToken", type: { - name: "any" + name: "Composite", + className: "SecretBase" } }, encryptedCredential: { @@ -12101,18 +12454,18 @@ export const DynamicsAXLinkedService: coreClient.CompositeMapper = { } }; -export const OracleServiceCloudLinkedService: coreClient.CompositeMapper = { - serializedName: "OracleServiceCloud", +export const AmazonRedshiftLinkedService: coreClient.CompositeMapper = { + serializedName: "AmazonRedshift", type: { name: "Composite", - className: "OracleServiceCloudLinkedService", + className: "AmazonRedshiftLinkedService", uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { ...LinkedService.type.modelProperties, - host: { - serializedName: "typeProperties.host", + server: { + serializedName: "typeProperties.server", required: true, type: { name: "any" @@ -12120,7 +12473,6 @@ export const OracleServiceCloudLinkedService: coreClient.CompositeMapper = { }, username: { serializedName: "typeProperties.username", - required: true, type: { name: "any" } @@ -12132,20 +12484,15 @@ export const OracleServiceCloudLinkedService: coreClient.CompositeMapper = { className: "SecretBase" } }, - useEncryptedEndpoints: { - serializedName: "typeProperties.useEncryptedEndpoints", + database: { + serializedName: "typeProperties.database", + required: true, type: { name: "any" } }, - useHostVerification: { - serializedName: "typeProperties.useHostVerification", - type: { - name: "any" - } - }, - usePeerVerification: { - serializedName: "typeProperties.usePeerVerification", + port: { + serializedName: "typeProperties.port", type: { name: "any" } @@ -12160,83 +12507,117 @@ export const OracleServiceCloudLinkedService: coreClient.CompositeMapper = { } }; -export const GoogleAdWordsLinkedService: coreClient.CompositeMapper = { - serializedName: "GoogleAdWords", +export const CustomDataSourceLinkedService: coreClient.CompositeMapper = { + serializedName: "CustomDataSource", type: { name: "Composite", - className: "GoogleAdWordsLinkedService", + className: "CustomDataSourceLinkedService", uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { ...LinkedService.type.modelProperties, - clientCustomerID: { - serializedName: "typeProperties.clientCustomerID", + typeProperties: { + serializedName: "typeProperties", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const 
AzureSearchLinkedService: coreClient.CompositeMapper = { + serializedName: "AzureSearch", + type: { + name: "Composite", + className: "AzureSearchLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + url: { + serializedName: "typeProperties.url", required: true, type: { name: "any" } }, - developerToken: { - serializedName: "typeProperties.developerToken", + key: { + serializedName: "typeProperties.key", type: { name: "Composite", className: "SecretBase" } }, - authenticationType: { - serializedName: "typeProperties.authenticationType", + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const HttpLinkedService: coreClient.CompositeMapper = { + serializedName: "HttpServer", + type: { + name: "Composite", + className: "HttpLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + url: { + serializedName: "typeProperties.url", required: true, type: { - name: "String" + name: "any" } }, - refreshToken: { - serializedName: "typeProperties.refreshToken", + authenticationType: { + serializedName: "typeProperties.authenticationType", type: { - name: "Composite", - className: "SecretBase" + name: "String" } }, - clientId: { - serializedName: "typeProperties.clientId", + userName: { + serializedName: "typeProperties.userName", type: { name: "any" } }, - clientSecret: { - serializedName: "typeProperties.clientSecret", + password: { + serializedName: "typeProperties.password", type: { name: "Composite", className: "SecretBase" } }, - email: { - serializedName: "typeProperties.email", - type: { - name: "any" - } - }, - keyFilePath: { - serializedName: "typeProperties.keyFilePath", + embeddedCertData: { + serializedName: "typeProperties.embeddedCertData", type: { name: "any" } }, - trustedCertPath: { - serializedName: "typeProperties.trustedCertPath", + certThumbprint: { + serializedName: "typeProperties.certThumbprint", type: { name: "any" } }, - useSystemTrustStore: { - serializedName: "typeProperties.useSystemTrustStore", + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: "any" } }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", + enableServerCertificateValidation: { + serializedName: "typeProperties.enableServerCertificateValidation", type: { name: "any" } @@ -12245,44 +12626,33 @@ export const GoogleAdWordsLinkedService: coreClient.CompositeMapper = { } }; -export const SapTableLinkedService: coreClient.CompositeMapper = { - serializedName: "SapTable", +export const FtpServerLinkedService: coreClient.CompositeMapper = { + serializedName: "FtpServer", type: { name: "Composite", - className: "SapTableLinkedService", + className: "FtpServerLinkedService", uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { ...LinkedService.type.modelProperties, - server: { - serializedName: "typeProperties.server", - type: { - name: "any" - } - }, - systemNumber: { - serializedName: "typeProperties.systemNumber", - type: { - name: "any" - } - }, - clientId: { - serializedName: 
"typeProperties.clientId", + host: { + serializedName: "typeProperties.host", + required: true, type: { name: "any" } }, - language: { - serializedName: "typeProperties.language", + port: { + serializedName: "typeProperties.port", type: { name: "any" } }, - systemId: { - serializedName: "typeProperties.systemId", + authenticationType: { + serializedName: "typeProperties.authenticationType", type: { - name: "any" + name: "String" } }, userName: { @@ -12298,52 +12668,68 @@ export const SapTableLinkedService: coreClient.CompositeMapper = { className: "SecretBase" } }, - messageServer: { - serializedName: "typeProperties.messageServer", + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: "any" } }, - messageServerService: { - serializedName: "typeProperties.messageServerService", + enableSsl: { + serializedName: "typeProperties.enableSsl", type: { name: "any" } }, - sncMode: { - serializedName: "typeProperties.sncMode", + enableServerCertificateValidation: { + serializedName: "typeProperties.enableServerCertificateValidation", type: { name: "any" } - }, - sncMyName: { - serializedName: "typeProperties.sncMyName", + } + } + } +}; + +export const SftpServerLinkedService: coreClient.CompositeMapper = { + serializedName: "Sftp", + type: { + name: "Composite", + className: "SftpServerLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, type: { name: "any" } }, - sncPartnerName: { - serializedName: "typeProperties.sncPartnerName", + port: { + serializedName: "typeProperties.port", type: { name: "any" } }, - sncLibraryPath: { - serializedName: "typeProperties.sncLibraryPath", + authenticationType: { + serializedName: "typeProperties.authenticationType", type: { - name: "any" + name: "String" } }, - sncQop: { - serializedName: "typeProperties.sncQop", + userName: { + serializedName: "typeProperties.userName", type: { name: "any" } }, - logonGroup: { - serializedName: "typeProperties.logonGroup", + password: { + serializedName: "typeProperties.password", type: { - name: "any" + name: "Composite", + className: "SecretBase" } }, encryptedCredential: { @@ -12351,52 +12737,35 @@ export const SapTableLinkedService: coreClient.CompositeMapper = { type: { name: "any" } - } - } - } -}; - -export const AzureDataExplorerLinkedService: coreClient.CompositeMapper = { - serializedName: "AzureDataExplorer", - type: { - name: "Composite", - className: "AzureDataExplorerLinkedService", - uberParent: "LinkedService", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, - modelProperties: { - ...LinkedService.type.modelProperties, - endpoint: { - serializedName: "typeProperties.endpoint", - required: true, + }, + privateKeyPath: { + serializedName: "typeProperties.privateKeyPath", type: { name: "any" } }, - servicePrincipalId: { - serializedName: "typeProperties.servicePrincipalId", - required: true, + privateKeyContent: { + serializedName: "typeProperties.privateKeyContent", type: { - name: "any" + name: "Composite", + className: "SecretBase" } }, - servicePrincipalKey: { - serializedName: "typeProperties.servicePrincipalKey", + passPhrase: { + serializedName: "typeProperties.passPhrase", type: { name: "Composite", className: "SecretBase" } }, - database: { 
- serializedName: "typeProperties.database", - required: true, + skipHostKeyValidation: { + serializedName: "typeProperties.skipHostKeyValidation", type: { name: "any" } }, - tenant: { - serializedName: "typeProperties.tenant", - required: true, + hostKeyFingerprint: { + serializedName: "typeProperties.hostKeyFingerprint", type: { name: "any" } @@ -12405,53 +12774,39 @@ export const AzureDataExplorerLinkedService: coreClient.CompositeMapper = { } }; -export const AzureFunctionLinkedService: coreClient.CompositeMapper = { - serializedName: "AzureFunction", +export const SapBWLinkedService: coreClient.CompositeMapper = { + serializedName: "SapBW", type: { name: "Composite", - className: "AzureFunctionLinkedService", + className: "SapBWLinkedService", uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { ...LinkedService.type.modelProperties, - functionAppUrl: { - serializedName: "typeProperties.functionAppUrl", + server: { + serializedName: "typeProperties.server", required: true, type: { name: "any" } }, - functionKey: { - serializedName: "typeProperties.functionKey", + systemNumber: { + serializedName: "typeProperties.systemNumber", + required: true, type: { - name: "Composite", - className: "SecretBase" + name: "any" } }, - encryptedCredential: { - serializedName: "typeProperties.encryptedCredential", - type: { + clientId: { + serializedName: "typeProperties.clientId", + required: true, + type: { name: "any" } - } - } - } -}; - -export const SnowflakeLinkedService: coreClient.CompositeMapper = { - serializedName: "Snowflake", - type: { - name: "Composite", - className: "SnowflakeLinkedService", - uberParent: "LinkedService", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, - modelProperties: { - ...LinkedService.type.modelProperties, - connectionString: { - serializedName: "typeProperties.connectionString", - required: true, + }, + userName: { + serializedName: "typeProperties.userName", type: { name: "any" } @@ -12460,7 +12815,7 @@ export const SnowflakeLinkedService: coreClient.CompositeMapper = { serializedName: "typeProperties.password", type: { name: "Composite", - className: "AzureKeyVaultSecretReference" + className: "SecretBase" } }, encryptedCredential: { @@ -12473,39 +12828,43 @@ export const SnowflakeLinkedService: coreClient.CompositeMapper = { } }; -export const SharePointOnlineListLinkedService: coreClient.CompositeMapper = { - serializedName: "SharePointOnlineList", +export const SapHanaLinkedService: coreClient.CompositeMapper = { + serializedName: "SapHana", type: { name: "Composite", - className: "SharePointOnlineListLinkedService", + className: "SapHanaLinkedService", uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { ...LinkedService.type.modelProperties, - siteUrl: { - serializedName: "typeProperties.siteUrl", - required: true, + connectionString: { + serializedName: "typeProperties.connectionString", type: { name: "any" } }, - tenantId: { - serializedName: "typeProperties.tenantId", + server: { + serializedName: "typeProperties.server", required: true, type: { name: "any" } }, - servicePrincipalId: { - serializedName: "typeProperties.servicePrincipalId", - required: true, + authenticationType: { + serializedName: "typeProperties.authenticationType", + type: 
{ + name: "String" + } + }, + userName: { + serializedName: "typeProperties.userName", type: { name: "any" } }, - servicePrincipalKey: { - serializedName: "typeProperties.servicePrincipalKey", + password: { + serializedName: "typeProperties.password", type: { name: "Composite", className: "SecretBase" @@ -12521,152 +12880,176 @@ export const SharePointOnlineListLinkedService: coreClient.CompositeMapper = { } }; -export const AmazonS3Dataset: coreClient.CompositeMapper = { - serializedName: "AmazonS3Object", +export const AmazonMWSLinkedService: coreClient.CompositeMapper = { + serializedName: "AmazonMWS", type: { name: "Composite", - className: "AmazonS3Dataset", - uberParent: "Dataset", + className: "AmazonMWSLinkedService", + uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { - ...Dataset.type.modelProperties, - bucketName: { - serializedName: "typeProperties.bucketName", + ...LinkedService.type.modelProperties, + endpoint: { + serializedName: "typeProperties.endpoint", required: true, type: { name: "any" } }, - key: { - serializedName: "typeProperties.key", + marketplaceID: { + serializedName: "typeProperties.marketplaceID", + required: true, type: { name: "any" } }, - prefix: { - serializedName: "typeProperties.prefix", + sellerID: { + serializedName: "typeProperties.sellerID", + required: true, type: { name: "any" } }, - version: { - serializedName: "typeProperties.version", + mwsAuthToken: { + serializedName: "typeProperties.mwsAuthToken", + type: { + name: "Composite", + className: "SecretBase" + } + }, + accessKeyId: { + serializedName: "typeProperties.accessKeyId", + required: true, type: { name: "any" } }, - modifiedDatetimeStart: { - serializedName: "typeProperties.modifiedDatetimeStart", + secretKey: { + serializedName: "typeProperties.secretKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", type: { name: "any" } }, - modifiedDatetimeEnd: { - serializedName: "typeProperties.modifiedDatetimeEnd", + useHostVerification: { + serializedName: "typeProperties.useHostVerification", type: { name: "any" } }, - format: { - serializedName: "typeProperties.format", + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", type: { - name: "Composite", - className: "DatasetStorageFormat" + name: "any" } }, - compression: { - serializedName: "typeProperties.compression", + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { - name: "Composite", - className: "DatasetCompression" + name: "any" } } } } }; -export const AvroDataset: coreClient.CompositeMapper = { - serializedName: "Avro", +export const AzurePostgreSqlLinkedService: coreClient.CompositeMapper = { + serializedName: "AzurePostgreSql", type: { name: "Composite", - className: "AvroDataset", - uberParent: "Dataset", + className: "AzurePostgreSqlLinkedService", + uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { - ...Dataset.type.modelProperties, - location: { - serializedName: "typeProperties.location", + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: 
"typeProperties.connectionString", type: { - name: "Composite", - className: "DatasetLocation" + name: "any" } }, - avroCompressionCodec: { - serializedName: "typeProperties.avroCompressionCodec", + password: { + serializedName: "typeProperties.password", type: { - name: "any" + name: "Composite", + className: "AzureKeyVaultSecretReference" } }, - avroCompressionLevel: { - constraints: { - InclusiveMaximum: 9, - InclusiveMinimum: 1 - }, - serializedName: "typeProperties.avroCompressionLevel", + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { - name: "Number" + name: "any" } } } } }; -export const ExcelDataset: coreClient.CompositeMapper = { - serializedName: "Excel", +export const ConcurLinkedService: coreClient.CompositeMapper = { + serializedName: "Concur", type: { name: "Composite", - className: "ExcelDataset", - uberParent: "Dataset", + className: "ConcurLinkedService", + uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { - ...Dataset.type.modelProperties, - location: { - serializedName: "typeProperties.location", + ...LinkedService.type.modelProperties, + connectionProperties: { + serializedName: "typeProperties.connectionProperties", type: { - name: "Composite", - className: "DatasetLocation" + name: "any" } }, - sheetName: { - serializedName: "typeProperties.sheetName", + clientId: { + serializedName: "typeProperties.clientId", + required: true, type: { name: "any" } }, - range: { - serializedName: "typeProperties.range", + username: { + serializedName: "typeProperties.username", + required: true, type: { name: "any" } }, - firstRowAsHeader: { - serializedName: "typeProperties.firstRowAsHeader", + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", type: { name: "any" } }, - compression: { - serializedName: "typeProperties.compression", + useHostVerification: { + serializedName: "typeProperties.useHostVerification", type: { - name: "Composite", - className: "DatasetCompression" + name: "any" } }, - nullValue: { - serializedName: "typeProperties.nullValue", + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: "any" } @@ -12675,25 +13058,31 @@ export const ExcelDataset: coreClient.CompositeMapper = { } }; -export const ParquetDataset: coreClient.CompositeMapper = { - serializedName: "Parquet", +export const CouchbaseLinkedService: coreClient.CompositeMapper = { + serializedName: "Couchbase", type: { name: "Composite", - className: "ParquetDataset", - uberParent: "Dataset", + className: "CouchbaseLinkedService", + uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { - ...Dataset.type.modelProperties, - location: { - serializedName: "typeProperties.location", + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + type: { + name: "any" + } + }, + credString: { + serializedName: "typeProperties.credString", type: 
{ name: "Composite", - className: "DatasetLocation" + className: "AzureKeyVaultSecretReference" } }, - compressionCodec: { - serializedName: "typeProperties.compressionCodec", + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: "any" } @@ -12702,73 +13091,90 @@ export const ParquetDataset: coreClient.CompositeMapper = { } }; -export const DelimitedTextDataset: coreClient.CompositeMapper = { - serializedName: "DelimitedText", +export const DrillLinkedService: coreClient.CompositeMapper = { + serializedName: "Drill", type: { name: "Composite", - className: "DelimitedTextDataset", - uberParent: "Dataset", + className: "DrillLinkedService", + uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { - ...Dataset.type.modelProperties, - location: { - serializedName: "typeProperties.location", + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", type: { - name: "Composite", - className: "DatasetLocation" + name: "any" } }, - columnDelimiter: { - serializedName: "typeProperties.columnDelimiter", + pwd: { + serializedName: "typeProperties.pwd", type: { - name: "any" + name: "Composite", + className: "AzureKeyVaultSecretReference" } }, - rowDelimiter: { - serializedName: "typeProperties.rowDelimiter", + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: "any" } - }, - encodingName: { - serializedName: "typeProperties.encodingName", + } + } + } +}; + +export const EloquaLinkedService: coreClient.CompositeMapper = { + serializedName: "Eloqua", + type: { + name: "Composite", + className: "EloquaLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + endpoint: { + serializedName: "typeProperties.endpoint", + required: true, type: { name: "any" } }, - compressionCodec: { - serializedName: "typeProperties.compressionCodec", + username: { + serializedName: "typeProperties.username", + required: true, type: { - name: "String" + name: "any" } }, - compressionLevel: { - serializedName: "typeProperties.compressionLevel", + password: { + serializedName: "typeProperties.password", type: { - name: "any" + name: "Composite", + className: "SecretBase" } }, - quoteChar: { - serializedName: "typeProperties.quoteChar", + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", type: { name: "any" } }, - escapeChar: { - serializedName: "typeProperties.escapeChar", + useHostVerification: { + serializedName: "typeProperties.useHostVerification", type: { name: "any" } }, - firstRowAsHeader: { - serializedName: "typeProperties.firstRowAsHeader", + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", type: { name: "any" } }, - nullValue: { - serializedName: "typeProperties.nullValue", + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: "any" } @@ -12777,302 +13183,323 @@ export const DelimitedTextDataset: coreClient.CompositeMapper = { } }; -export const JsonDataset: coreClient.CompositeMapper = { - serializedName: "Json", +export const GoogleBigQueryLinkedService: coreClient.CompositeMapper = { + serializedName: 
"GoogleBigQuery", type: { name: "Composite", - className: "JsonDataset", - uberParent: "Dataset", + className: "GoogleBigQueryLinkedService", + uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { - ...Dataset.type.modelProperties, - location: { - serializedName: "typeProperties.location", + ...LinkedService.type.modelProperties, + project: { + serializedName: "typeProperties.project", + required: true, type: { - name: "Composite", - className: "DatasetLocation" + name: "any" } }, - encodingName: { - serializedName: "typeProperties.encodingName", + additionalProjects: { + serializedName: "typeProperties.additionalProjects", type: { name: "any" } }, - compression: { - serializedName: "typeProperties.compression", + requestGoogleDriveScope: { + serializedName: "typeProperties.requestGoogleDriveScope", + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + required: true, + type: { + name: "String" + } + }, + refreshToken: { + serializedName: "typeProperties.refreshToken", type: { name: "Composite", - className: "DatasetCompression" + className: "SecretBase" } - } - } - } -}; - -export const XmlDataset: coreClient.CompositeMapper = { - serializedName: "Xml", - type: { - name: "Composite", - className: "XmlDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - location: { - serializedName: "typeProperties.location", + }, + clientId: { + serializedName: "typeProperties.clientId", + type: { + name: "any" + } + }, + clientSecret: { + serializedName: "typeProperties.clientSecret", type: { name: "Composite", - className: "DatasetLocation" + className: "SecretBase" } }, - encodingName: { - serializedName: "typeProperties.encodingName", + email: { + serializedName: "typeProperties.email", type: { name: "any" } }, - nullValue: { - serializedName: "typeProperties.nullValue", + keyFilePath: { + serializedName: "typeProperties.keyFilePath", type: { name: "any" } }, - compression: { - serializedName: "typeProperties.compression", + trustedCertPath: { + serializedName: "typeProperties.trustedCertPath", type: { - name: "Composite", - className: "DatasetCompression" + name: "any" } - } - } - } -}; - -export const OrcDataset: coreClient.CompositeMapper = { - serializedName: "Orc", - type: { - name: "Composite", - className: "OrcDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - location: { - serializedName: "typeProperties.location", + }, + useSystemTrustStore: { + serializedName: "typeProperties.useSystemTrustStore", type: { - name: "Composite", - className: "DatasetLocation" + name: "any" } }, - orcCompressionCodec: { - serializedName: "typeProperties.orcCompressionCodec", + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { - name: "String" + name: "any" } } } } }; -export const BinaryDataset: coreClient.CompositeMapper = { - serializedName: "Binary", +export const GreenplumLinkedService: coreClient.CompositeMapper = { + serializedName: "Greenplum", type: { name: "Composite", - className: "BinaryDataset", - 
uberParent: "Dataset", + className: "GreenplumLinkedService", + uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { - ...Dataset.type.modelProperties, - location: { - serializedName: "typeProperties.location", + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", type: { - name: "Composite", - className: "DatasetLocation" + name: "any" } }, - compression: { - serializedName: "typeProperties.compression", + pwd: { + serializedName: "typeProperties.pwd", type: { name: "Composite", - className: "DatasetCompression" + className: "AzureKeyVaultSecretReference" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" } } } } }; -export const AzureBlobDataset: coreClient.CompositeMapper = { - serializedName: "AzureBlob", +export const HBaseLinkedService: coreClient.CompositeMapper = { + serializedName: "HBase", type: { name: "Composite", - className: "AzureBlobDataset", - uberParent: "Dataset", + className: "HBaseLinkedService", + uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { - ...Dataset.type.modelProperties, - folderPath: { - serializedName: "typeProperties.folderPath", + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, type: { name: "any" } }, - tableRootLocation: { - serializedName: "typeProperties.tableRootLocation", + port: { + serializedName: "typeProperties.port", type: { name: "any" } }, - fileName: { - serializedName: "typeProperties.fileName", + httpPath: { + serializedName: "typeProperties.httpPath", type: { name: "any" } }, - modifiedDatetimeStart: { - serializedName: "typeProperties.modifiedDatetimeStart", + authenticationType: { + serializedName: "typeProperties.authenticationType", + required: true, type: { - name: "any" + name: "String" } }, - modifiedDatetimeEnd: { - serializedName: "typeProperties.modifiedDatetimeEnd", + username: { + serializedName: "typeProperties.username", type: { name: "any" } }, - format: { - serializedName: "typeProperties.format", + password: { + serializedName: "typeProperties.password", type: { name: "Composite", - className: "DatasetStorageFormat" + className: "SecretBase" } }, - compression: { - serializedName: "typeProperties.compression", + enableSsl: { + serializedName: "typeProperties.enableSsl", type: { - name: "Composite", - className: "DatasetCompression" + name: "any" + } + }, + trustedCertPath: { + serializedName: "typeProperties.trustedCertPath", + type: { + name: "any" + } + }, + allowHostNameCNMismatch: { + serializedName: "typeProperties.allowHostNameCNMismatch", + type: { + name: "any" + } + }, + allowSelfSignedServerCert: { + serializedName: "typeProperties.allowSelfSignedServerCert", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" } } } } }; -export const AzureTableDataset: coreClient.CompositeMapper = { - serializedName: "AzureTable", +export const HiveLinkedService: coreClient.CompositeMapper = { + serializedName: "Hive", type: { name: "Composite", - className: "AzureTableDataset", - 
uberParent: "Dataset", + className: "HiveLinkedService", + uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", required: true, type: { name: "any" } - } - } - } -}; - -export const AzureSqlTableDataset: coreClient.CompositeMapper = { - serializedName: "AzureSqlTable", - type: { - name: "Composite", - className: "AzureSqlTableDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + }, + port: { + serializedName: "typeProperties.port", type: { name: "any" } }, - schemaTypePropertiesSchema: { - serializedName: "typeProperties.schema", + serverType: { + serializedName: "typeProperties.serverType", + type: { + name: "String" + } + }, + thriftTransportProtocol: { + serializedName: "typeProperties.thriftTransportProtocol", + type: { + name: "String" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + required: true, + type: { + name: "String" + } + }, + serviceDiscoveryMode: { + serializedName: "typeProperties.serviceDiscoveryMode", type: { name: "any" } }, - table: { - serializedName: "typeProperties.table", + zooKeeperNameSpace: { + serializedName: "typeProperties.zooKeeperNameSpace", type: { name: "any" } - } - } - } -}; - -export const AzureSqlMITableDataset: coreClient.CompositeMapper = { - serializedName: "AzureSqlMITable", - type: { - name: "Composite", - className: "AzureSqlMITableDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + }, + useNativeQuery: { + serializedName: "typeProperties.useNativeQuery", type: { name: "any" } }, - schemaTypePropertiesSchema: { - serializedName: "typeProperties.schema", + username: { + serializedName: "typeProperties.username", type: { name: "any" } }, - table: { - serializedName: "typeProperties.table", + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + httpPath: { + serializedName: "typeProperties.httpPath", type: { name: "any" } - } - } - } -}; - -export const AzureSqlDWTableDataset: coreClient.CompositeMapper = { - serializedName: "AzureSqlDWTable", - type: { - name: "Composite", - className: "AzureSqlDWTableDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + }, + enableSsl: { + serializedName: "typeProperties.enableSsl", type: { name: "any" } }, - schemaTypePropertiesSchema: { - serializedName: "typeProperties.schema", + trustedCertPath: { + serializedName: "typeProperties.trustedCertPath", type: { name: "any" } }, - table: { - serializedName: "typeProperties.table", + useSystemTrustStore: { + serializedName: 
"typeProperties.useSystemTrustStore", + type: { + name: "any" + } + }, + allowHostNameCNMismatch: { + serializedName: "typeProperties.allowHostNameCNMismatch", + type: { + name: "any" + } + }, + allowSelfSignedServerCert: { + serializedName: "typeProperties.allowSelfSignedServerCert", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: "any" } @@ -13081,24 +13508,64 @@ export const AzureSqlDWTableDataset: coreClient.CompositeMapper = { } }; -export const CassandraTableDataset: coreClient.CompositeMapper = { - serializedName: "CassandraTable", +export const HubspotLinkedService: coreClient.CompositeMapper = { + serializedName: "Hubspot", type: { name: "Composite", - className: "CassandraTableDataset", - uberParent: "Dataset", + className: "HubspotLinkedService", + uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + ...LinkedService.type.modelProperties, + clientId: { + serializedName: "typeProperties.clientId", + required: true, type: { name: "any" } }, - keyspace: { - serializedName: "typeProperties.keyspace", + clientSecret: { + serializedName: "typeProperties.clientSecret", + type: { + name: "Composite", + className: "SecretBase" + } + }, + accessToken: { + serializedName: "typeProperties.accessToken", + type: { + name: "Composite", + className: "SecretBase" + } + }, + refreshToken: { + serializedName: "typeProperties.refreshToken", + type: { + name: "Composite", + className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", + type: { + name: "any" + } + }, + useHostVerification: { + serializedName: "typeProperties.useHostVerification", + type: { + name: "any" + } + }, + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: "any" } @@ -13107,227 +13574,146 @@ export const CassandraTableDataset: coreClient.CompositeMapper = { } }; -export const CustomDataset: coreClient.CompositeMapper = { - serializedName: "CustomDataset", +export const ImpalaLinkedService: coreClient.CompositeMapper = { + serializedName: "Impala", type: { name: "Composite", - className: "CustomDataset", - uberParent: "Dataset", + className: "ImpalaLinkedService", + uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { - ...Dataset.type.modelProperties, - typeProperties: { - serializedName: "typeProperties", + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, type: { name: "any" } - } - } - } -}; - -export const CosmosDbSqlApiCollectionDataset: coreClient.CompositeMapper = { - serializedName: "CosmosDbSqlApiCollection", - type: { - name: "Composite", - className: "CosmosDbSqlApiCollectionDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - collectionName: { - 
serializedName: "typeProperties.collectionName", - required: true, + }, + port: { + serializedName: "typeProperties.port", type: { name: "any" } - } - } - } -}; - -export const DocumentDbCollectionDataset: coreClient.CompositeMapper = { - serializedName: "DocumentDbCollection", - type: { - name: "Composite", - className: "DocumentDbCollectionDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - collectionName: { - serializedName: "typeProperties.collectionName", + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", required: true, type: { - name: "any" + name: "String" } - } - } - } -}; - -export const DynamicsEntityDataset: coreClient.CompositeMapper = { - serializedName: "DynamicsEntity", - type: { - name: "Composite", - className: "DynamicsEntityDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - entityName: { - serializedName: "typeProperties.entityName", + }, + username: { + serializedName: "typeProperties.username", type: { name: "any" } - } - } - } -}; - -export const DynamicsCrmEntityDataset: coreClient.CompositeMapper = { - serializedName: "DynamicsCrmEntity", - type: { - name: "Composite", - className: "DynamicsCrmEntityDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - entityName: { - serializedName: "typeProperties.entityName", + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + enableSsl: { + serializedName: "typeProperties.enableSsl", type: { name: "any" } - } - } - } -}; - -export const CommonDataServiceForAppsEntityDataset: coreClient.CompositeMapper = { - serializedName: "CommonDataServiceForAppsEntity", - type: { - name: "Composite", - className: "CommonDataServiceForAppsEntityDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - entityName: { - serializedName: "typeProperties.entityName", + }, + trustedCertPath: { + serializedName: "typeProperties.trustedCertPath", type: { name: "any" } - } - } - } -}; - -export const AzureDataLakeStoreDataset: coreClient.CompositeMapper = { - serializedName: "AzureDataLakeStoreFile", - type: { - name: "Composite", - className: "AzureDataLakeStoreDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - folderPath: { - serializedName: "typeProperties.folderPath", + }, + useSystemTrustStore: { + serializedName: "typeProperties.useSystemTrustStore", type: { name: "any" } }, - fileName: { - serializedName: "typeProperties.fileName", + allowHostNameCNMismatch: { + serializedName: "typeProperties.allowHostNameCNMismatch", type: { name: "any" } }, - format: { - serializedName: "typeProperties.format", + allowSelfSignedServerCert: { + serializedName: "typeProperties.allowSelfSignedServerCert", type: { - name: "Composite", - className: 
"DatasetStorageFormat" + name: "any" } }, - compression: { - serializedName: "typeProperties.compression", + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { - name: "Composite", - className: "DatasetCompression" + name: "any" } } } } }; -export const AzureBlobFSDataset: coreClient.CompositeMapper = { - serializedName: "AzureBlobFSFile", +export const JiraLinkedService: coreClient.CompositeMapper = { + serializedName: "Jira", type: { name: "Composite", - className: "AzureBlobFSDataset", - uberParent: "Dataset", + className: "JiraLinkedService", + uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { - ...Dataset.type.modelProperties, - folderPath: { - serializedName: "typeProperties.folderPath", + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, type: { name: "any" } }, - fileName: { - serializedName: "typeProperties.fileName", + port: { + serializedName: "typeProperties.port", type: { name: "any" } }, - format: { - serializedName: "typeProperties.format", + username: { + serializedName: "typeProperties.username", + required: true, type: { - name: "Composite", - className: "DatasetStorageFormat" + name: "any" } }, - compression: { - serializedName: "typeProperties.compression", + password: { + serializedName: "typeProperties.password", type: { name: "Composite", - className: "DatasetCompression" + className: "SecretBase" } - } - } - } -}; - -export const Office365Dataset: coreClient.CompositeMapper = { - serializedName: "Office365Table", - type: { - name: "Composite", - className: "Office365Dataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", - required: true, + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", type: { name: "any" } }, - predicate: { - serializedName: "typeProperties.predicate", + useHostVerification: { + serializedName: "typeProperties.useHostVerification", + type: { + name: "any" + } + }, + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: "any" } @@ -13336,119 +13722,83 @@ export const Office365Dataset: coreClient.CompositeMapper = { } }; -export const FileShareDataset: coreClient.CompositeMapper = { - serializedName: "FileShare", +export const MagentoLinkedService: coreClient.CompositeMapper = { + serializedName: "Magento", type: { name: "Composite", - className: "FileShareDataset", - uberParent: "Dataset", + className: "MagentoLinkedService", + uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { - ...Dataset.type.modelProperties, - folderPath: { - serializedName: "typeProperties.folderPath", + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, type: { name: "any" } }, - fileName: { - serializedName: "typeProperties.fileName", + accessToken: { 
+ serializedName: "typeProperties.accessToken", type: { - name: "any" + name: "Composite", + className: "SecretBase" } }, - modifiedDatetimeStart: { - serializedName: "typeProperties.modifiedDatetimeStart", + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", type: { name: "any" } }, - modifiedDatetimeEnd: { - serializedName: "typeProperties.modifiedDatetimeEnd", + useHostVerification: { + serializedName: "typeProperties.useHostVerification", type: { name: "any" } }, - format: { - serializedName: "typeProperties.format", - type: { - name: "Composite", - className: "DatasetStorageFormat" - } - }, - fileFilter: { - serializedName: "typeProperties.fileFilter", + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", type: { name: "any" } }, - compression: { - serializedName: "typeProperties.compression", + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { - name: "Composite", - className: "DatasetCompression" + name: "any" } } } } }; -export const MongoDbCollectionDataset: coreClient.CompositeMapper = { - serializedName: "MongoDbCollection", +export const MariaDBLinkedService: coreClient.CompositeMapper = { + serializedName: "MariaDB", type: { name: "Composite", - className: "MongoDbCollectionDataset", - uberParent: "Dataset", + className: "MariaDBLinkedService", + uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { - ...Dataset.type.modelProperties, - collectionName: { - serializedName: "typeProperties.collectionName", - required: true, + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", type: { name: "any" } - } - } - } -}; - -export const MongoDbAtlasCollectionDataset: coreClient.CompositeMapper = { - serializedName: "MongoDbAtlasCollection", - type: { - name: "Composite", - className: "MongoDbAtlasCollectionDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - collection: { - serializedName: "typeProperties.collection", - required: true, + }, + pwd: { + serializedName: "typeProperties.pwd", type: { - name: "any" + name: "Composite", + className: "AzureKeyVaultSecretReference" } - } - } - } -}; - -export const MongoDbV2CollectionDataset: coreClient.CompositeMapper = { - serializedName: "MongoDbV2Collection", - type: { - name: "Composite", - className: "MongoDbV2CollectionDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - collection: { - serializedName: "typeProperties.collection", - required: true, + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: "any" } @@ -13457,39 +13807,31 @@ export const MongoDbV2CollectionDataset: coreClient.CompositeMapper = { } }; -export const CosmosDbMongoDbApiCollectionDataset: coreClient.CompositeMapper = { - serializedName: "CosmosDbMongoDbApiCollection", +export const AzureMariaDBLinkedService: coreClient.CompositeMapper = { + serializedName: "AzureMariaDB", type: { name: "Composite", - className: "CosmosDbMongoDbApiCollectionDataset", - uberParent: 
"Dataset", + className: "AzureMariaDBLinkedService", + uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { - ...Dataset.type.modelProperties, - collection: { - serializedName: "typeProperties.collection", - required: true, + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", type: { name: "any" } - } - } - } -}; - -export const ODataResourceDataset: coreClient.CompositeMapper = { - serializedName: "ODataResource", - type: { - name: "Composite", - className: "ODataResourceDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - path: { - serializedName: "typeProperties.path", + }, + pwd: { + serializedName: "typeProperties.pwd", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: "any" } @@ -13498,82 +13840,57 @@ export const ODataResourceDataset: coreClient.CompositeMapper = { } }; -export const OracleTableDataset: coreClient.CompositeMapper = { - serializedName: "OracleTable", +export const MarketoLinkedService: coreClient.CompositeMapper = { + serializedName: "Marketo", type: { name: "Composite", - className: "OracleTableDataset", - uberParent: "Dataset", + className: "MarketoLinkedService", + uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + ...LinkedService.type.modelProperties, + endpoint: { + serializedName: "typeProperties.endpoint", + required: true, type: { name: "any" } }, - schemaTypePropertiesSchema: { - serializedName: "typeProperties.schema", + clientId: { + serializedName: "typeProperties.clientId", + required: true, type: { name: "any" } }, - table: { - serializedName: "typeProperties.table", + clientSecret: { + serializedName: "typeProperties.clientSecret", type: { - name: "any" + name: "Composite", + className: "SecretBase" } - } - } - } -}; - -export const TeradataTableDataset: coreClient.CompositeMapper = { - serializedName: "TeradataTable", - type: { - name: "Composite", - className: "TeradataTableDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - database: { - serializedName: "typeProperties.database", + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", type: { name: "any" } }, - table: { - serializedName: "typeProperties.table", + useHostVerification: { + serializedName: "typeProperties.useHostVerification", type: { name: "any" } - } - } - } -}; - -export const AzureMySqlTableDataset: coreClient.CompositeMapper = { - serializedName: "AzureMySqlTable", - type: { - name: "Composite", - className: "AzureMySqlTableDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, 
- modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + }, + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", type: { name: "any" } }, - table: { - serializedName: "typeProperties.table", + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: "any" } @@ -13582,62 +13899,57 @@ export const AzureMySqlTableDataset: coreClient.CompositeMapper = { } }; -export const AmazonRedshiftTableDataset: coreClient.CompositeMapper = { - serializedName: "AmazonRedshiftTable", +export const PaypalLinkedService: coreClient.CompositeMapper = { + serializedName: "Paypal", type: { name: "Composite", - className: "AmazonRedshiftTableDataset", - uberParent: "Dataset", + className: "PaypalLinkedService", + uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, type: { name: "any" } }, - table: { - serializedName: "typeProperties.table", + clientId: { + serializedName: "typeProperties.clientId", + required: true, type: { name: "any" } }, - schemaTypePropertiesSchema: { - serializedName: "typeProperties.schema", + clientSecret: { + serializedName: "typeProperties.clientSecret", + type: { + name: "Composite", + className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", type: { name: "any" } - } - } - } -}; - -export const Db2TableDataset: coreClient.CompositeMapper = { - serializedName: "Db2Table", - type: { - name: "Composite", - className: "Db2TableDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + }, + useHostVerification: { + serializedName: "typeProperties.useHostVerification", type: { name: "any" } }, - schemaTypePropertiesSchema: { - serializedName: "typeProperties.schema", + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", type: { name: "any" } }, - table: { - serializedName: "typeProperties.table", + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: "any" } @@ -13646,18 +13958,394 @@ export const Db2TableDataset: coreClient.CompositeMapper = { } }; -export const RelationalTableDataset: coreClient.CompositeMapper = { - serializedName: "RelationalTable", +export const PhoenixLinkedService: coreClient.CompositeMapper = { + serializedName: "Phoenix", type: { name: "Composite", - className: "RelationalTableDataset", - uberParent: "Dataset", + className: "PhoenixLinkedService", + uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, + type: { + name: "any" + } + }, + 
port: { + serializedName: "typeProperties.port", + type: { + name: "any" + } + }, + httpPath: { + serializedName: "typeProperties.httpPath", + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + required: true, + type: { + name: "String" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + enableSsl: { + serializedName: "typeProperties.enableSsl", + type: { + name: "any" + } + }, + trustedCertPath: { + serializedName: "typeProperties.trustedCertPath", + type: { + name: "any" + } + }, + useSystemTrustStore: { + serializedName: "typeProperties.useSystemTrustStore", + type: { + name: "any" + } + }, + allowHostNameCNMismatch: { + serializedName: "typeProperties.allowHostNameCNMismatch", + type: { + name: "any" + } + }, + allowSelfSignedServerCert: { + serializedName: "typeProperties.allowSelfSignedServerCert", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const PrestoLinkedService: coreClient.CompositeMapper = { + serializedName: "Presto", + type: { + name: "Composite", + className: "PrestoLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, + type: { + name: "any" + } + }, + serverVersion: { + serializedName: "typeProperties.serverVersion", + required: true, + type: { + name: "any" + } + }, + catalog: { + serializedName: "typeProperties.catalog", + required: true, + type: { + name: "any" + } + }, + port: { + serializedName: "typeProperties.port", + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + required: true, + type: { + name: "String" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + enableSsl: { + serializedName: "typeProperties.enableSsl", + type: { + name: "any" + } + }, + trustedCertPath: { + serializedName: "typeProperties.trustedCertPath", + type: { + name: "any" + } + }, + useSystemTrustStore: { + serializedName: "typeProperties.useSystemTrustStore", + type: { + name: "any" + } + }, + allowHostNameCNMismatch: { + serializedName: "typeProperties.allowHostNameCNMismatch", + type: { + name: "any" + } + }, + allowSelfSignedServerCert: { + serializedName: "typeProperties.allowSelfSignedServerCert", + type: { + name: "any" + } + }, + timeZoneID: { + serializedName: "typeProperties.timeZoneID", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const QuickBooksLinkedService: coreClient.CompositeMapper = { + serializedName: "QuickBooks", + type: { + name: "Composite", + className: "QuickBooksLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + 
connectionProperties: { + serializedName: "typeProperties.connectionProperties", + type: { + name: "any" + } + }, + endpoint: { + serializedName: "typeProperties.endpoint", + required: true, + type: { + name: "any" + } + }, + companyId: { + serializedName: "typeProperties.companyId", + required: true, + type: { + name: "any" + } + }, + consumerKey: { + serializedName: "typeProperties.consumerKey", + required: true, + type: { + name: "any" + } + }, + consumerSecret: { + serializedName: "typeProperties.consumerSecret", + type: { + name: "Composite", + className: "SecretBase" + } + }, + accessToken: { + serializedName: "typeProperties.accessToken", + type: { + name: "Composite", + className: "SecretBase" + } + }, + accessTokenSecret: { + serializedName: "typeProperties.accessTokenSecret", + type: { + name: "Composite", + className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const ServiceNowLinkedService: coreClient.CompositeMapper = { + serializedName: "ServiceNow", + type: { + name: "Composite", + className: "ServiceNowLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + endpoint: { + serializedName: "typeProperties.endpoint", + required: true, + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + required: true, + type: { + name: "String" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + clientId: { + serializedName: "typeProperties.clientId", + type: { + name: "any" + } + }, + clientSecret: { + serializedName: "typeProperties.clientSecret", + type: { + name: "Composite", + className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", + type: { + name: "any" + } + }, + useHostVerification: { + serializedName: "typeProperties.useHostVerification", + type: { + name: "any" + } + }, + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const ShopifyLinkedService: coreClient.CompositeMapper = { + serializedName: "Shopify", + type: { + name: "Composite", + className: "ShopifyLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, + type: { + name: "any" + } + }, + accessToken: { + serializedName: "typeProperties.accessToken", + type: { + name: "Composite", + className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", + type: { + name: "any" + } + }, + useHostVerification: { + serializedName: "typeProperties.useHostVerification", + type: { + name: "any" + } + }, + usePeerVerification: { + serializedName: 
"typeProperties.usePeerVerification", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: "any" } @@ -13666,18 +14354,100 @@ export const RelationalTableDataset: coreClient.CompositeMapper = { } }; -export const InformixTableDataset: coreClient.CompositeMapper = { - serializedName: "InformixTable", +export const SparkLinkedService: coreClient.CompositeMapper = { + serializedName: "Spark", type: { name: "Composite", - className: "InformixTableDataset", - uberParent: "Dataset", + className: "SparkLinkedService", + uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, + type: { + name: "any" + } + }, + port: { + serializedName: "typeProperties.port", + required: true, + type: { + name: "any" + } + }, + serverType: { + serializedName: "typeProperties.serverType", + type: { + name: "String" + } + }, + thriftTransportProtocol: { + serializedName: "typeProperties.thriftTransportProtocol", + type: { + name: "String" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + required: true, + type: { + name: "String" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + httpPath: { + serializedName: "typeProperties.httpPath", + type: { + name: "any" + } + }, + enableSsl: { + serializedName: "typeProperties.enableSsl", + type: { + name: "any" + } + }, + trustedCertPath: { + serializedName: "typeProperties.trustedCertPath", + type: { + name: "any" + } + }, + useSystemTrustStore: { + serializedName: "typeProperties.useSystemTrustStore", + type: { + name: "any" + } + }, + allowHostNameCNMismatch: { + serializedName: "typeProperties.allowHostNameCNMismatch", + type: { + name: "any" + } + }, + allowSelfSignedServerCert: { + serializedName: "typeProperties.allowSelfSignedServerCert", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: "any" } @@ -13686,18 +14456,70 @@ export const InformixTableDataset: coreClient.CompositeMapper = { } }; -export const OdbcTableDataset: coreClient.CompositeMapper = { - serializedName: "OdbcTable", +export const SquareLinkedService: coreClient.CompositeMapper = { + serializedName: "Square", type: { name: "Composite", - className: "OdbcTableDataset", - uberParent: "Dataset", + className: "SquareLinkedService", + uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + ...LinkedService.type.modelProperties, + connectionProperties: { + serializedName: "typeProperties.connectionProperties", + type: { + name: "any" + } + }, + host: { + serializedName: "typeProperties.host", + required: true, + type: { + name: "any" + } + }, + clientId: { + serializedName: 
"typeProperties.clientId", + required: true, + type: { + name: "any" + } + }, + clientSecret: { + serializedName: "typeProperties.clientSecret", + type: { + name: "Composite", + className: "SecretBase" + } + }, + redirectUri: { + serializedName: "typeProperties.redirectUri", + required: true, + type: { + name: "any" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", + type: { + name: "any" + } + }, + useHostVerification: { + serializedName: "typeProperties.useHostVerification", + type: { + name: "any" + } + }, + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: "any" } @@ -13706,18 +14528,63 @@ export const OdbcTableDataset: coreClient.CompositeMapper = { } }; -export const MySqlTableDataset: coreClient.CompositeMapper = { - serializedName: "MySqlTable", +export const XeroLinkedService: coreClient.CompositeMapper = { + serializedName: "Xero", type: { name: "Composite", - className: "MySqlTableDataset", - uberParent: "Dataset", + className: "XeroLinkedService", + uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + ...LinkedService.type.modelProperties, + connectionProperties: { + serializedName: "typeProperties.connectionProperties", + type: { + name: "any" + } + }, + host: { + serializedName: "typeProperties.host", + required: true, + type: { + name: "any" + } + }, + consumerKey: { + serializedName: "typeProperties.consumerKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + privateKey: { + serializedName: "typeProperties.privateKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", + type: { + name: "any" + } + }, + useHostVerification: { + serializedName: "typeProperties.useHostVerification", + type: { + name: "any" + } + }, + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: "any" } @@ -13726,50 +14593,56 @@ export const MySqlTableDataset: coreClient.CompositeMapper = { } }; -export const PostgreSqlTableDataset: coreClient.CompositeMapper = { - serializedName: "PostgreSqlTable", +export const ZohoLinkedService: coreClient.CompositeMapper = { + serializedName: "Zoho", type: { name: "Composite", - className: "PostgreSqlTableDataset", - uberParent: "Dataset", + className: "ZohoLinkedService", + uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + ...LinkedService.type.modelProperties, + connectionProperties: { + serializedName: "typeProperties.connectionProperties", type: { name: "any" } }, - table: { - serializedName: "typeProperties.table", + endpoint: { + serializedName: "typeProperties.endpoint", + required: true, type: { name: "any" } }, - 
schemaTypePropertiesSchema: { - serializedName: "typeProperties.schema", + accessToken: { + serializedName: "typeProperties.accessToken", + type: { + name: "Composite", + className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", type: { name: "any" } - } - } - } -}; - -export const MicrosoftAccessTableDataset: coreClient.CompositeMapper = { - serializedName: "MicrosoftAccessTable", - type: { - name: "Composite", - className: "MicrosoftAccessTableDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + }, + useHostVerification: { + serializedName: "typeProperties.useHostVerification", + type: { + name: "any" + } + }, + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: "any" } @@ -13778,18 +14651,31 @@ export const MicrosoftAccessTableDataset: coreClient.CompositeMapper = { } }; -export const SalesforceObjectDataset: coreClient.CompositeMapper = { - serializedName: "SalesforceObject", +export const VerticaLinkedService: coreClient.CompositeMapper = { + serializedName: "Vertica", type: { name: "Composite", - className: "SalesforceObjectDataset", - uberParent: "Dataset", + className: "VerticaLinkedService", + uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { - ...Dataset.type.modelProperties, - objectApiName: { - serializedName: "typeProperties.objectApiName", + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + type: { + name: "any" + } + }, + pwd: { + serializedName: "typeProperties.pwd", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: "any" } @@ -13798,18 +14684,31 @@ export const SalesforceObjectDataset: coreClient.CompositeMapper = { } }; -export const SalesforceServiceCloudObjectDataset: coreClient.CompositeMapper = { - serializedName: "SalesforceServiceCloudObject", +export const NetezzaLinkedService: coreClient.CompositeMapper = { + serializedName: "Netezza", type: { name: "Composite", - className: "SalesforceServiceCloudObjectDataset", - uberParent: "Dataset", + className: "NetezzaLinkedService", + uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { - ...Dataset.type.modelProperties, - objectApiName: { - serializedName: "typeProperties.objectApiName", + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + type: { + name: "any" + } + }, + pwd: { + serializedName: "typeProperties.pwd", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: "any" } @@ -13818,18 +14717,56 @@ export const 
SalesforceServiceCloudObjectDataset: coreClient.CompositeMapper = { } }; -export const SybaseTableDataset: coreClient.CompositeMapper = { - serializedName: "SybaseTable", +export const SalesforceMarketingCloudLinkedService: coreClient.CompositeMapper = { + serializedName: "SalesforceMarketingCloud", type: { name: "Composite", - className: "SybaseTableDataset", - uberParent: "Dataset", + className: "SalesforceMarketingCloudLinkedService", + uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + ...LinkedService.type.modelProperties, + connectionProperties: { + serializedName: "typeProperties.connectionProperties", + type: { + name: "any" + } + }, + clientId: { + serializedName: "typeProperties.clientId", + required: true, + type: { + name: "any" + } + }, + clientSecret: { + serializedName: "typeProperties.clientSecret", + type: { + name: "Composite", + className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", + type: { + name: "any" + } + }, + useHostVerification: { + serializedName: "typeProperties.useHostVerification", + type: { + name: "any" + } + }, + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: "any" } @@ -13838,258 +14775,233 @@ export const SybaseTableDataset: coreClient.CompositeMapper = { } }; -export const SapBwCubeDataset: coreClient.CompositeMapper = { - serializedName: "SapBwCube", - type: { - name: "Composite", - className: "SapBwCubeDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties - } - } -}; - -export const SapCloudForCustomerResourceDataset: coreClient.CompositeMapper = { - serializedName: "SapCloudForCustomerResource", +export const HDInsightOnDemandLinkedService: coreClient.CompositeMapper = { + serializedName: "HDInsightOnDemand", type: { name: "Composite", - className: "SapCloudForCustomerResourceDataset", - uberParent: "Dataset", + className: "HDInsightOnDemandLinkedService", + uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { - ...Dataset.type.modelProperties, - path: { - serializedName: "typeProperties.path", + ...LinkedService.type.modelProperties, + clusterSize: { + serializedName: "typeProperties.clusterSize", + required: true, + type: { + name: "any" + } + }, + timeToLive: { + serializedName: "typeProperties.timeToLive", + required: true, + type: { + name: "any" + } + }, + version: { + serializedName: "typeProperties.version", + required: true, + type: { + name: "any" + } + }, + linkedServiceName: { + serializedName: "typeProperties.linkedServiceName", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + hostSubscriptionId: { + serializedName: "typeProperties.hostSubscriptionId", + required: true, + type: { + name: "any" + } + }, + servicePrincipalId: { + serializedName: 
"typeProperties.servicePrincipalId", + type: { + name: "any" + } + }, + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + tenant: { + serializedName: "typeProperties.tenant", required: true, type: { name: "any" } - } - } - } -}; - -export const SapEccResourceDataset: coreClient.CompositeMapper = { - serializedName: "SapEccResource", - type: { - name: "Composite", - className: "SapEccResourceDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - path: { - serializedName: "typeProperties.path", + }, + clusterResourceGroup: { + serializedName: "typeProperties.clusterResourceGroup", required: true, type: { name: "any" } - } - } - } -}; - -export const SapHanaTableDataset: coreClient.CompositeMapper = { - serializedName: "SapHanaTable", - type: { - name: "Composite", - className: "SapHanaTableDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - schemaTypePropertiesSchema: { - serializedName: "typeProperties.schema", + }, + clusterNamePrefix: { + serializedName: "typeProperties.clusterNamePrefix", type: { name: "any" } }, - table: { - serializedName: "typeProperties.table", + clusterUserName: { + serializedName: "typeProperties.clusterUserName", type: { name: "any" } - } - } - } -}; - -export const SapOpenHubTableDataset: coreClient.CompositeMapper = { - serializedName: "SapOpenHubTable", - type: { - name: "Composite", - className: "SapOpenHubTableDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - openHubDestinationName: { - serializedName: "typeProperties.openHubDestinationName", - required: true, + }, + clusterPassword: { + serializedName: "typeProperties.clusterPassword", + type: { + name: "Composite", + className: "SecretBase" + } + }, + clusterSshUserName: { + serializedName: "typeProperties.clusterSshUserName", type: { name: "any" } }, - excludeLastRequest: { - serializedName: "typeProperties.excludeLastRequest", + clusterSshPassword: { + serializedName: "typeProperties.clusterSshPassword", + type: { + name: "Composite", + className: "SecretBase" + } + }, + additionalLinkedServiceNames: { + serializedName: "typeProperties.additionalLinkedServiceNames", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "LinkedServiceReference" + } + } + } + }, + hcatalogLinkedServiceName: { + serializedName: "typeProperties.hcatalogLinkedServiceName", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + clusterType: { + serializedName: "typeProperties.clusterType", type: { name: "any" } }, - baseRequestId: { - serializedName: "typeProperties.baseRequestId", + sparkVersion: { + serializedName: "typeProperties.sparkVersion", type: { name: "any" } - } - } - } -}; - -export const SqlServerTableDataset: coreClient.CompositeMapper = { - serializedName: "SqlServerTable", - type: { - name: "Composite", - className: "SqlServerTableDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, 
- modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + }, + coreConfiguration: { + serializedName: "typeProperties.coreConfiguration", type: { name: "any" } }, - schemaTypePropertiesSchema: { - serializedName: "typeProperties.schema", + hBaseConfiguration: { + serializedName: "typeProperties.hBaseConfiguration", type: { name: "any" } }, - table: { - serializedName: "typeProperties.table", + hdfsConfiguration: { + serializedName: "typeProperties.hdfsConfiguration", type: { name: "any" } - } - } - } -}; - -export const RestResourceDataset: coreClient.CompositeMapper = { - serializedName: "RestResource", - type: { - name: "Composite", - className: "RestResourceDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - relativeUrl: { - serializedName: "typeProperties.relativeUrl", + }, + hiveConfiguration: { + serializedName: "typeProperties.hiveConfiguration", type: { name: "any" } }, - requestMethod: { - serializedName: "typeProperties.requestMethod", + mapReduceConfiguration: { + serializedName: "typeProperties.mapReduceConfiguration", type: { name: "any" } }, - requestBody: { - serializedName: "typeProperties.requestBody", + oozieConfiguration: { + serializedName: "typeProperties.oozieConfiguration", type: { name: "any" } }, - additionalHeaders: { - serializedName: "typeProperties.additionalHeaders", + stormConfiguration: { + serializedName: "typeProperties.stormConfiguration", type: { name: "any" } }, - paginationRules: { - serializedName: "typeProperties.paginationRules", + yarnConfiguration: { + serializedName: "typeProperties.yarnConfiguration", type: { name: "any" } - } - } - } -}; - -export const SapTableResourceDataset: coreClient.CompositeMapper = { - serializedName: "SapTableResource", - type: { - name: "Composite", - className: "SapTableResourceDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", - required: true, + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: "any" } - } - } - } -}; - -export const WebTableDataset: coreClient.CompositeMapper = { - serializedName: "WebTable", - type: { - name: "Composite", - className: "WebTableDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - index: { - serializedName: "typeProperties.index", - required: true, + }, + headNodeSize: { + serializedName: "typeProperties.headNodeSize", type: { name: "any" } }, - path: { - serializedName: "typeProperties.path", + dataNodeSize: { + serializedName: "typeProperties.dataNodeSize", type: { name: "any" } - } - } - } -}; - -export const AzureSearchIndexDataset: coreClient.CompositeMapper = { - serializedName: "AzureSearchIndex", - type: { - name: "Composite", - className: "AzureSearchIndexDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - indexName: { - serializedName: "typeProperties.indexName", - 
required: true, + }, + zookeeperNodeSize: { + serializedName: "typeProperties.zookeeperNodeSize", + type: { + name: "any" + } + }, + scriptActions: { + serializedName: "typeProperties.scriptActions", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ScriptAction" + } + } + } + }, + virtualNetworkId: { + serializedName: "typeProperties.virtualNetworkId", + type: { + name: "any" + } + }, + subnetName: { + serializedName: "typeProperties.subnetName", type: { name: "any" } @@ -14098,70 +15010,63 @@ export const AzureSearchIndexDataset: coreClient.CompositeMapper = { } }; -export const HttpDataset: coreClient.CompositeMapper = { - serializedName: "HttpFile", +export const AzureDataLakeAnalyticsLinkedService: coreClient.CompositeMapper = { + serializedName: "AzureDataLakeAnalytics", type: { name: "Composite", - className: "HttpDataset", - uberParent: "Dataset", + className: "AzureDataLakeAnalyticsLinkedService", + uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { - ...Dataset.type.modelProperties, - relativeUrl: { - serializedName: "typeProperties.relativeUrl", + ...LinkedService.type.modelProperties, + accountName: { + serializedName: "typeProperties.accountName", + required: true, type: { name: "any" } }, - requestMethod: { - serializedName: "typeProperties.requestMethod", + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", type: { name: "any" } }, - requestBody: { - serializedName: "typeProperties.requestBody", + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + tenant: { + serializedName: "typeProperties.tenant", + required: true, type: { name: "any" } }, - additionalHeaders: { - serializedName: "typeProperties.additionalHeaders", + subscriptionId: { + serializedName: "typeProperties.subscriptionId", type: { name: "any" } }, - format: { - serializedName: "typeProperties.format", + resourceGroupName: { + serializedName: "typeProperties.resourceGroupName", type: { - name: "Composite", - className: "DatasetStorageFormat" + name: "any" } }, - compression: { - serializedName: "typeProperties.compression", + dataLakeAnalyticsUri: { + serializedName: "typeProperties.dataLakeAnalyticsUri", type: { - name: "Composite", - className: "DatasetCompression" + name: "any" } - } - } - } -}; - -export const AmazonMWSObjectDataset: coreClient.CompositeMapper = { - serializedName: "AmazonMWSObject", - type: { - name: "Composite", - className: "AmazonMWSObjectDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: "any" } @@ -14170,154 +15075,125 @@ export const AmazonMWSObjectDataset: coreClient.CompositeMapper = { } }; -export const AzurePostgreSqlTableDataset: coreClient.CompositeMapper = { - serializedName: "AzurePostgreSqlTable", +export const AzureDatabricksLinkedService: coreClient.CompositeMapper = { + serializedName: "AzureDatabricks", type: { name: "Composite", - className: "AzurePostgreSqlTableDataset", - uberParent: "Dataset", + className: 
"AzureDatabricksLinkedService", + uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + ...LinkedService.type.modelProperties, + domain: { + serializedName: "typeProperties.domain", + required: true, type: { name: "any" } }, - table: { - serializedName: "typeProperties.table", + accessToken: { + serializedName: "typeProperties.accessToken", + type: { + name: "Composite", + className: "SecretBase" + } + }, + authentication: { + serializedName: "typeProperties.authentication", type: { name: "any" } }, - schemaTypePropertiesSchema: { - serializedName: "typeProperties.schema", + workspaceResourceId: { + serializedName: "typeProperties.workspaceResourceId", type: { name: "any" } - } - } - } -}; - -export const ConcurObjectDataset: coreClient.CompositeMapper = { - serializedName: "ConcurObject", - type: { - name: "Composite", - className: "ConcurObjectDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + }, + existingClusterId: { + serializedName: "typeProperties.existingClusterId", type: { name: "any" } - } - } - } -}; - -export const CouchbaseTableDataset: coreClient.CompositeMapper = { - serializedName: "CouchbaseTable", - type: { - name: "Composite", - className: "CouchbaseTableDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + }, + instancePoolId: { + serializedName: "typeProperties.instancePoolId", type: { name: "any" } - } - } - } -}; - -export const DrillTableDataset: coreClient.CompositeMapper = { - serializedName: "DrillTable", - type: { - name: "Composite", - className: "DrillTableDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + }, + newClusterVersion: { + serializedName: "typeProperties.newClusterVersion", type: { name: "any" } }, - table: { - serializedName: "typeProperties.table", + newClusterNumOfWorker: { + serializedName: "typeProperties.newClusterNumOfWorker", type: { name: "any" } }, - schemaTypePropertiesSchema: { - serializedName: "typeProperties.schema", + newClusterNodeType: { + serializedName: "typeProperties.newClusterNodeType", type: { name: "any" } - } - } - } -}; - -export const EloquaObjectDataset: coreClient.CompositeMapper = { - serializedName: "EloquaObject", - type: { - name: "Composite", - className: "EloquaObjectDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + }, + newClusterSparkConf: { + serializedName: "typeProperties.newClusterSparkConf", + type: { + name: "Dictionary", + value: { type: { name: 
"any" } } + } + }, + newClusterSparkEnvVars: { + serializedName: "typeProperties.newClusterSparkEnvVars", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + }, + newClusterCustomTags: { + serializedName: "typeProperties.newClusterCustomTags", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + }, + newClusterLogDestination: { + serializedName: "typeProperties.newClusterLogDestination", type: { name: "any" } - } - } - } -}; - -export const GoogleBigQueryObjectDataset: coreClient.CompositeMapper = { - serializedName: "GoogleBigQueryObject", - type: { - name: "Composite", - className: "GoogleBigQueryObjectDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + }, + newClusterDriverNodeType: { + serializedName: "typeProperties.newClusterDriverNodeType", type: { name: "any" } }, - table: { - serializedName: "typeProperties.table", + newClusterInitScripts: { + serializedName: "typeProperties.newClusterInitScripts", + type: { + name: "any" + } + }, + newClusterEnableElasticDisk: { + serializedName: "typeProperties.newClusterEnableElasticDisk", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: "any" } }, - dataset: { - serializedName: "typeProperties.dataset", + policyId: { + serializedName: "typeProperties.policyId", type: { name: "any" } @@ -14326,30 +15202,38 @@ export const GoogleBigQueryObjectDataset: coreClient.CompositeMapper = { } }; -export const GreenplumTableDataset: coreClient.CompositeMapper = { - serializedName: "GreenplumTable", +export const AzureDatabricksDeltaLakeLinkedService: coreClient.CompositeMapper = { + serializedName: "AzureDatabricksDeltaLake", type: { name: "Composite", - className: "GreenplumTableDataset", - uberParent: "Dataset", + className: "AzureDatabricksDeltaLakeLinkedService", + uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + ...LinkedService.type.modelProperties, + domain: { + serializedName: "typeProperties.domain", + required: true, type: { name: "any" } }, - table: { - serializedName: "typeProperties.table", + accessToken: { + serializedName: "typeProperties.accessToken", + type: { + name: "Composite", + className: "SecretBase" + } + }, + clusterId: { + serializedName: "typeProperties.clusterId", type: { name: "any" } }, - schemaTypePropertiesSchema: { - serializedName: "typeProperties.schema", + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: "any" } @@ -14358,70 +15242,57 @@ export const GreenplumTableDataset: coreClient.CompositeMapper = { } }; -export const HBaseObjectDataset: coreClient.CompositeMapper = { - serializedName: "HBaseObject", +export const ResponsysLinkedService: coreClient.CompositeMapper = { + serializedName: "Responsys", type: { name: "Composite", - className: "HBaseObjectDataset", - uberParent: "Dataset", + className: "ResponsysLinkedService", + uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: 
Dataset.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + ...LinkedService.type.modelProperties, + endpoint: { + serializedName: "typeProperties.endpoint", + required: true, type: { name: "any" } - } - } - } -}; - -export const HiveObjectDataset: coreClient.CompositeMapper = { - serializedName: "HiveObject", - type: { - name: "Composite", - className: "HiveObjectDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + }, + clientId: { + serializedName: "typeProperties.clientId", + required: true, type: { name: "any" } }, - table: { - serializedName: "typeProperties.table", + clientSecret: { + serializedName: "typeProperties.clientSecret", + type: { + name: "Composite", + className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", type: { name: "any" } }, - schemaTypePropertiesSchema: { - serializedName: "typeProperties.schema", + useHostVerification: { + serializedName: "typeProperties.useHostVerification", type: { name: "any" } - } - } - } -}; - -export const HubspotObjectDataset: coreClient.CompositeMapper = { - serializedName: "HubspotObject", - type: { - name: "Composite", - className: "HubspotObjectDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + }, + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: "any" } @@ -14430,70 +15301,53 @@ export const HubspotObjectDataset: coreClient.CompositeMapper = { } }; -export const ImpalaObjectDataset: coreClient.CompositeMapper = { - serializedName: "ImpalaObject", +export const DynamicsAXLinkedService: coreClient.CompositeMapper = { + serializedName: "DynamicsAX", type: { name: "Composite", - className: "ImpalaObjectDataset", - uberParent: "Dataset", + className: "DynamicsAXLinkedService", + uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + ...LinkedService.type.modelProperties, + url: { + serializedName: "typeProperties.url", + required: true, type: { name: "any" } }, - table: { - serializedName: "typeProperties.table", + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", + required: true, type: { name: "any" } }, - schemaTypePropertiesSchema: { - serializedName: "typeProperties.schema", + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + tenant: { + serializedName: "typeProperties.tenant", + required: true, type: { name: "any" } - } - } - } -}; - -export const JiraObjectDataset: coreClient.CompositeMapper = { - serializedName: 
"JiraObject", - type: { - name: "Composite", - className: "JiraObjectDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + }, + aadResourceId: { + serializedName: "typeProperties.aadResourceId", + required: true, type: { name: "any" } - } - } - } -}; - -export const MagentoObjectDataset: coreClient.CompositeMapper = { - serializedName: "MagentoObject", - type: { - name: "Composite", - className: "MagentoObjectDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: "any" } @@ -14502,38 +15356,57 @@ export const MagentoObjectDataset: coreClient.CompositeMapper = { } }; -export const MariaDBTableDataset: coreClient.CompositeMapper = { - serializedName: "MariaDBTable", +export const OracleServiceCloudLinkedService: coreClient.CompositeMapper = { + serializedName: "OracleServiceCloud", type: { name: "Composite", - className: "MariaDBTableDataset", - uberParent: "Dataset", + className: "OracleServiceCloudLinkedService", + uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, type: { name: "any" } - } - } - } -}; - -export const AzureMariaDBTableDataset: coreClient.CompositeMapper = { - serializedName: "AzureMariaDBTable", - type: { - name: "Composite", - className: "AzureMariaDBTableDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + }, + username: { + serializedName: "typeProperties.username", + required: true, + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", + type: { + name: "any" + } + }, + useHostVerification: { + serializedName: "typeProperties.useHostVerification", + type: { + name: "any" + } + }, + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: "any" } @@ -14542,102 +15415,83 @@ export const AzureMariaDBTableDataset: coreClient.CompositeMapper = { } }; -export const MarketoObjectDataset: coreClient.CompositeMapper = { - serializedName: "MarketoObject", +export const GoogleAdWordsLinkedService: coreClient.CompositeMapper = { + serializedName: "GoogleAdWords", type: { name: "Composite", - className: "MarketoObjectDataset", - uberParent: "Dataset", + className: "GoogleAdWordsLinkedService", + uberParent: 
"LinkedService", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + ...LinkedService.type.modelProperties, + clientCustomerID: { + serializedName: "typeProperties.clientCustomerID", + required: true, type: { name: "any" } - } - } - } -}; - -export const PaypalObjectDataset: coreClient.CompositeMapper = { - serializedName: "PaypalObject", - type: { - name: "Composite", - className: "PaypalObjectDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + }, + developerToken: { + serializedName: "typeProperties.developerToken", type: { - name: "any" + name: "Composite", + className: "SecretBase" } - } - } - } -}; - -export const PhoenixObjectDataset: coreClient.CompositeMapper = { - serializedName: "PhoenixObject", - type: { - name: "Composite", - className: "PhoenixObjectDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + required: true, + type: { + name: "String" + } + }, + refreshToken: { + serializedName: "typeProperties.refreshToken", + type: { + name: "Composite", + className: "SecretBase" + } + }, + clientId: { + serializedName: "typeProperties.clientId", type: { name: "any" } }, - table: { - serializedName: "typeProperties.table", + clientSecret: { + serializedName: "typeProperties.clientSecret", + type: { + name: "Composite", + className: "SecretBase" + } + }, + email: { + serializedName: "typeProperties.email", type: { name: "any" } }, - schemaTypePropertiesSchema: { - serializedName: "typeProperties.schema", + keyFilePath: { + serializedName: "typeProperties.keyFilePath", type: { name: "any" } - } - } - } -}; - -export const PrestoObjectDataset: coreClient.CompositeMapper = { - serializedName: "PrestoObject", - type: { - name: "Composite", - className: "PrestoObjectDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + }, + trustedCertPath: { + serializedName: "typeProperties.trustedCertPath", type: { name: "any" } }, - table: { - serializedName: "typeProperties.table", + useSystemTrustStore: { + serializedName: "typeProperties.useSystemTrustStore", type: { name: "any" } }, - schemaTypePropertiesSchema: { - serializedName: "typeProperties.schema", + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: "any" } @@ -14646,182 +15500,109 @@ export const PrestoObjectDataset: coreClient.CompositeMapper = { } }; -export const QuickBooksObjectDataset: coreClient.CompositeMapper = { - serializedName: "QuickBooksObject", +export const SapTableLinkedService: coreClient.CompositeMapper = { + serializedName: "SapTable", type: { name: "Composite", - className: 
"QuickBooksObjectDataset", - uberParent: "Dataset", + className: "SapTableLinkedService", + uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + ...LinkedService.type.modelProperties, + server: { + serializedName: "typeProperties.server", type: { name: "any" } - } - } - } -}; - -export const ServiceNowObjectDataset: coreClient.CompositeMapper = { - serializedName: "ServiceNowObject", - type: { - name: "Composite", - className: "ServiceNowObjectDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + }, + systemNumber: { + serializedName: "typeProperties.systemNumber", type: { name: "any" } - } - } - } -}; - -export const ShopifyObjectDataset: coreClient.CompositeMapper = { - serializedName: "ShopifyObject", - type: { - name: "Composite", - className: "ShopifyObjectDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + }, + clientId: { + serializedName: "typeProperties.clientId", type: { name: "any" } - } - } - } -}; - -export const SparkObjectDataset: coreClient.CompositeMapper = { - serializedName: "SparkObject", - type: { - name: "Composite", - className: "SparkObjectDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + }, + language: { + serializedName: "typeProperties.language", type: { name: "any" } }, - table: { - serializedName: "typeProperties.table", + systemId: { + serializedName: "typeProperties.systemId", type: { name: "any" } }, - schemaTypePropertiesSchema: { - serializedName: "typeProperties.schema", + userName: { + serializedName: "typeProperties.userName", type: { name: "any" } - } - } - } -}; - -export const SquareObjectDataset: coreClient.CompositeMapper = { - serializedName: "SquareObject", - type: { - name: "Composite", - className: "SquareObjectDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + messageServer: { + serializedName: "typeProperties.messageServer", type: { name: "any" } - } - } - } -}; - -export const XeroObjectDataset: coreClient.CompositeMapper = { - serializedName: "XeroObject", - type: { - name: "Composite", - className: "XeroObjectDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: 
"typeProperties.tableName", + }, + messageServerService: { + serializedName: "typeProperties.messageServerService", + type: { + name: "any" + } + }, + sncMode: { + serializedName: "typeProperties.sncMode", type: { name: "any" } - } - } - } -}; - -export const ZohoObjectDataset: coreClient.CompositeMapper = { - serializedName: "ZohoObject", - type: { - name: "Composite", - className: "ZohoObjectDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + }, + sncMyName: { + serializedName: "typeProperties.sncMyName", type: { name: "any" } - } - } - } -}; - -export const NetezzaTableDataset: coreClient.CompositeMapper = { - serializedName: "NetezzaTable", - type: { - name: "Composite", - className: "NetezzaTableDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + }, + sncPartnerName: { + serializedName: "typeProperties.sncPartnerName", type: { name: "any" } }, - table: { - serializedName: "typeProperties.table", + sncLibraryPath: { + serializedName: "typeProperties.sncLibraryPath", type: { name: "any" } }, - schemaTypePropertiesSchema: { - serializedName: "typeProperties.schema", + sncQop: { + serializedName: "typeProperties.sncQop", + type: { + name: "any" + } + }, + logonGroup: { + serializedName: "typeProperties.logonGroup", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: "any" } @@ -14830,70 +15611,45 @@ export const NetezzaTableDataset: coreClient.CompositeMapper = { } }; -export const VerticaTableDataset: coreClient.CompositeMapper = { - serializedName: "VerticaTable", +export const AzureDataExplorerLinkedService: coreClient.CompositeMapper = { + serializedName: "AzureDataExplorer", type: { name: "Composite", - className: "VerticaTableDataset", - uberParent: "Dataset", + className: "AzureDataExplorerLinkedService", + uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + ...LinkedService.type.modelProperties, + endpoint: { + serializedName: "typeProperties.endpoint", + required: true, type: { name: "any" } }, - table: { - serializedName: "typeProperties.table", + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", type: { name: "any" } }, - schemaTypePropertiesSchema: { - serializedName: "typeProperties.schema", + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", type: { - name: "any" + name: "Composite", + className: "SecretBase" } - } - } - } -}; - -export const SalesforceMarketingCloudObjectDataset: coreClient.CompositeMapper = { - serializedName: "SalesforceMarketingCloudObject", - type: { - name: "Composite", - className: "SalesforceMarketingCloudObjectDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - 
...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + }, + database: { + serializedName: "typeProperties.database", + required: true, type: { name: "any" } - } - } - } -}; - -export const ResponsysObjectDataset: coreClient.CompositeMapper = { - serializedName: "ResponsysObject", - type: { - name: "Composite", - className: "ResponsysObjectDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + }, + tenant: { + serializedName: "typeProperties.tenant", type: { name: "any" } @@ -14902,39 +15658,32 @@ export const ResponsysObjectDataset: coreClient.CompositeMapper = { } }; -export const DynamicsAXResourceDataset: coreClient.CompositeMapper = { - serializedName: "DynamicsAXResource", +export const AzureFunctionLinkedService: coreClient.CompositeMapper = { + serializedName: "AzureFunction", type: { name: "Composite", - className: "DynamicsAXResourceDataset", - uberParent: "Dataset", + className: "AzureFunctionLinkedService", + uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { - ...Dataset.type.modelProperties, - path: { - serializedName: "typeProperties.path", + ...LinkedService.type.modelProperties, + functionAppUrl: { + serializedName: "typeProperties.functionAppUrl", required: true, type: { name: "any" } - } - } - } -}; - -export const OracleServiceCloudObjectDataset: coreClient.CompositeMapper = { - serializedName: "OracleServiceCloudObject", - type: { - name: "Composite", - className: "OracleServiceCloudObjectDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + }, + functionKey: { + serializedName: "typeProperties.functionKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: "any" } @@ -14943,38 +15692,32 @@ export const OracleServiceCloudObjectDataset: coreClient.CompositeMapper = { } }; -export const AzureDataExplorerTableDataset: coreClient.CompositeMapper = { - serializedName: "AzureDataExplorerTable", +export const SnowflakeLinkedService: coreClient.CompositeMapper = { + serializedName: "Snowflake", type: { name: "Composite", - className: "AzureDataExplorerTableDataset", - uberParent: "Dataset", + className: "SnowflakeLinkedService", + uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { - ...Dataset.type.modelProperties, - table: { - serializedName: "typeProperties.table", + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + required: true, type: { name: "any" } - } - } - } -}; - -export const GoogleAdWordsObjectDataset: coreClient.CompositeMapper = { - serializedName: "GoogleAdWordsObject", - type: { - name: "Composite", - className: "GoogleAdWordsObjectDataset", - 
uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - tableName: { - serializedName: "typeProperties.tableName", + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: "any" } @@ -14983,70 +15726,46 @@ export const GoogleAdWordsObjectDataset: coreClient.CompositeMapper = { } }; -export const SnowflakeDataset: coreClient.CompositeMapper = { - serializedName: "SnowflakeTable", +export const SharePointOnlineListLinkedService: coreClient.CompositeMapper = { + serializedName: "SharePointOnlineList", type: { name: "Composite", - className: "SnowflakeDataset", - uberParent: "Dataset", + className: "SharePointOnlineListLinkedService", + uberParent: "LinkedService", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, modelProperties: { - ...Dataset.type.modelProperties, - schemaTypePropertiesSchema: { - serializedName: "typeProperties.schema", + ...LinkedService.type.modelProperties, + siteUrl: { + serializedName: "typeProperties.siteUrl", + required: true, type: { name: "any" } }, - table: { - serializedName: "typeProperties.table", + tenantId: { + serializedName: "typeProperties.tenantId", + required: true, type: { name: "any" } - } - } - } -}; - -export const SharePointOnlineListResourceDataset: coreClient.CompositeMapper = { - serializedName: "SharePointOnlineListResource", - type: { - name: "Composite", - className: "SharePointOnlineListResourceDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - listName: { - serializedName: "typeProperties.listName", + }, + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", + required: true, type: { name: "any" } - } - } - } -}; - -export const AzureDatabricksDeltaLakeDataset: coreClient.CompositeMapper = { - serializedName: "AzureDatabricksDeltaLakeDataset", - type: { - name: "Composite", - className: "AzureDatabricksDeltaLakeDataset", - uberParent: "Dataset", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, - modelProperties: { - ...Dataset.type.modelProperties, - table: { - serializedName: "typeProperties.table", + }, + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", type: { - name: "any" + name: "Composite", + className: "SecretBase" } }, - database: { - serializedName: "typeProperties.database", + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", type: { name: "any" } @@ -15179,330 +15898,161 @@ export const RerunTumblingWindowTrigger: coreClient.CompositeMapper = { serializedName: "typeProperties.rerunConcurrency", required: true, type: { - name: "Number" - } - } - } - } -}; - -export const MultiplePipelineTrigger: coreClient.CompositeMapper = { - serializedName: "MultiplePipelineTrigger", - type: { - name: "Composite", - className: "MultiplePipelineTrigger", - uberParent: "Trigger", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: { - 
serializedName: "type", - clientName: "type" - }, - modelProperties: { - ...Trigger.type.modelProperties, - pipelines: { - serializedName: "pipelines", - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "TriggerPipelineReference" - } - } - } - } - } - } -}; - -export const TumblingWindowTrigger: coreClient.CompositeMapper = { - serializedName: "TumblingWindowTrigger", - type: { - name: "Composite", - className: "TumblingWindowTrigger", - uberParent: "Trigger", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Trigger.type.polymorphicDiscriminator, - modelProperties: { - ...Trigger.type.modelProperties, - pipeline: { - serializedName: "pipeline", - type: { - name: "Composite", - className: "TriggerPipelineReference" - } - }, - frequency: { - serializedName: "typeProperties.frequency", - required: true, - type: { - name: "String" - } - }, - interval: { - serializedName: "typeProperties.interval", - required: true, - type: { - name: "Number" - } - }, - startTime: { - serializedName: "typeProperties.startTime", - required: true, - type: { - name: "DateTime" - } - }, - endTime: { - serializedName: "typeProperties.endTime", - type: { - name: "DateTime" - } - }, - delay: { - serializedName: "typeProperties.delay", - type: { - name: "any" - } - }, - maxConcurrency: { - constraints: { - InclusiveMaximum: 50, - InclusiveMinimum: 1 - }, - serializedName: "typeProperties.maxConcurrency", - required: true, - type: { - name: "Number" - } - }, - retryPolicy: { - serializedName: "typeProperties.retryPolicy", - type: { - name: "Composite", - className: "RetryPolicy" - } - }, - dependsOn: { - serializedName: "typeProperties.dependsOn", - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "DependencyReference" - } - } - } - } - } - } -}; - -export const ChainingTrigger: coreClient.CompositeMapper = { - serializedName: "ChainingTrigger", - type: { - name: "Composite", - className: "ChainingTrigger", - uberParent: "Trigger", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Trigger.type.polymorphicDiscriminator, - modelProperties: { - ...Trigger.type.modelProperties, - pipeline: { - serializedName: "pipeline", - type: { - name: "Composite", - className: "TriggerPipelineReference" - } - }, - dependsOn: { - serializedName: "typeProperties.dependsOn", - required: true, - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "PipelineReference" - } - } - } - }, - runDimension: { - serializedName: "typeProperties.runDimension", - required: true, - type: { - name: "String" - } - } - } - } -}; - -export const MappingDataFlow: coreClient.CompositeMapper = { - serializedName: "MappingDataFlow", - type: { - name: "Composite", - className: "MappingDataFlow", - uberParent: "DataFlow", - polymorphicDiscriminator: DataFlow.type.polymorphicDiscriminator, - modelProperties: { - ...DataFlow.type.modelProperties, - sources: { - serializedName: "typeProperties.sources", - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "DataFlowSource" - } - } - } - }, - sinks: { - serializedName: "typeProperties.sinks", - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "DataFlowSink" - } - } - } - }, - transformations: { - serializedName: "typeProperties.transformations", - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "Transformation" - } - } - } - }, - script: { - 
serializedName: "typeProperties.script", - type: { - name: "String" - } - } - } - } -}; - -export const DataFlowDebugResource: coreClient.CompositeMapper = { - type: { - name: "Composite", - className: "DataFlowDebugResource", - modelProperties: { - ...SubResourceDebugResource.type.modelProperties, - properties: { - serializedName: "properties", - type: { - name: "Composite", - className: "DataFlow" + name: "Number" } } } } }; -export const DatasetDebugResource: coreClient.CompositeMapper = { +export const MultiplePipelineTrigger: coreClient.CompositeMapper = { + serializedName: "MultiplePipelineTrigger", type: { name: "Composite", - className: "DatasetDebugResource", + className: "MultiplePipelineTrigger", + uberParent: "Trigger", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, modelProperties: { - ...SubResourceDebugResource.type.modelProperties, - properties: { - serializedName: "properties", + ...Trigger.type.modelProperties, + pipelines: { + serializedName: "pipelines", type: { - name: "Composite", - className: "Dataset" + name: "Sequence", + element: { + type: { + name: "Composite", + className: "TriggerPipelineReference" + } + } } } } } }; -export const LinkedServiceDebugResource: coreClient.CompositeMapper = { +export const TumblingWindowTrigger: coreClient.CompositeMapper = { + serializedName: "TumblingWindowTrigger", type: { name: "Composite", - className: "LinkedServiceDebugResource", + className: "TumblingWindowTrigger", + uberParent: "Trigger", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Trigger.type.polymorphicDiscriminator, modelProperties: { - ...SubResourceDebugResource.type.modelProperties, - properties: { - serializedName: "properties", + ...Trigger.type.modelProperties, + pipeline: { + serializedName: "pipeline", type: { name: "Composite", - className: "LinkedService" + className: "TriggerPipelineReference" } - } - } - } -}; - -export const ManagedIntegrationRuntime: coreClient.CompositeMapper = { - serializedName: "Managed", - type: { - name: "Composite", - className: "ManagedIntegrationRuntime", - uberParent: "IntegrationRuntime", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: IntegrationRuntime.type.polymorphicDiscriminator, - modelProperties: { - ...IntegrationRuntime.type.modelProperties, - state: { - serializedName: "state", - readOnly: true, + }, + frequency: { + serializedName: "typeProperties.frequency", + required: true, type: { name: "String" } }, - managedVirtualNetwork: { - serializedName: "managedVirtualNetwork", + interval: { + serializedName: "typeProperties.interval", + required: true, type: { - name: "Composite", - className: "ManagedVirtualNetworkReference" + name: "Number" } }, - computeProperties: { - serializedName: "typeProperties.computeProperties", + startTime: { + serializedName: "typeProperties.startTime", + required: true, type: { - name: "Composite", - className: "IntegrationRuntimeComputeProperties" + name: "DateTime" } }, - ssisProperties: { - serializedName: "typeProperties.ssisProperties", + endTime: { + serializedName: "typeProperties.endTime", + type: { + name: "DateTime" + } + }, + delay: { + serializedName: "typeProperties.delay", + type: { + name: "any" + } + }, + maxConcurrency: { + constraints: { + InclusiveMaximum: 50, + InclusiveMinimum: 1 + }, + serializedName: "typeProperties.maxConcurrency", + required: true, + type: { + name: "Number" + } + }, + retryPolicy: { + 
serializedName: "typeProperties.retryPolicy", type: { name: "Composite", - className: "IntegrationRuntimeSsisProperties" + className: "RetryPolicy" + } + }, + dependsOn: { + serializedName: "typeProperties.dependsOn", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "DependencyReference" + } + } } } } } }; -export const SelfHostedIntegrationRuntime: coreClient.CompositeMapper = { - serializedName: "SelfHosted", +export const ChainingTrigger: coreClient.CompositeMapper = { + serializedName: "ChainingTrigger", type: { name: "Composite", - className: "SelfHostedIntegrationRuntime", - uberParent: "IntegrationRuntime", + className: "ChainingTrigger", + uberParent: "Trigger", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: IntegrationRuntime.type.polymorphicDiscriminator, + polymorphicDiscriminator: Trigger.type.polymorphicDiscriminator, modelProperties: { - ...IntegrationRuntime.type.modelProperties, - linkedInfo: { - serializedName: "typeProperties.linkedInfo", + ...Trigger.type.modelProperties, + pipeline: { + serializedName: "pipeline", type: { name: "Composite", - className: "LinkedIntegrationRuntimeType" + className: "TriggerPipelineReference" + } + }, + dependsOn: { + serializedName: "typeProperties.dependsOn", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "PipelineReference" + } + } + } + }, + runDimension: { + serializedName: "typeProperties.runDimension", + required: true, + type: { + name: "String" } } } @@ -15903,7 +16453,7 @@ export const JsonFormat: coreClient.CompositeMapper = { filePattern: { serializedName: "filePattern", type: { - name: "String" + name: "any" } }, nestingSeparator: { @@ -17363,7 +17913,7 @@ export const JsonWriteSettings: coreClient.CompositeMapper = { filePattern: { serializedName: "filePattern", type: { - name: "String" + name: "any" } } } @@ -17390,13 +17940,7 @@ export const AvroSource: coreClient.CompositeMapper = { additionalColumns: { serializedName: "additionalColumns", type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "AdditionalColumns" - } - } + name: "any" } } } @@ -17423,13 +17967,7 @@ export const ExcelSource: coreClient.CompositeMapper = { additionalColumns: { serializedName: "additionalColumns", type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "AdditionalColumns" - } - } + name: "any" } } } @@ -17456,13 +17994,7 @@ export const ParquetSource: coreClient.CompositeMapper = { additionalColumns: { serializedName: "additionalColumns", type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "AdditionalColumns" - } - } + name: "any" } } } @@ -17496,13 +18028,7 @@ export const DelimitedTextSource: coreClient.CompositeMapper = { additionalColumns: { serializedName: "additionalColumns", type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "AdditionalColumns" - } - } + name: "any" } } } @@ -17536,13 +18062,7 @@ export const JsonSource: coreClient.CompositeMapper = { additionalColumns: { serializedName: "additionalColumns", type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "AdditionalColumns" - } - } + name: "any" } } } @@ -17576,13 +18096,7 @@ export const XmlSource: coreClient.CompositeMapper = { additionalColumns: { serializedName: "additionalColumns", type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "AdditionalColumns" - } - } 
+ name: "any" } } } @@ -17609,13 +18123,7 @@ export const OrcSource: coreClient.CompositeMapper = { additionalColumns: { serializedName: "additionalColumns", type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "AdditionalColumns" - } - } + name: "any" } } } @@ -17672,13 +18180,7 @@ export const TabularSource: coreClient.CompositeMapper = { additionalColumns: { serializedName: "additionalColumns", type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "AdditionalColumns" - } - } + name: "any" } } } @@ -17748,13 +18250,7 @@ export const DocumentDbCollectionSource: coreClient.CompositeMapper = { additionalColumns: { serializedName: "additionalColumns", type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "AdditionalColumns" - } - } + name: "any" } } } @@ -17798,13 +18294,7 @@ export const CosmosDbSqlApiSource: coreClient.CompositeMapper = { additionalColumns: { serializedName: "additionalColumns", type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "AdditionalColumns" - } - } + name: "any" } } } @@ -17830,13 +18320,7 @@ export const DynamicsSource: coreClient.CompositeMapper = { additionalColumns: { serializedName: "additionalColumns", type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "AdditionalColumns" - } - } + name: "any" } } } @@ -17862,13 +18346,7 @@ export const DynamicsCrmSource: coreClient.CompositeMapper = { additionalColumns: { serializedName: "additionalColumns", type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "AdditionalColumns" - } - } + name: "any" } } } @@ -17894,13 +18372,7 @@ export const CommonDataServiceForAppsSource: coreClient.CompositeMapper = { additionalColumns: { serializedName: "additionalColumns", type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "AdditionalColumns" - } - } + name: "any" } } } @@ -17926,13 +18398,7 @@ export const RelationalSource: coreClient.CompositeMapper = { additionalColumns: { serializedName: "additionalColumns", type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "AdditionalColumns" - } - } + name: "any" } } } @@ -17958,13 +18424,7 @@ export const MicrosoftAccessSource: coreClient.CompositeMapper = { additionalColumns: { serializedName: "additionalColumns", type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "AdditionalColumns" - } - } + name: "any" } } } @@ -17996,13 +18456,7 @@ export const ODataSource: coreClient.CompositeMapper = { additionalColumns: { serializedName: "additionalColumns", type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "AdditionalColumns" - } - } + name: "any" } } } @@ -18034,13 +18488,7 @@ export const SalesforceServiceCloudSource: coreClient.CompositeMapper = { additionalColumns: { serializedName: "additionalColumns", type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "AdditionalColumns" - } - } + name: "any" } } } @@ -18096,13 +18544,7 @@ export const RestSource: coreClient.CompositeMapper = { additionalColumns: { serializedName: "additionalColumns", type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "AdditionalColumns" - } - } + name: "any" } } } @@ -18128,13 +18570,7 @@ export const FileSystemSource: coreClient.CompositeMapper = { additionalColumns: { serializedName: "additionalColumns", type: { - name: "Sequence", - 
element: { - type: { - name: "Composite", - className: "AdditionalColumns" - } - } + name: "any" } } } @@ -18200,13 +18636,7 @@ export const AzureDataExplorerSource: coreClient.CompositeMapper = { additionalColumns: { serializedName: "additionalColumns", type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "AdditionalColumns" - } - } + name: "any" } } } @@ -18251,13 +18681,52 @@ export const OracleSource: coreClient.CompositeMapper = { additionalColumns: { serializedName: "additionalColumns", type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "AdditionalColumns" - } - } + name: "any" + } + } + } + } +}; + +export const AmazonRdsForOracleSource: coreClient.CompositeMapper = { + serializedName: "AmazonRdsForOracleSource", + type: { + name: "Composite", + className: "AmazonRdsForOracleSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + oracleReaderQuery: { + serializedName: "oracleReaderQuery", + type: { + name: "any" + } + }, + queryTimeout: { + serializedName: "queryTimeout", + type: { + name: "any" + } + }, + partitionOption: { + serializedName: "partitionOption", + type: { + name: "any" + } + }, + partitionSettings: { + serializedName: "partitionSettings", + type: { + name: "Composite", + className: "AmazonRdsForOraclePartitionSettings" + } + }, + additionalColumns: { + serializedName: "additionalColumns", + type: { + name: "any" } } } @@ -18277,13 +18746,7 @@ export const WebSource: coreClient.CompositeMapper = { additionalColumns: { serializedName: "additionalColumns", type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "AdditionalColumns" - } - } + name: "any" } } } @@ -18309,13 +18772,7 @@ export const MongoDbSource: coreClient.CompositeMapper = { additionalColumns: { serializedName: "additionalColumns", type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "AdditionalColumns" - } - } + name: "any" } } } @@ -18360,13 +18817,7 @@ export const MongoDbAtlasSource: coreClient.CompositeMapper = { additionalColumns: { serializedName: "additionalColumns", type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "AdditionalColumns" - } - } + name: "any" } } } @@ -18411,13 +18862,7 @@ export const MongoDbV2Source: coreClient.CompositeMapper = { additionalColumns: { serializedName: "additionalColumns", type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "AdditionalColumns" - } - } + name: "any" } } } @@ -18462,13 +18907,7 @@ export const CosmosDbMongoDbApiSource: coreClient.CompositeMapper = { additionalColumns: { serializedName: "additionalColumns", type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "AdditionalColumns" - } - } + name: "any" } } } @@ -20063,131 +20502,130 @@ export const SubResource: coreClient.CompositeMapper = { } }; -export const Workspace: coreClient.CompositeMapper = { +export const BigDataPoolResourceInfo: coreClient.CompositeMapper = { type: { name: "Composite", - className: "Workspace", + className: "BigDataPoolResourceInfo", modelProperties: { ...TrackedResource.type.modelProperties, - identity: { - serializedName: "identity", + provisioningState: { + serializedName: "properties.provisioningState", type: { - name: "Composite", - className: "ManagedIdentity" + name: "String" } }, - 
defaultDataLakeStorage: { - serializedName: "properties.defaultDataLakeStorage", + autoScale: { + serializedName: "properties.autoScale", type: { name: "Composite", - className: "DataLakeStorageAccountDetails" + className: "AutoScaleProperties" } }, - sqlAdministratorLoginPassword: { - serializedName: "properties.sqlAdministratorLoginPassword", + creationDate: { + serializedName: "properties.creationDate", type: { - name: "String" + name: "DateTime" } }, - managedResourceGroupName: { - serializedName: "properties.managedResourceGroupName", + autoPause: { + serializedName: "properties.autoPause", type: { - name: "String" + name: "Composite", + className: "AutoPauseProperties" } }, - provisioningState: { - serializedName: "properties.provisioningState", - readOnly: true, + isComputeIsolationEnabled: { + serializedName: "properties.isComputeIsolationEnabled", type: { - name: "String" + name: "Boolean" } }, - sqlAdministratorLogin: { - serializedName: "properties.sqlAdministratorLogin", + sessionLevelPackagesEnabled: { + serializedName: "properties.sessionLevelPackagesEnabled", type: { - name: "String" + name: "Boolean" } }, - virtualNetworkProfile: { - serializedName: "properties.virtualNetworkProfile", + cacheSize: { + serializedName: "properties.cacheSize", type: { - name: "Composite", - className: "VirtualNetworkProfile" + name: "Number" } }, - connectivityEndpoints: { - serializedName: "properties.connectivityEndpoints", + dynamicExecutorAllocation: { + serializedName: "properties.dynamicExecutorAllocation", type: { - name: "Dictionary", - value: { type: { name: "String" } } + name: "Composite", + className: "DynamicExecutorAllocation" } }, - managedVirtualNetwork: { - serializedName: "properties.managedVirtualNetwork", + sparkEventsFolder: { + serializedName: "properties.sparkEventsFolder", type: { name: "String" } }, - privateEndpointConnections: { - serializedName: "properties.privateEndpointConnections", + nodeCount: { + serializedName: "properties.nodeCount", + type: { + name: "Number" + } + }, + libraryRequirements: { + serializedName: "properties.libraryRequirements", + type: { + name: "Composite", + className: "LibraryRequirements" + } + }, + customLibraries: { + serializedName: "properties.customLibraries", type: { name: "Sequence", element: { type: { name: "Composite", - className: "PrivateEndpointConnection" + className: "LibraryInfo" } } } }, - encryption: { - serializedName: "properties.encryption", + sparkConfigProperties: { + serializedName: "properties.sparkConfigProperties", type: { name: "Composite", - className: "EncryptionDetails" - } - }, - workspaceUID: { - serializedName: "properties.workspaceUID", - readOnly: true, - type: { - name: "Uuid" + className: "LibraryRequirements" } }, - extraProperties: { - serializedName: "properties.extraProperties", - readOnly: true, + sparkVersion: { + serializedName: "properties.sparkVersion", type: { - name: "Dictionary", - value: { type: { name: "any" } } + name: "String" } }, - managedVirtualNetworkSettings: { - serializedName: "properties.managedVirtualNetworkSettings", + defaultSparkLogFolder: { + serializedName: "properties.defaultSparkLogFolder", type: { - name: "Composite", - className: "ManagedVirtualNetworkSettings" + name: "String" } }, - workspaceRepositoryConfiguration: { - serializedName: "properties.workspaceRepositoryConfiguration", + nodeSize: { + serializedName: "properties.nodeSize", type: { - name: "Composite", - className: "WorkspaceRepositoryConfiguration" + name: "String" } }, - purviewConfiguration: { - 
serializedName: "properties.purviewConfiguration", + nodeSizeFamily: { + serializedName: "properties.nodeSizeFamily", type: { - name: "Composite", - className: "PurviewConfiguration" + name: "String" } }, - adlaResourceId: { - serializedName: "properties.adlaResourceId", + lastSucceededTimestamp: { + serializedName: "properties.lastSucceededTimestamp", readOnly: true, type: { - name: "String" + name: "DateTime" } } } @@ -20265,130 +20703,131 @@ export const SqlPool: coreClient.CompositeMapper = { } }; -export const BigDataPoolResourceInfo: coreClient.CompositeMapper = { +export const Workspace: coreClient.CompositeMapper = { type: { name: "Composite", - className: "BigDataPoolResourceInfo", + className: "Workspace", modelProperties: { ...TrackedResource.type.modelProperties, - provisioningState: { - serializedName: "properties.provisioningState", - type: { - name: "String" - } - }, - autoScale: { - serializedName: "properties.autoScale", + identity: { + serializedName: "identity", type: { name: "Composite", - className: "AutoScaleProperties" - } - }, - creationDate: { - serializedName: "properties.creationDate", - type: { - name: "DateTime" + className: "ManagedIdentity" } }, - autoPause: { - serializedName: "properties.autoPause", + defaultDataLakeStorage: { + serializedName: "properties.defaultDataLakeStorage", type: { name: "Composite", - className: "AutoPauseProperties" + className: "DataLakeStorageAccountDetails" } }, - isComputeIsolationEnabled: { - serializedName: "properties.isComputeIsolationEnabled", + sqlAdministratorLoginPassword: { + serializedName: "properties.sqlAdministratorLoginPassword", type: { - name: "Boolean" + name: "String" } }, - sessionLevelPackagesEnabled: { - serializedName: "properties.sessionLevelPackagesEnabled", + managedResourceGroupName: { + serializedName: "properties.managedResourceGroupName", type: { - name: "Boolean" + name: "String" } }, - cacheSize: { - serializedName: "properties.cacheSize", + provisioningState: { + serializedName: "properties.provisioningState", + readOnly: true, type: { - name: "Number" + name: "String" } }, - dynamicExecutorAllocation: { - serializedName: "properties.dynamicExecutorAllocation", + sqlAdministratorLogin: { + serializedName: "properties.sqlAdministratorLogin", type: { - name: "Composite", - className: "DynamicExecutorAllocation" + name: "String" } }, - sparkEventsFolder: { - serializedName: "properties.sparkEventsFolder", + virtualNetworkProfile: { + serializedName: "properties.virtualNetworkProfile", type: { - name: "String" + name: "Composite", + className: "VirtualNetworkProfile" } }, - nodeCount: { - serializedName: "properties.nodeCount", + connectivityEndpoints: { + serializedName: "properties.connectivityEndpoints", type: { - name: "Number" + name: "Dictionary", + value: { type: { name: "String" } } } }, - libraryRequirements: { - serializedName: "properties.libraryRequirements", + managedVirtualNetwork: { + serializedName: "properties.managedVirtualNetwork", type: { - name: "Composite", - className: "LibraryRequirements" + name: "String" } }, - customLibraries: { - serializedName: "properties.customLibraries", + privateEndpointConnections: { + serializedName: "properties.privateEndpointConnections", type: { name: "Sequence", element: { type: { name: "Composite", - className: "LibraryInfo" + className: "PrivateEndpointConnection" } } } }, - sparkConfigProperties: { - serializedName: "properties.sparkConfigProperties", + encryption: { + serializedName: "properties.encryption", type: { name: "Composite", - 
className: "LibraryRequirements" + className: "EncryptionDetails" } }, - sparkVersion: { - serializedName: "properties.sparkVersion", + workspaceUID: { + serializedName: "properties.workspaceUID", + readOnly: true, type: { - name: "String" + name: "Uuid" } }, - defaultSparkLogFolder: { - serializedName: "properties.defaultSparkLogFolder", + extraProperties: { + serializedName: "properties.extraProperties", + readOnly: true, type: { - name: "String" + name: "Dictionary", + value: { type: { name: "any" } } } }, - nodeSize: { - serializedName: "properties.nodeSize", + managedVirtualNetworkSettings: { + serializedName: "properties.managedVirtualNetworkSettings", type: { - name: "String" + name: "Composite", + className: "ManagedVirtualNetworkSettings" } }, - nodeSizeFamily: { - serializedName: "properties.nodeSizeFamily", + workspaceRepositoryConfiguration: { + serializedName: "properties.workspaceRepositoryConfiguration", type: { - name: "String" + name: "Composite", + className: "WorkspaceRepositoryConfiguration" } }, - lastSucceededTimestamp: { - serializedName: "properties.lastSucceededTimestamp", + purviewConfiguration: { + serializedName: "properties.purviewConfiguration", + type: { + name: "Composite", + className: "PurviewConfiguration" + } + }, + adlaResourceId: { + serializedName: "properties.adlaResourceId", readOnly: true, type: { - name: "DateTime" + name: "String" } } } @@ -22956,6 +23395,60 @@ export const SqlServerSource: coreClient.CompositeMapper = { } }; +export const AmazonRdsForSqlServerSource: coreClient.CompositeMapper = { + serializedName: "AmazonRdsForSqlServerSource", + type: { + name: "Composite", + className: "AmazonRdsForSqlServerSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + sqlReaderQuery: { + serializedName: "sqlReaderQuery", + type: { + name: "any" + } + }, + sqlReaderStoredProcedureName: { + serializedName: "sqlReaderStoredProcedureName", + type: { + name: "any" + } + }, + storedProcedureParameters: { + serializedName: "storedProcedureParameters", + type: { + name: "Dictionary", + value: { + type: { name: "Composite", className: "StoredProcedureParameter" } + } + } + }, + produceAdditionalTypes: { + serializedName: "produceAdditionalTypes", + type: { + name: "any" + } + }, + partitionOption: { + serializedName: "partitionOption", + type: { + name: "any" + } + }, + partitionSettings: { + serializedName: "partitionSettings", + type: { + name: "Composite", + className: "SqlPartitionSettings" + } + } + } + } +}; + export const AzureSqlSource: coreClient.CompositeMapper = { serializedName: "AzureSqlSource", type: { @@ -23953,17 +24446,34 @@ export const TumblingWindowTriggerDependencyReference: coreClient.CompositeMappe } }; -export const LinkedServiceResource: coreClient.CompositeMapper = { +export const SparkConfigurationResource: coreClient.CompositeMapper = { type: { name: "Composite", - className: "LinkedServiceResource", + className: "SparkConfigurationResource", modelProperties: { ...SubResource.type.modelProperties, properties: { serializedName: "properties", type: { name: "Composite", - className: "LinkedService" + className: "SparkConfiguration" + } + } + } + } +}; + +export const DataFlowResource: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowResource", + modelProperties: { + ...SubResource.type.modelProperties, + properties: { + 
serializedName: "properties", + type: { + name: "Composite", + className: "DataFlow" } } } @@ -23987,6 +24497,57 @@ export const DatasetResource: coreClient.CompositeMapper = { } }; +export const IntegrationRuntimeResource: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "IntegrationRuntimeResource", + modelProperties: { + ...SubResource.type.modelProperties, + properties: { + serializedName: "properties", + type: { + name: "Composite", + className: "IntegrationRuntime" + } + } + } + } +}; + +export const LibraryResource: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "LibraryResource", + modelProperties: { + ...SubResource.type.modelProperties, + properties: { + serializedName: "properties", + type: { + name: "Composite", + className: "LibraryResourceProperties" + } + } + } + } +}; + +export const LinkedServiceResource: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "LinkedServiceResource", + modelProperties: { + ...SubResource.type.modelProperties, + properties: { + serializedName: "properties", + type: { + name: "Composite", + className: "LinkedService" + } + } + } + } +}; + export const PipelineResource: coreClient.CompositeMapper = { type: { name: "Composite", @@ -24068,40 +24629,6 @@ export const PipelineResource: coreClient.CompositeMapper = { } }; -export const TriggerResource: coreClient.CompositeMapper = { - type: { - name: "Composite", - className: "TriggerResource", - modelProperties: { - ...SubResource.type.modelProperties, - properties: { - serializedName: "properties", - type: { - name: "Composite", - className: "Trigger" - } - } - } - } -}; - -export const DataFlowResource: coreClient.CompositeMapper = { - type: { - name: "Composite", - className: "DataFlowResource", - modelProperties: { - ...SubResource.type.modelProperties, - properties: { - serializedName: "properties", - type: { - name: "Composite", - className: "DataFlow" - } - } - } - } -}; - export const SparkJobDefinitionResource: coreClient.CompositeMapper = { type: { name: "Composite", @@ -24119,34 +24646,17 @@ export const SparkJobDefinitionResource: coreClient.CompositeMapper = { } }; -export const IntegrationRuntimeResource: coreClient.CompositeMapper = { - type: { - name: "Composite", - className: "IntegrationRuntimeResource", - modelProperties: { - ...SubResource.type.modelProperties, - properties: { - serializedName: "properties", - type: { - name: "Composite", - className: "IntegrationRuntime" - } - } - } - } -}; - -export const LibraryResource: coreClient.CompositeMapper = { +export const TriggerResource: coreClient.CompositeMapper = { type: { name: "Composite", - className: "LibraryResource", + className: "TriggerResource", modelProperties: { ...SubResource.type.modelProperties, properties: { serializedName: "properties", type: { name: "Composite", - className: "LibraryResourceProperties" + className: "Trigger" } } } @@ -24201,12 +24711,12 @@ export const DataFlowDebugSessionExecuteCommandHeaders: coreClient.CompositeMapp }; export let discriminators = { - LinkedService: LinkedService, + DataFlow: DataFlow, + IntegrationRuntime: IntegrationRuntime, Dataset: Dataset, + LinkedService: LinkedService, Activity: Activity, Trigger: Trigger, - DataFlow: DataFlow, - IntegrationRuntime: IntegrationRuntime, SecretBase: SecretBase, DatasetLocation: DatasetLocation, DatasetStorageFormat: DatasetStorageFormat, @@ -24225,11 +24735,110 @@ export let discriminators = { DependencyReference: DependencyReference, "CustomSetupBase.undefined": 
CustomSetupBase, LinkedIntegrationRuntimeType: LinkedIntegrationRuntimeType, + "DataFlow.MappingDataFlow": MappingDataFlow, + "IntegrationRuntime.Managed": ManagedIntegrationRuntime, + "IntegrationRuntime.SelfHosted": SelfHostedIntegrationRuntime, + "Dataset.AmazonS3Object": AmazonS3Dataset, + "Dataset.Avro": AvroDataset, + "Dataset.Excel": ExcelDataset, + "Dataset.Parquet": ParquetDataset, + "Dataset.DelimitedText": DelimitedTextDataset, + "Dataset.Json": JsonDataset, + "Dataset.Xml": XmlDataset, + "Dataset.Orc": OrcDataset, + "Dataset.Binary": BinaryDataset, + "Dataset.AzureBlob": AzureBlobDataset, + "Dataset.AzureTable": AzureTableDataset, + "Dataset.AzureSqlTable": AzureSqlTableDataset, + "Dataset.AzureSqlMITable": AzureSqlMITableDataset, + "Dataset.AzureSqlDWTable": AzureSqlDWTableDataset, + "Dataset.CassandraTable": CassandraTableDataset, + "Dataset.CustomDataset": CustomDataset, + "Dataset.CosmosDbSqlApiCollection": CosmosDbSqlApiCollectionDataset, + "Dataset.DocumentDbCollection": DocumentDbCollectionDataset, + "Dataset.DynamicsEntity": DynamicsEntityDataset, + "Dataset.DynamicsCrmEntity": DynamicsCrmEntityDataset, + "Dataset.CommonDataServiceForAppsEntity": CommonDataServiceForAppsEntityDataset, + "Dataset.AzureDataLakeStoreFile": AzureDataLakeStoreDataset, + "Dataset.AzureBlobFSFile": AzureBlobFSDataset, + "Dataset.Office365Table": Office365Dataset, + "Dataset.FileShare": FileShareDataset, + "Dataset.MongoDbCollection": MongoDbCollectionDataset, + "Dataset.MongoDbAtlasCollection": MongoDbAtlasCollectionDataset, + "Dataset.MongoDbV2Collection": MongoDbV2CollectionDataset, + "Dataset.CosmosDbMongoDbApiCollection": CosmosDbMongoDbApiCollectionDataset, + "Dataset.ODataResource": ODataResourceDataset, + "Dataset.OracleTable": OracleTableDataset, + "Dataset.AmazonRdsForOracleTable": AmazonRdsForOracleTableDataset, + "Dataset.TeradataTable": TeradataTableDataset, + "Dataset.AzureMySqlTable": AzureMySqlTableDataset, + "Dataset.AmazonRedshiftTable": AmazonRedshiftTableDataset, + "Dataset.Db2Table": Db2TableDataset, + "Dataset.RelationalTable": RelationalTableDataset, + "Dataset.InformixTable": InformixTableDataset, + "Dataset.OdbcTable": OdbcTableDataset, + "Dataset.MySqlTable": MySqlTableDataset, + "Dataset.PostgreSqlTable": PostgreSqlTableDataset, + "Dataset.MicrosoftAccessTable": MicrosoftAccessTableDataset, + "Dataset.SalesforceObject": SalesforceObjectDataset, + "Dataset.SalesforceServiceCloudObject": SalesforceServiceCloudObjectDataset, + "Dataset.SybaseTable": SybaseTableDataset, + "Dataset.SapBwCube": SapBwCubeDataset, + "Dataset.SapCloudForCustomerResource": SapCloudForCustomerResourceDataset, + "Dataset.SapEccResource": SapEccResourceDataset, + "Dataset.SapHanaTable": SapHanaTableDataset, + "Dataset.SapOpenHubTable": SapOpenHubTableDataset, + "Dataset.SqlServerTable": SqlServerTableDataset, + "Dataset.AmazonRdsForSqlServerTable": AmazonRdsForSqlServerTableDataset, + "Dataset.RestResource": RestResourceDataset, + "Dataset.SapTableResource": SapTableResourceDataset, + "Dataset.WebTable": WebTableDataset, + "Dataset.AzureSearchIndex": AzureSearchIndexDataset, + "Dataset.HttpFile": HttpDataset, + "Dataset.AmazonMWSObject": AmazonMWSObjectDataset, + "Dataset.AzurePostgreSqlTable": AzurePostgreSqlTableDataset, + "Dataset.ConcurObject": ConcurObjectDataset, + "Dataset.CouchbaseTable": CouchbaseTableDataset, + "Dataset.DrillTable": DrillTableDataset, + "Dataset.EloquaObject": EloquaObjectDataset, + "Dataset.GoogleBigQueryObject": GoogleBigQueryObjectDataset, + "Dataset.GreenplumTable": 
GreenplumTableDataset, + "Dataset.HBaseObject": HBaseObjectDataset, + "Dataset.HiveObject": HiveObjectDataset, + "Dataset.HubspotObject": HubspotObjectDataset, + "Dataset.ImpalaObject": ImpalaObjectDataset, + "Dataset.JiraObject": JiraObjectDataset, + "Dataset.MagentoObject": MagentoObjectDataset, + "Dataset.MariaDBTable": MariaDBTableDataset, + "Dataset.AzureMariaDBTable": AzureMariaDBTableDataset, + "Dataset.MarketoObject": MarketoObjectDataset, + "Dataset.PaypalObject": PaypalObjectDataset, + "Dataset.PhoenixObject": PhoenixObjectDataset, + "Dataset.PrestoObject": PrestoObjectDataset, + "Dataset.QuickBooksObject": QuickBooksObjectDataset, + "Dataset.ServiceNowObject": ServiceNowObjectDataset, + "Dataset.ShopifyObject": ShopifyObjectDataset, + "Dataset.SparkObject": SparkObjectDataset, + "Dataset.SquareObject": SquareObjectDataset, + "Dataset.XeroObject": XeroObjectDataset, + "Dataset.ZohoObject": ZohoObjectDataset, + "Dataset.NetezzaTable": NetezzaTableDataset, + "Dataset.VerticaTable": VerticaTableDataset, + "Dataset.SalesforceMarketingCloudObject": SalesforceMarketingCloudObjectDataset, + "Dataset.ResponsysObject": ResponsysObjectDataset, + "Dataset.DynamicsAXResource": DynamicsAXResourceDataset, + "Dataset.OracleServiceCloudObject": OracleServiceCloudObjectDataset, + "Dataset.AzureDataExplorerTable": AzureDataExplorerTableDataset, + "Dataset.GoogleAdWordsObject": GoogleAdWordsObjectDataset, + "Dataset.SnowflakeTable": SnowflakeDataset, + "Dataset.SharePointOnlineListResource": SharePointOnlineListResourceDataset, + "Dataset.AzureDatabricksDeltaLakeDataset": AzureDatabricksDeltaLakeDataset, "LinkedService.AzureStorage": AzureStorageLinkedService, "LinkedService.AzureBlobStorage": AzureBlobStorageLinkedService, "LinkedService.AzureTableStorage": AzureTableStorageLinkedService, "LinkedService.AzureSqlDW": AzureSqlDWLinkedService, "LinkedService.SqlServer": SqlServerLinkedService, + "LinkedService.AmazonRdsForSqlServer": AmazonRdsForSqlServerLinkedService, "LinkedService.AzureSqlDatabase": AzureSqlDatabaseLinkedService, "LinkedService.AzureSqlMI": AzureSqlMILinkedService, "LinkedService.AzureBatch": AzureBatchLinkedService, @@ -24243,6 +24852,7 @@ export let discriminators = { "LinkedService.AzureFileStorage": AzureFileStorageLinkedService, "LinkedService.GoogleCloudStorage": GoogleCloudStorageLinkedService, "LinkedService.Oracle": OracleLinkedService, + "LinkedService.AmazonRdsForOracle": AmazonRdsForOracleLinkedService, "LinkedService.AzureMySql": AzureMySqlLinkedService, "LinkedService.MySql": MySqlLinkedService, "LinkedService.PostgreSql": PostgreSqlLinkedService, @@ -24323,99 +24933,6 @@ export let discriminators = { "LinkedService.AzureFunction": AzureFunctionLinkedService, "LinkedService.Snowflake": SnowflakeLinkedService, "LinkedService.SharePointOnlineList": SharePointOnlineListLinkedService, - "Dataset.AmazonS3Object": AmazonS3Dataset, - "Dataset.Avro": AvroDataset, - "Dataset.Excel": ExcelDataset, - "Dataset.Parquet": ParquetDataset, - "Dataset.DelimitedText": DelimitedTextDataset, - "Dataset.Json": JsonDataset, - "Dataset.Xml": XmlDataset, - "Dataset.Orc": OrcDataset, - "Dataset.Binary": BinaryDataset, - "Dataset.AzureBlob": AzureBlobDataset, - "Dataset.AzureTable": AzureTableDataset, - "Dataset.AzureSqlTable": AzureSqlTableDataset, - "Dataset.AzureSqlMITable": AzureSqlMITableDataset, - "Dataset.AzureSqlDWTable": AzureSqlDWTableDataset, - "Dataset.CassandraTable": CassandraTableDataset, - "Dataset.CustomDataset": CustomDataset, - "Dataset.CosmosDbSqlApiCollection": 
CosmosDbSqlApiCollectionDataset, - "Dataset.DocumentDbCollection": DocumentDbCollectionDataset, - "Dataset.DynamicsEntity": DynamicsEntityDataset, - "Dataset.DynamicsCrmEntity": DynamicsCrmEntityDataset, - "Dataset.CommonDataServiceForAppsEntity": CommonDataServiceForAppsEntityDataset, - "Dataset.AzureDataLakeStoreFile": AzureDataLakeStoreDataset, - "Dataset.AzureBlobFSFile": AzureBlobFSDataset, - "Dataset.Office365Table": Office365Dataset, - "Dataset.FileShare": FileShareDataset, - "Dataset.MongoDbCollection": MongoDbCollectionDataset, - "Dataset.MongoDbAtlasCollection": MongoDbAtlasCollectionDataset, - "Dataset.MongoDbV2Collection": MongoDbV2CollectionDataset, - "Dataset.CosmosDbMongoDbApiCollection": CosmosDbMongoDbApiCollectionDataset, - "Dataset.ODataResource": ODataResourceDataset, - "Dataset.OracleTable": OracleTableDataset, - "Dataset.TeradataTable": TeradataTableDataset, - "Dataset.AzureMySqlTable": AzureMySqlTableDataset, - "Dataset.AmazonRedshiftTable": AmazonRedshiftTableDataset, - "Dataset.Db2Table": Db2TableDataset, - "Dataset.RelationalTable": RelationalTableDataset, - "Dataset.InformixTable": InformixTableDataset, - "Dataset.OdbcTable": OdbcTableDataset, - "Dataset.MySqlTable": MySqlTableDataset, - "Dataset.PostgreSqlTable": PostgreSqlTableDataset, - "Dataset.MicrosoftAccessTable": MicrosoftAccessTableDataset, - "Dataset.SalesforceObject": SalesforceObjectDataset, - "Dataset.SalesforceServiceCloudObject": SalesforceServiceCloudObjectDataset, - "Dataset.SybaseTable": SybaseTableDataset, - "Dataset.SapBwCube": SapBwCubeDataset, - "Dataset.SapCloudForCustomerResource": SapCloudForCustomerResourceDataset, - "Dataset.SapEccResource": SapEccResourceDataset, - "Dataset.SapHanaTable": SapHanaTableDataset, - "Dataset.SapOpenHubTable": SapOpenHubTableDataset, - "Dataset.SqlServerTable": SqlServerTableDataset, - "Dataset.RestResource": RestResourceDataset, - "Dataset.SapTableResource": SapTableResourceDataset, - "Dataset.WebTable": WebTableDataset, - "Dataset.AzureSearchIndex": AzureSearchIndexDataset, - "Dataset.HttpFile": HttpDataset, - "Dataset.AmazonMWSObject": AmazonMWSObjectDataset, - "Dataset.AzurePostgreSqlTable": AzurePostgreSqlTableDataset, - "Dataset.ConcurObject": ConcurObjectDataset, - "Dataset.CouchbaseTable": CouchbaseTableDataset, - "Dataset.DrillTable": DrillTableDataset, - "Dataset.EloquaObject": EloquaObjectDataset, - "Dataset.GoogleBigQueryObject": GoogleBigQueryObjectDataset, - "Dataset.GreenplumTable": GreenplumTableDataset, - "Dataset.HBaseObject": HBaseObjectDataset, - "Dataset.HiveObject": HiveObjectDataset, - "Dataset.HubspotObject": HubspotObjectDataset, - "Dataset.ImpalaObject": ImpalaObjectDataset, - "Dataset.JiraObject": JiraObjectDataset, - "Dataset.MagentoObject": MagentoObjectDataset, - "Dataset.MariaDBTable": MariaDBTableDataset, - "Dataset.AzureMariaDBTable": AzureMariaDBTableDataset, - "Dataset.MarketoObject": MarketoObjectDataset, - "Dataset.PaypalObject": PaypalObjectDataset, - "Dataset.PhoenixObject": PhoenixObjectDataset, - "Dataset.PrestoObject": PrestoObjectDataset, - "Dataset.QuickBooksObject": QuickBooksObjectDataset, - "Dataset.ServiceNowObject": ServiceNowObjectDataset, - "Dataset.ShopifyObject": ShopifyObjectDataset, - "Dataset.SparkObject": SparkObjectDataset, - "Dataset.SquareObject": SquareObjectDataset, - "Dataset.XeroObject": XeroObjectDataset, - "Dataset.ZohoObject": ZohoObjectDataset, - "Dataset.NetezzaTable": NetezzaTableDataset, - "Dataset.VerticaTable": VerticaTableDataset, - "Dataset.SalesforceMarketingCloudObject": 
SalesforceMarketingCloudObjectDataset, - "Dataset.ResponsysObject": ResponsysObjectDataset, - "Dataset.DynamicsAXResource": DynamicsAXResourceDataset, - "Dataset.OracleServiceCloudObject": OracleServiceCloudObjectDataset, - "Dataset.AzureDataExplorerTable": AzureDataExplorerTableDataset, - "Dataset.GoogleAdWordsObject": GoogleAdWordsObjectDataset, - "Dataset.SnowflakeTable": SnowflakeDataset, - "Dataset.SharePointOnlineListResource": SharePointOnlineListResourceDataset, - "Dataset.AzureDatabricksDeltaLakeDataset": AzureDatabricksDeltaLakeDataset, "Activity.Container": ControlActivity, "Activity.Execution": ExecutionActivity, "Activity.SqlPoolStoredProcedure": SqlPoolStoredProcedureActivity, @@ -24423,9 +24940,6 @@ export let discriminators = { "Trigger.MultiplePipelineTrigger": MultiplePipelineTrigger, "Trigger.TumblingWindowTrigger": TumblingWindowTrigger, "Trigger.ChainingTrigger": ChainingTrigger, - "DataFlow.MappingDataFlow": MappingDataFlow, - "IntegrationRuntime.Managed": ManagedIntegrationRuntime, - "IntegrationRuntime.SelfHosted": SelfHostedIntegrationRuntime, "SecretBase.SecureString": SecureString, "SecretBase.AzureKeyVaultSecret": AzureKeyVaultSecretReference, "DatasetLocation.AzureBlobStorageLocation": AzureBlobStorageLocation, @@ -24506,6 +25020,7 @@ export let discriminators = { "CopySource.HdfsSource": HdfsSource, "CopySource.AzureDataExplorerSource": AzureDataExplorerSource, "CopySource.OracleSource": OracleSource, + "CopySource.AmazonRdsForOracleSource": AmazonRdsForOracleSource, "CopySource.WebSource": WebSource, "CopySource.MongoDbSource": MongoDbSource, "CopySource.MongoDbAtlasSource": MongoDbAtlasSource, @@ -24620,6 +25135,7 @@ export let discriminators = { "CopySource.SapTableSource": SapTableSource, "CopySource.SqlSource": SqlSource, "CopySource.SqlServerSource": SqlServerSource, + "CopySource.AmazonRdsForSqlServerSource": AmazonRdsForSqlServerSource, "CopySource.AzureSqlSource": AzureSqlSource, "CopySource.SqlMISource": SqlMISource, "CopySource.SqlDWSource": SqlDWSource, diff --git a/sdk/synapse/synapse-artifacts/src/models/parameters.ts b/sdk/synapse/synapse-artifacts/src/models/parameters.ts index f51584efb860..fc60f56dd126 100644 --- a/sdk/synapse/synapse-artifacts/src/models/parameters.ts +++ b/sdk/synapse/synapse-artifacts/src/models/parameters.ts @@ -12,21 +12,23 @@ import { OperationQueryParameter } from "@azure/core-client"; import { - LinkedServiceResource as LinkedServiceResourceMapper, + KqlScriptResource as KqlScriptResourceMapper, ArtifactRenameRequest as ArtifactRenameRequestMapper, - DatasetResource as DatasetResourceMapper, - PipelineResource as PipelineResourceMapper, - RunFilterParameters as RunFilterParametersMapper, - TriggerResource as TriggerResourceMapper, + SparkConfigurationResource as SparkConfigurationResourceMapper, DataFlowResource as DataFlowResourceMapper, CreateDataFlowDebugSessionRequest as CreateDataFlowDebugSessionRequestMapper, DataFlowDebugPackage as DataFlowDebugPackageMapper, DeleteDataFlowDebugSessionRequest as DeleteDataFlowDebugSessionRequestMapper, DataFlowDebugCommandRequest as DataFlowDebugCommandRequestMapper, - SqlScriptResource as SqlScriptResourceMapper, - SparkJobDefinitionResource as SparkJobDefinitionResourceMapper, + DatasetResource as DatasetResourceMapper, + GitHubAccessTokenRequest as GitHubAccessTokenRequestMapper, + LinkedServiceResource as LinkedServiceResourceMapper, NotebookResource as NotebookResourceMapper, - GitHubAccessTokenRequest as GitHubAccessTokenRequestMapper + PipelineResource as 
PipelineResourceMapper, + RunFilterParameters as RunFilterParametersMapper, + SparkJobDefinitionResource as SparkJobDefinitionResourceMapper, + SqlScriptResource as SqlScriptResourceMapper, + TriggerResource as TriggerResourceMapper } from "../models/mappers"; export const accept: OperationParameter = { @@ -56,7 +58,7 @@ export const endpoint: OperationURLParameter = { export const apiVersion: OperationQueryParameter = { parameterPath: "apiVersion", mapper: { - defaultValue: "2019-06-01-preview", + defaultValue: "2021-06-01-preview", isConstant: true, serializedName: "api-version", type: { @@ -65,6 +67,18 @@ export const apiVersion: OperationQueryParameter = { } }; +export const nextLink: OperationURLParameter = { + parameterPath: "nextLink", + mapper: { + serializedName: "nextLink", + required: true, + type: { + name: "String" + } + }, + skipEncoding: true +}; + export const contentType: OperationParameter = { parameterPath: ["options", "contentType"], mapper: { @@ -77,20 +91,36 @@ export const contentType: OperationParameter = { } }; -export const linkedService: OperationParameter = { - parameterPath: "linkedService", - mapper: LinkedServiceResourceMapper +export const kqlScript: OperationParameter = { + parameterPath: "kqlScript", + mapper: KqlScriptResourceMapper }; -export const linkedServiceName: OperationURLParameter = { - parameterPath: "linkedServiceName", +export const kqlScriptName: OperationURLParameter = { + parameterPath: "kqlScriptName", mapper: { - constraints: { - Pattern: new RegExp("^[A-Za-z0-9_][^<>*#.%&:\\\\+?/]*$"), - MaxLength: 260, - MinLength: 1 - }, - serializedName: "linkedServiceName", + serializedName: "kqlScriptName", + required: true, + type: { + name: "String" + } + } +}; + +export const renameRequest: OperationParameter = { + parameterPath: "renameRequest", + mapper: ArtifactRenameRequestMapper +}; + +export const sparkConfiguration: OperationParameter = { + parameterPath: "sparkConfiguration", + mapper: SparkConfigurationResourceMapper +}; + +export const sparkConfigurationName: OperationURLParameter = { + parameterPath: "sparkConfigurationName", + mapper: { + serializedName: "sparkConfigurationName", required: true, type: { name: "String" @@ -123,32 +153,43 @@ export const request: OperationParameter = { mapper: ArtifactRenameRequestMapper }; -export const nextLink: OperationURLParameter = { - parameterPath: "nextLink", +export const apiVersion1: OperationQueryParameter = { + parameterPath: "apiVersion", mapper: { - serializedName: "nextLink", + defaultValue: "2020-12-01", + isConstant: true, + serializedName: "api-version", + type: { + name: "String" + } + } +}; + +export const bigDataPoolName: OperationURLParameter = { + parameterPath: "bigDataPoolName", + mapper: { + serializedName: "bigDataPoolName", required: true, type: { name: "String" } - }, - skipEncoding: true + } }; -export const dataset: OperationParameter = { - parameterPath: "dataset", - mapper: DatasetResourceMapper +export const dataFlow: OperationParameter = { + parameterPath: "dataFlow", + mapper: DataFlowResourceMapper }; -export const datasetName: OperationURLParameter = { - parameterPath: "datasetName", +export const dataFlowName: OperationURLParameter = { + parameterPath: "dataFlowName", mapper: { constraints: { Pattern: new RegExp("^[A-Za-z0-9_][^<>*#.%&:\\\\+?/]*$"), MaxLength: 260, MinLength: 1 }, - serializedName: "datasetName", + serializedName: "dataFlowName", required: true, type: { name: "String" @@ -156,20 +197,40 @@ export const datasetName: OperationURLParameter = { } }; 
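// Illustrative aside, not part of the generated patch: the artifact-name URL
// parameters above (dataFlowName, datasetName, and the others that follow)
// share one constraint set. A minimal sketch of the check those constraints
// describe, reusing only the pattern from the mappers; `isValidArtifactName`
// is a hypothetical helper name:
const artifactNamePattern = new RegExp("^[A-Za-z0-9_][^<>*#.%&:\\\\+?/]*$");
function isValidArtifactName(name: string): boolean {
  // MinLength 1 and MaxLength 260, then the anchored pattern test
  return name.length >= 1 && name.length <= 260 && artifactNamePattern.test(name);
}
isValidArtifactName("my_dataset"); // true
isValidArtifactName("bad:name"); // false: ':' is excluded after the first character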
-export const pipeline: OperationParameter = { - parameterPath: "pipeline", - mapper: PipelineResourceMapper +export const request1: OperationParameter = { + parameterPath: "request", + mapper: CreateDataFlowDebugSessionRequestMapper }; -export const pipelineName: OperationURLParameter = { - parameterPath: "pipelineName", +export const request2: OperationParameter = { + parameterPath: "request", + mapper: DataFlowDebugPackageMapper +}; + +export const request3: OperationParameter = { + parameterPath: "request", + mapper: DeleteDataFlowDebugSessionRequestMapper +}; + +export const request4: OperationParameter = { + parameterPath: "request", + mapper: DataFlowDebugCommandRequestMapper +}; + +export const dataset: OperationParameter = { + parameterPath: "dataset", + mapper: DatasetResourceMapper +}; + +export const datasetName: OperationURLParameter = { + parameterPath: "datasetName", mapper: { constraints: { Pattern: new RegExp("^[A-Za-z0-9_][^<>*#.%&:\\\\+?/]*$"), MaxLength: 260, MinLength: 1 }, - serializedName: "pipelineName", + serializedName: "datasetName", required: true, type: { name: "String" @@ -177,160 +238,128 @@ export const pipelineName: OperationURLParameter = { } }; -export const parameters: OperationParameter = { - parameterPath: ["options", "parameters"], +export const gitHubAccessTokenRequest: OperationParameter = { + parameterPath: "gitHubAccessTokenRequest", + mapper: GitHubAccessTokenRequestMapper +}; + +export const clientRequestId: OperationParameter = { + parameterPath: ["options", "clientRequestId"], mapper: { - serializedName: "parameters", + serializedName: "x-ms-client-request-id", type: { - name: "Dictionary", - value: { type: { name: "any" } } + name: "String" } } }; -export const referencePipelineRunId: OperationQueryParameter = { - parameterPath: ["options", "referencePipelineRunId"], +export const integrationRuntimeName: OperationURLParameter = { + parameterPath: "integrationRuntimeName", mapper: { - serializedName: "referencePipelineRunId", + serializedName: "integrationRuntimeName", + required: true, type: { name: "String" } } }; -export const isRecovery: OperationQueryParameter = { - parameterPath: ["options", "isRecovery"], +export const libraryName: OperationURLParameter = { + parameterPath: "libraryName", mapper: { - serializedName: "isRecovery", + constraints: { + MaxLength: 100 + }, + serializedName: "libraryName", + required: true, type: { - name: "Boolean" + name: "String" } } }; -export const startActivityName: OperationQueryParameter = { - parameterPath: ["options", "startActivityName"], +export const operationId: OperationURLParameter = { + parameterPath: "operationId", mapper: { - serializedName: "startActivityName", + serializedName: "operationId", + required: true, type: { name: "String" } } }; -export const filterParameters: OperationParameter = { - parameterPath: "filterParameters", - mapper: RunFilterParametersMapper -}; - -export const runId: OperationURLParameter = { - parameterPath: "runId", +export const contentType1: OperationParameter = { + parameterPath: ["options", "contentType"], mapper: { - serializedName: "runId", - required: true, + defaultValue: "application/octet-stream", + isConstant: true, + serializedName: "Content-Type", type: { name: "String" } } }; -export const isRecursive: OperationQueryParameter = { - parameterPath: ["options", "isRecursive"], +export const content: OperationParameter = { + parameterPath: "content", mapper: { - serializedName: "isRecursive", + serializedName: "content", + required: true, type: { - 
name: "Boolean" + name: "Stream" } } }; -export const trigger: OperationParameter = { - parameterPath: "trigger", - mapper: TriggerResourceMapper -}; - -export const triggerName: OperationURLParameter = { - parameterPath: "triggerName", +export const accept1: OperationParameter = { + parameterPath: "accept", mapper: { - constraints: { - Pattern: new RegExp("^[A-Za-z0-9_][^<>*#.%&:\\\\+?/]*$"), - MaxLength: 260, - MinLength: 1 - }, - serializedName: "triggerName", - required: true, + defaultValue: "application/json", + isConstant: true, + serializedName: "Accept", type: { name: "String" } } }; -export const dataFlow: OperationParameter = { - parameterPath: "dataFlow", - mapper: DataFlowResourceMapper -}; - -export const dataFlowName: OperationURLParameter = { - parameterPath: "dataFlowName", +export const comp: OperationQueryParameter = { + parameterPath: "comp", mapper: { - constraints: { - Pattern: new RegExp("^[A-Za-z0-9_][^<>*#.%&:\\\\+?/]*$"), - MaxLength: 260, - MinLength: 1 - }, - serializedName: "dataFlowName", - required: true, + defaultValue: "appendblock", + isConstant: true, + serializedName: "comp", type: { name: "String" } } }; -export const request1: OperationParameter = { - parameterPath: "request", - mapper: CreateDataFlowDebugSessionRequestMapper -}; - -export const request2: OperationParameter = { - parameterPath: "request", - mapper: DataFlowDebugPackageMapper -}; - -export const request3: OperationParameter = { - parameterPath: "request", - mapper: DeleteDataFlowDebugSessionRequestMapper -}; - -export const request4: OperationParameter = { - parameterPath: "request", - mapper: DataFlowDebugCommandRequestMapper -}; - -export const sqlScript: OperationParameter = { - parameterPath: "sqlScript", - mapper: SqlScriptResourceMapper -}; - -export const sqlScriptName: OperationURLParameter = { - parameterPath: "sqlScriptName", +export const blobConditionAppendPosition: OperationParameter = { + parameterPath: ["options", "blobConditionAppendPosition"], mapper: { - serializedName: "sqlScriptName", - required: true, + serializedName: "x-ms-blob-condition-appendpos", type: { - name: "String" + name: "Number" } } }; -export const sparkJobDefinition: OperationParameter = { - parameterPath: "sparkJobDefinition", - mapper: SparkJobDefinitionResourceMapper +export const linkedService: OperationParameter = { + parameterPath: "linkedService", + mapper: LinkedServiceResourceMapper }; -export const sparkJobDefinitionName: OperationURLParameter = { - parameterPath: "sparkJobDefinitionName", +export const linkedServiceName: OperationURLParameter = { + parameterPath: "linkedServiceName", mapper: { - serializedName: "sparkJobDefinitionName", + constraints: { + Pattern: new RegExp("^[A-Za-z0-9_][^<>*#.%&:\\\\+?/]*$"), + MaxLength: 260, + MinLength: 1 + }, + serializedName: "linkedServiceName", required: true, type: { name: "String" @@ -338,11 +367,6 @@ export const sparkJobDefinitionName: OperationURLParameter = { } }; -export const sparkJobDefinitionAzureResource: OperationParameter = { - parameterPath: "sparkJobDefinitionAzureResource", - mapper: SparkJobDefinitionResourceMapper -}; - export const notebook: OperationParameter = { parameterPath: "notebook", mapper: NotebookResourceMapper @@ -359,10 +383,20 @@ export const notebookName: OperationURLParameter = { } }; -export const sqlPoolName: OperationURLParameter = { - parameterPath: "sqlPoolName", +export const pipeline: OperationParameter = { + parameterPath: "pipeline", + mapper: PipelineResourceMapper +}; + +export const pipelineName: 
OperationURLParameter = { + parameterPath: "pipelineName", mapper: { - serializedName: "sqlPoolName", + constraints: { + Pattern: new RegExp("^[A-Za-z0-9_][^<>*#.%&:\\\\+?/]*$"), + MaxLength: 260, + MinLength: 1 + }, + serializedName: "pipelineName", required: true, type: { name: "String" @@ -370,119 +404,136 @@ export const sqlPoolName: OperationURLParameter = { } }; -export const bigDataPoolName: OperationURLParameter = { - parameterPath: "bigDataPoolName", +export const parameters: OperationParameter = { + parameterPath: ["options", "parameters"], mapper: { - serializedName: "bigDataPoolName", - required: true, + serializedName: "parameters", type: { - name: "String" + name: "Dictionary", + value: { type: { name: "any" } } } } }; -export const integrationRuntimeName: OperationURLParameter = { - parameterPath: "integrationRuntimeName", +export const referencePipelineRunId: OperationQueryParameter = { + parameterPath: ["options", "referencePipelineRunId"], mapper: { - serializedName: "integrationRuntimeName", - required: true, + serializedName: "referencePipelineRunId", type: { name: "String" } } }; -export const libraryName: OperationURLParameter = { - parameterPath: "libraryName", +export const isRecovery: OperationQueryParameter = { + parameterPath: ["options", "isRecovery"], mapper: { - constraints: { - MaxLength: 100 - }, - serializedName: "libraryName", - required: true, + serializedName: "isRecovery", type: { - name: "String" + name: "Boolean" } } }; -export const operationId: OperationURLParameter = { - parameterPath: "operationId", +export const startActivityName: OperationQueryParameter = { + parameterPath: ["options", "startActivityName"], mapper: { - serializedName: "operationId", - required: true, + serializedName: "startActivityName", type: { name: "String" } } }; -export const contentType1: OperationParameter = { - parameterPath: ["options", "contentType"], +export const filterParameters: OperationParameter = { + parameterPath: "filterParameters", + mapper: RunFilterParametersMapper +}; + +export const runId: OperationURLParameter = { + parameterPath: "runId", mapper: { - defaultValue: "application/octet-stream", - isConstant: true, - serializedName: "Content-Type", + serializedName: "runId", + required: true, type: { name: "String" } } }; -export const content: OperationParameter = { - parameterPath: "content", +export const isRecursive: OperationQueryParameter = { + parameterPath: ["options", "isRecursive"], mapper: { - serializedName: "content", - required: true, + serializedName: "isRecursive", type: { - name: "Stream" + name: "Boolean" } } }; -export const accept1: OperationParameter = { - parameterPath: "accept", +export const sparkJobDefinition: OperationParameter = { + parameterPath: "sparkJobDefinition", + mapper: SparkJobDefinitionResourceMapper +}; + +export const sparkJobDefinitionName: OperationURLParameter = { + parameterPath: "sparkJobDefinitionName", mapper: { - defaultValue: "application/json", - isConstant: true, - serializedName: "Accept", + serializedName: "sparkJobDefinitionName", + required: true, type: { name: "String" } } }; -export const comp: OperationQueryParameter = { - parameterPath: "comp", +export const sparkJobDefinitionAzureResource: OperationParameter = { + parameterPath: "sparkJobDefinitionAzureResource", + mapper: SparkJobDefinitionResourceMapper +}; + +export const sqlPoolName: OperationURLParameter = { + parameterPath: "sqlPoolName", mapper: { - defaultValue: "appendblock", - isConstant: true, - serializedName: "comp", + serializedName: 
"sqlPoolName", + required: true, type: { name: "String" } } }; -export const blobConditionAppendPosition: OperationParameter = { - parameterPath: ["options", "blobConditionAppendPosition"], +export const sqlScript: OperationParameter = { + parameterPath: "sqlScript", + mapper: SqlScriptResourceMapper +}; + +export const sqlScriptName: OperationURLParameter = { + parameterPath: "sqlScriptName", mapper: { - serializedName: "x-ms-blob-condition-appendpos", + serializedName: "sqlScriptName", + required: true, type: { - name: "Number" + name: "String" } } }; -export const gitHubAccessTokenRequest: OperationParameter = { - parameterPath: "gitHubAccessTokenRequest", - mapper: GitHubAccessTokenRequestMapper +export const trigger: OperationParameter = { + parameterPath: "trigger", + mapper: TriggerResourceMapper }; -export const clientRequestId: OperationParameter = { - parameterPath: ["options", "clientRequestId"], +export const triggerName: OperationURLParameter = { + parameterPath: "triggerName", mapper: { - serializedName: "x-ms-client-request-id", + constraints: { + Pattern: new RegExp("^[A-Za-z0-9_][^<>*#.%&:\\\\+?/]*$"), + MaxLength: 260, + MinLength: 1 + }, + serializedName: "triggerName", + required: true, type: { name: "String" } diff --git a/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts b/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts index 2eb7af29a91b..3cf583e7ae54 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts @@ -20,7 +20,7 @@ import { BigDataPoolsGetResponse } from "../models"; -/** Class representing a BigDataPools. */ +/** Class containing BigDataPools operations. */ export class BigDataPoolsImpl implements BigDataPools { private readonly client: ArtifactsClientContext; @@ -98,7 +98,7 @@ const listOperationSpec: coreClient.OperationSpec = { bodyMapper: Mappers.ErrorContract } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint], headerParameters: [Parameters.accept], serializer @@ -114,7 +114,7 @@ const getOperationSpec: coreClient.OperationSpec = { bodyMapper: Mappers.ErrorContract } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.bigDataPoolName], headerParameters: [Parameters.accept], serializer diff --git a/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts b/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts index 331f4dad74d0..249ada38708d 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts @@ -7,7 +7,6 @@ */ import { createSpan } from "../tracing"; -import "@azure/core-paging"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; import { DataFlowDebugSession } from "../operationsInterfaces"; import * as coreClient from "@azure/core-client"; @@ -37,7 +36,7 @@ import { } from "../models"; /// -/** Class representing a DataFlowDebugSession. */ +/** Class containing DataFlowDebugSession operations. 
*/ export class DataFlowDebugSessionImpl implements DataFlowDebugSession { private readonly client: ArtifactsClientContext; @@ -422,11 +421,11 @@ const createDataFlowDebugSessionOperationSpec: coreClient.OperationSpec = { bodyMapper: Mappers.CreateDataFlowDebugSessionResponse }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, requestBody: Parameters.request1, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint], headerParameters: [Parameters.accept, Parameters.contentType], mediaType: "json", @@ -440,10 +439,10 @@ const queryDataFlowDebugSessionsByWorkspaceOperationSpec: coreClient.OperationSp bodyMapper: Mappers.QueryDataFlowDebugSessionsResponse }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint], headerParameters: [Parameters.accept], serializer @@ -456,11 +455,11 @@ const addDataFlowOperationSpec: coreClient.OperationSpec = { bodyMapper: Mappers.AddDataFlowToDebugSessionResponse }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, requestBody: Parameters.request2, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint], headerParameters: [Parameters.accept, Parameters.contentType], mediaType: "json", @@ -472,11 +471,11 @@ const deleteDataFlowDebugSessionOperationSpec: coreClient.OperationSpec = { responses: { 200: {}, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, requestBody: Parameters.request3, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint], headerParameters: [Parameters.accept, Parameters.contentType], mediaType: "json", @@ -499,11 +498,11 @@ const executeCommandOperationSpec: coreClient.OperationSpec = { bodyMapper: Mappers.DataFlowDebugCommandResponse }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, requestBody: Parameters.request4, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint], headerParameters: [Parameters.accept, Parameters.contentType], mediaType: "json", @@ -517,10 +516,10 @@ const queryDataFlowDebugSessionsByWorkspaceNextOperationSpec: coreClient.Operati bodyMapper: Mappers.QueryDataFlowDebugSessionsResponse }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.nextLink], headerParameters: [Parameters.accept], serializer diff --git a/sdk/synapse/synapse-artifacts/src/operations/dataFlowOperations.ts b/sdk/synapse/synapse-artifacts/src/operations/dataFlowOperations.ts index f3eb704a525b..c1e646e5979f 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/dataFlowOperations.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/dataFlowOperations.ts @@ -7,7 +7,6 @@ */ import { createSpan } from "../tracing"; -import "@azure/core-paging"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; import { DataFlowOperations } from "../operationsInterfaces"; import * as coreClient from "@azure/core-client"; @@ -19,21 +18,21 @@ import { 
PollerLike, PollOperationState, LroEngine } from "@azure/core-lro"; import { LroImpl } from "../lroImpl"; import { DataFlowResource, - DataFlowOperationsGetDataFlowsByWorkspaceNextOptionalParams, - DataFlowOperationsGetDataFlowsByWorkspaceOptionalParams, - DataFlowOperationsCreateOrUpdateDataFlowOptionalParams, - DataFlowOperationsCreateOrUpdateDataFlowResponse, - DataFlowOperationsGetDataFlowOptionalParams, - DataFlowOperationsGetDataFlowResponse, - DataFlowOperationsDeleteDataFlowOptionalParams, + DataFlowGetDataFlowsByWorkspaceNextOptionalParams, + DataFlowGetDataFlowsByWorkspaceOptionalParams, + DataFlowCreateOrUpdateDataFlowOptionalParams, + DataFlowCreateOrUpdateDataFlowResponse, + DataFlowGetDataFlowOptionalParams, + DataFlowGetDataFlowResponse, + DataFlowDeleteDataFlowOptionalParams, ArtifactRenameRequest, - DataFlowOperationsRenameDataFlowOptionalParams, - DataFlowOperationsGetDataFlowsByWorkspaceResponse, - DataFlowOperationsGetDataFlowsByWorkspaceNextResponse + DataFlowRenameDataFlowOptionalParams, + DataFlowGetDataFlowsByWorkspaceResponse, + DataFlowGetDataFlowsByWorkspaceNextResponse } from "../models"; /// -/** Class representing a DataFlowOperations. */ +/** Class containing DataFlowOperations operations. */ export class DataFlowOperationsImpl implements DataFlowOperations { private readonly client: ArtifactsClientContext; @@ -50,7 +49,7 @@ export class DataFlowOperationsImpl implements DataFlowOperations { * @param options The options parameters. */ public listDataFlowsByWorkspace( - options?: DataFlowOperationsGetDataFlowsByWorkspaceOptionalParams + options?: DataFlowGetDataFlowsByWorkspaceOptionalParams ): PagedAsyncIterableIterator { const iter = this.getDataFlowsByWorkspacePagingAll(options); return { @@ -67,7 +66,7 @@ export class DataFlowOperationsImpl implements DataFlowOperations { } private async *getDataFlowsByWorkspacePagingPage( - options?: DataFlowOperationsGetDataFlowsByWorkspaceOptionalParams + options?: DataFlowGetDataFlowsByWorkspaceOptionalParams ): AsyncIterableIterator { let result = await this._getDataFlowsByWorkspace(options); yield result.value || []; @@ -83,7 +82,7 @@ export class DataFlowOperationsImpl implements DataFlowOperations { } private async *getDataFlowsByWorkspacePagingAll( - options?: DataFlowOperationsGetDataFlowsByWorkspaceOptionalParams + options?: DataFlowGetDataFlowsByWorkspaceOptionalParams ): AsyncIterableIterator { for await (const page of this.getDataFlowsByWorkspacePagingPage(options)) { yield* page; @@ -99,11 +98,11 @@ export class DataFlowOperationsImpl implements DataFlowOperations { async beginCreateOrUpdateDataFlow( dataFlowName: string, dataFlow: DataFlowResource, - options?: DataFlowOperationsCreateOrUpdateDataFlowOptionalParams + options?: DataFlowCreateOrUpdateDataFlowOptionalParams ): Promise< PollerLike< - PollOperationState, - DataFlowOperationsCreateOrUpdateDataFlowResponse + PollOperationState, + DataFlowCreateOrUpdateDataFlowResponse > > { const { span } = createSpan( @@ -113,10 +112,10 @@ export class DataFlowOperationsImpl implements DataFlowOperations { const directSendOperation = async ( args: coreClient.OperationArguments, spec: coreClient.OperationSpec - ): Promise => { + ): Promise => { try { const result = await this.client.sendOperationRequest(args, spec); - return result as DataFlowOperationsCreateOrUpdateDataFlowResponse; + return result as DataFlowCreateOrUpdateDataFlowResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -180,8 +179,8 @@ export class 
DataFlowOperationsImpl implements DataFlowOperations { async beginCreateOrUpdateDataFlowAndWait( dataFlowName: string, dataFlow: DataFlowResource, - options?: DataFlowOperationsCreateOrUpdateDataFlowOptionalParams - ): Promise { + options?: DataFlowCreateOrUpdateDataFlowOptionalParams + ): Promise { const poller = await this.beginCreateOrUpdateDataFlow( dataFlowName, dataFlow, @@ -197,15 +196,15 @@ export class DataFlowOperationsImpl implements DataFlowOperations { */ async getDataFlow( dataFlowName: string, - options?: DataFlowOperationsGetDataFlowOptionalParams - ): Promise { + options?: DataFlowGetDataFlowOptionalParams + ): Promise { const { span } = createSpan("ArtifactsClient-getDataFlow", options || {}); try { const result = await this.client.sendOperationRequest( { dataFlowName, options }, getDataFlowOperationSpec ); - return result as DataFlowOperationsGetDataFlowResponse; + return result as DataFlowGetDataFlowResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -224,7 +223,7 @@ export class DataFlowOperationsImpl implements DataFlowOperations { */ async beginDeleteDataFlow( dataFlowName: string, - options?: DataFlowOperationsDeleteDataFlowOptionalParams + options?: DataFlowDeleteDataFlowOptionalParams ): Promise, void>> { const { span } = createSpan( "ArtifactsClient-beginDeleteDataFlow", @@ -298,7 +297,7 @@ export class DataFlowOperationsImpl implements DataFlowOperations { */ async beginDeleteDataFlowAndWait( dataFlowName: string, - options?: DataFlowOperationsDeleteDataFlowOptionalParams + options?: DataFlowDeleteDataFlowOptionalParams ): Promise { const poller = await this.beginDeleteDataFlow(dataFlowName, options); return poller.pollUntilDone(); @@ -313,7 +312,7 @@ export class DataFlowOperationsImpl implements DataFlowOperations { async beginRenameDataFlow( dataFlowName: string, request: ArtifactRenameRequest, - options?: DataFlowOperationsRenameDataFlowOptionalParams + options?: DataFlowRenameDataFlowOptionalParams ): Promise, void>> { const { span } = createSpan( "ArtifactsClient-beginRenameDataFlow", @@ -389,7 +388,7 @@ export class DataFlowOperationsImpl implements DataFlowOperations { async beginRenameDataFlowAndWait( dataFlowName: string, request: ArtifactRenameRequest, - options?: DataFlowOperationsRenameDataFlowOptionalParams + options?: DataFlowRenameDataFlowOptionalParams ): Promise { const poller = await this.beginRenameDataFlow( dataFlowName, @@ -404,8 +403,8 @@ export class DataFlowOperationsImpl implements DataFlowOperations { * @param options The options parameters. 
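   * Private paging helper: it fetches only the first page, and the public
   * `listDataFlowsByWorkspace` iterator follows the `nextLink` on each
   * response to pull the remaining pages.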
*/ private async _getDataFlowsByWorkspace( - options?: DataFlowOperationsGetDataFlowsByWorkspaceOptionalParams - ): Promise { + options?: DataFlowGetDataFlowsByWorkspaceOptionalParams + ): Promise { const { span } = createSpan( "ArtifactsClient-_getDataFlowsByWorkspace", options || {} @@ -415,7 +414,7 @@ export class DataFlowOperationsImpl implements DataFlowOperations { { options }, getDataFlowsByWorkspaceOperationSpec ); - return result as DataFlowOperationsGetDataFlowsByWorkspaceResponse; + return result as DataFlowGetDataFlowsByWorkspaceResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -435,8 +434,8 @@ export class DataFlowOperationsImpl implements DataFlowOperations { */ private async _getDataFlowsByWorkspaceNext( nextLink: string, - options?: DataFlowOperationsGetDataFlowsByWorkspaceNextOptionalParams - ): Promise { + options?: DataFlowGetDataFlowsByWorkspaceNextOptionalParams + ): Promise { const { span } = createSpan( "ArtifactsClient-_getDataFlowsByWorkspaceNext", options || {} @@ -446,7 +445,7 @@ export class DataFlowOperationsImpl implements DataFlowOperations { { nextLink, options }, getDataFlowsByWorkspaceNextOperationSpec ); - return result as DataFlowOperationsGetDataFlowsByWorkspaceNextResponse; + return result as DataFlowGetDataFlowsByWorkspaceNextResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -478,11 +477,11 @@ const createOrUpdateDataFlowOperationSpec: coreClient.OperationSpec = { bodyMapper: Mappers.DataFlowResource }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, requestBody: Parameters.dataFlow, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.dataFlowName], headerParameters: [ Parameters.accept, @@ -500,10 +499,10 @@ const getDataFlowOperationSpec: coreClient.OperationSpec = { bodyMapper: Mappers.DataFlowResource }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.dataFlowName], headerParameters: [Parameters.accept, Parameters.ifNoneMatch], serializer @@ -517,10 +516,10 @@ const deleteDataFlowOperationSpec: coreClient.OperationSpec = { 202: {}, 204: {}, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.dataFlowName], headerParameters: [Parameters.accept], serializer @@ -534,11 +533,11 @@ const renameDataFlowOperationSpec: coreClient.OperationSpec = { 202: {}, 204: {}, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, requestBody: Parameters.request, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.dataFlowName], headerParameters: [Parameters.accept, Parameters.contentType], mediaType: "json", @@ -552,10 +551,10 @@ const getDataFlowsByWorkspaceOperationSpec: coreClient.OperationSpec = { bodyMapper: Mappers.DataFlowListResponse }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint], headerParameters: 
[Parameters.accept], serializer @@ -568,10 +567,10 @@ const getDataFlowsByWorkspaceNextOperationSpec: coreClient.OperationSpec = { bodyMapper: Mappers.DataFlowListResponse }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.nextLink], headerParameters: [Parameters.accept], serializer diff --git a/sdk/synapse/synapse-artifacts/src/operations/datasetOperations.ts b/sdk/synapse/synapse-artifacts/src/operations/datasetOperations.ts index 4a7c82ab34c0..eff87ae0e56b 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/datasetOperations.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/datasetOperations.ts @@ -7,7 +7,6 @@ */ import { createSpan } from "../tracing"; -import "@azure/core-paging"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; import { DatasetOperations } from "../operationsInterfaces"; import * as coreClient from "@azure/core-client"; @@ -19,21 +18,21 @@ import { PollerLike, PollOperationState, LroEngine } from "@azure/core-lro"; import { LroImpl } from "../lroImpl"; import { DatasetResource, - DatasetOperationsGetDatasetsByWorkspaceNextOptionalParams, - DatasetOperationsGetDatasetsByWorkspaceOptionalParams, - DatasetOperationsGetDatasetsByWorkspaceResponse, - DatasetOperationsCreateOrUpdateDatasetOptionalParams, - DatasetOperationsCreateOrUpdateDatasetResponse, - DatasetOperationsGetDatasetOptionalParams, - DatasetOperationsGetDatasetResponse, - DatasetOperationsDeleteDatasetOptionalParams, + DatasetGetDatasetsByWorkspaceNextOptionalParams, + DatasetGetDatasetsByWorkspaceOptionalParams, + DatasetGetDatasetsByWorkspaceResponse, + DatasetCreateOrUpdateDatasetOptionalParams, + DatasetCreateOrUpdateDatasetResponse, + DatasetGetDatasetOptionalParams, + DatasetGetDatasetResponse, + DatasetDeleteDatasetOptionalParams, ArtifactRenameRequest, - DatasetOperationsRenameDatasetOptionalParams, - DatasetOperationsGetDatasetsByWorkspaceNextResponse + DatasetRenameDatasetOptionalParams, + DatasetGetDatasetsByWorkspaceNextResponse } from "../models"; /// -/** Class representing a DatasetOperations. */ +/** Class containing DatasetOperations operations. */ export class DatasetOperationsImpl implements DatasetOperations { private readonly client: ArtifactsClientContext; @@ -50,7 +49,7 @@ export class DatasetOperationsImpl implements DatasetOperations { * @param options The options parameters. 
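   * A hedged usage sketch (it assumes the group is exposed as
   * `datasetOperations` on `ArtifactsClient`; adjust the property name to
   * your client surface):
   *
   *   for await (const dataset of client.datasetOperations.listDatasetsByWorkspace()) {
   *     console.log(dataset.name);
   *   }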
*/ public listDatasetsByWorkspace( - options?: DatasetOperationsGetDatasetsByWorkspaceOptionalParams + options?: DatasetGetDatasetsByWorkspaceOptionalParams ): PagedAsyncIterableIterator { const iter = this.getDatasetsByWorkspacePagingAll(options); return { @@ -67,7 +66,7 @@ export class DatasetOperationsImpl implements DatasetOperations { } private async *getDatasetsByWorkspacePagingPage( - options?: DatasetOperationsGetDatasetsByWorkspaceOptionalParams + options?: DatasetGetDatasetsByWorkspaceOptionalParams ): AsyncIterableIterator { let result = await this._getDatasetsByWorkspace(options); yield result.value || []; @@ -83,7 +82,7 @@ export class DatasetOperationsImpl implements DatasetOperations { } private async *getDatasetsByWorkspacePagingAll( - options?: DatasetOperationsGetDatasetsByWorkspaceOptionalParams + options?: DatasetGetDatasetsByWorkspaceOptionalParams ): AsyncIterableIterator { for await (const page of this.getDatasetsByWorkspacePagingPage(options)) { yield* page; @@ -95,8 +94,8 @@ export class DatasetOperationsImpl implements DatasetOperations { * @param options The options parameters. */ private async _getDatasetsByWorkspace( - options?: DatasetOperationsGetDatasetsByWorkspaceOptionalParams - ): Promise { + options?: DatasetGetDatasetsByWorkspaceOptionalParams + ): Promise { const { span } = createSpan( "ArtifactsClient-_getDatasetsByWorkspace", options || {} @@ -106,7 +105,7 @@ export class DatasetOperationsImpl implements DatasetOperations { { options }, getDatasetsByWorkspaceOperationSpec ); - return result as DatasetOperationsGetDatasetsByWorkspaceResponse; + return result as DatasetGetDatasetsByWorkspaceResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -127,11 +126,11 @@ export class DatasetOperationsImpl implements DatasetOperations { async beginCreateOrUpdateDataset( datasetName: string, dataset: DatasetResource, - options?: DatasetOperationsCreateOrUpdateDatasetOptionalParams + options?: DatasetCreateOrUpdateDatasetOptionalParams ): Promise< PollerLike< - PollOperationState, - DatasetOperationsCreateOrUpdateDatasetResponse + PollOperationState, + DatasetCreateOrUpdateDatasetResponse > > { const { span } = createSpan( @@ -141,10 +140,10 @@ export class DatasetOperationsImpl implements DatasetOperations { const directSendOperation = async ( args: coreClient.OperationArguments, spec: coreClient.OperationSpec - ): Promise => { + ): Promise => { try { const result = await this.client.sendOperationRequest(args, spec); - return result as DatasetOperationsCreateOrUpdateDatasetResponse; + return result as DatasetCreateOrUpdateDatasetResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -208,8 +207,8 @@ export class DatasetOperationsImpl implements DatasetOperations { async beginCreateOrUpdateDatasetAndWait( datasetName: string, dataset: DatasetResource, - options?: DatasetOperationsCreateOrUpdateDatasetOptionalParams - ): Promise { + options?: DatasetCreateOrUpdateDatasetOptionalParams + ): Promise { const poller = await this.beginCreateOrUpdateDataset( datasetName, dataset, @@ -225,15 +224,15 @@ export class DatasetOperationsImpl implements DatasetOperations { */ async getDataset( datasetName: string, - options?: DatasetOperationsGetDatasetOptionalParams - ): Promise { + options?: DatasetGetDatasetOptionalParams + ): Promise { const { span } = createSpan("ArtifactsClient-getDataset", options || {}); try { const result = await this.client.sendOperationRequest( { datasetName, options }, 
getDatasetOperationSpec ); - return result as DatasetOperationsGetDatasetResponse; + return result as DatasetGetDatasetResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -252,7 +251,7 @@ export class DatasetOperationsImpl implements DatasetOperations { */ async beginDeleteDataset( datasetName: string, - options?: DatasetOperationsDeleteDatasetOptionalParams + options?: DatasetDeleteDatasetOptionalParams ): Promise, void>> { const { span } = createSpan( "ArtifactsClient-beginDeleteDataset", @@ -326,7 +325,7 @@ export class DatasetOperationsImpl implements DatasetOperations { */ async beginDeleteDatasetAndWait( datasetName: string, - options?: DatasetOperationsDeleteDatasetOptionalParams + options?: DatasetDeleteDatasetOptionalParams ): Promise { const poller = await this.beginDeleteDataset(datasetName, options); return poller.pollUntilDone(); @@ -341,7 +340,7 @@ export class DatasetOperationsImpl implements DatasetOperations { async beginRenameDataset( datasetName: string, request: ArtifactRenameRequest, - options?: DatasetOperationsRenameDatasetOptionalParams + options?: DatasetRenameDatasetOptionalParams ): Promise, void>> { const { span } = createSpan( "ArtifactsClient-beginRenameDataset", @@ -417,7 +416,7 @@ export class DatasetOperationsImpl implements DatasetOperations { async beginRenameDatasetAndWait( datasetName: string, request: ArtifactRenameRequest, - options?: DatasetOperationsRenameDatasetOptionalParams + options?: DatasetRenameDatasetOptionalParams ): Promise { const poller = await this.beginRenameDataset(datasetName, request, options); return poller.pollUntilDone(); @@ -430,8 +429,8 @@ export class DatasetOperationsImpl implements DatasetOperations { */ private async _getDatasetsByWorkspaceNext( nextLink: string, - options?: DatasetOperationsGetDatasetsByWorkspaceNextOptionalParams - ): Promise { + options?: DatasetGetDatasetsByWorkspaceNextOptionalParams + ): Promise { const { span } = createSpan( "ArtifactsClient-_getDatasetsByWorkspaceNext", options || {} @@ -441,7 +440,7 @@ export class DatasetOperationsImpl implements DatasetOperations { { nextLink, options }, getDatasetsByWorkspaceNextOperationSpec ); - return result as DatasetOperationsGetDatasetsByWorkspaceNextResponse; + return result as DatasetGetDatasetsByWorkspaceNextResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -464,10 +463,10 @@ const getDatasetsByWorkspaceOperationSpec: coreClient.OperationSpec = { bodyMapper: Mappers.DatasetListResponse }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint], headerParameters: [Parameters.accept], serializer @@ -489,11 +488,11 @@ const createOrUpdateDatasetOperationSpec: coreClient.OperationSpec = { bodyMapper: Mappers.DatasetResource }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, requestBody: Parameters.dataset, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.datasetName], headerParameters: [ Parameters.accept, @@ -512,10 +511,10 @@ const getDatasetOperationSpec: coreClient.OperationSpec = { }, 304: {}, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], 
urlParameters: [Parameters.endpoint, Parameters.datasetName], headerParameters: [Parameters.accept, Parameters.ifNoneMatch], serializer @@ -529,10 +528,10 @@ const deleteDatasetOperationSpec: coreClient.OperationSpec = { 202: {}, 204: {}, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.datasetName], headerParameters: [Parameters.accept], serializer @@ -546,11 +545,11 @@ const renameDatasetOperationSpec: coreClient.OperationSpec = { 202: {}, 204: {}, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, requestBody: Parameters.request, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.datasetName], headerParameters: [Parameters.accept, Parameters.contentType], mediaType: "json", @@ -564,10 +563,10 @@ const getDatasetsByWorkspaceNextOperationSpec: coreClient.OperationSpec = { bodyMapper: Mappers.DatasetListResponse }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.nextLink], headerParameters: [Parameters.accept], serializer diff --git a/sdk/synapse/synapse-artifacts/src/operations/index.ts b/sdk/synapse/synapse-artifacts/src/operations/index.ts index 44e9f4f8cb66..8fb93b0261f2 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/index.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/index.ts @@ -6,20 +6,24 @@ * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ -export * from "./linkedServiceOperations"; +export * from "./kqlScripts"; +export * from "./kqlScriptOperations"; +export * from "./sparkConfigurationOperations"; +export * from "./bigDataPools"; +export * from "./dataFlowOperations"; +export * from "./dataFlowDebugSession"; export * from "./datasetOperations"; +export * from "./workspaceGitRepoManagement"; +export * from "./integrationRuntimes"; +export * from "./library"; +export * from "./linkedServiceOperations"; +export * from "./notebookOperations"; +export * from "./notebookOperationResult"; export * from "./pipelineOperations"; export * from "./pipelineRunOperations"; +export * from "./sparkJobDefinitionOperations"; +export * from "./sqlPools"; +export * from "./sqlScriptOperations"; export * from "./triggerOperations"; export * from "./triggerRunOperations"; -export * from "./dataFlowOperations"; -export * from "./dataFlowDebugSession"; -export * from "./sqlScriptOperations"; -export * from "./sparkJobDefinitionOperations"; -export * from "./notebookOperations"; export * from "./workspaceOperations"; -export * from "./sqlPools"; -export * from "./bigDataPools"; -export * from "./integrationRuntimes"; -export * from "./library"; -export * from "./workspaceGitRepoManagement"; diff --git a/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts b/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts index 71b8fb9f646c..29a5f6aa20e7 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts @@ -20,7 +20,7 @@ import { IntegrationRuntimesGetResponse } from "../models"; -/** Class representing a IntegrationRuntimes. 
*/ +/** Class containing IntegrationRuntimes operations. */ export class IntegrationRuntimesImpl implements IntegrationRuntimes { private readonly client: ArtifactsClientContext; @@ -98,7 +98,7 @@ const listOperationSpec: coreClient.OperationSpec = { bodyMapper: Mappers.ErrorContract } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint], headerParameters: [Parameters.accept], serializer @@ -114,7 +114,7 @@ const getOperationSpec: coreClient.OperationSpec = { bodyMapper: Mappers.ErrorContract } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.integrationRuntimeName], headerParameters: [Parameters.accept], serializer diff --git a/sdk/synapse/synapse-artifacts/src/operations/kqlScriptOperations.ts b/sdk/synapse/synapse-artifacts/src/operations/kqlScriptOperations.ts new file mode 100644 index 000000000000..69db3e0b6080 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operations/kqlScriptOperations.ts @@ -0,0 +1,428 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ + +import { createSpan } from "../tracing"; +import { KqlScriptOperations } from "../operationsInterfaces"; +import * as coreClient from "@azure/core-client"; +import * as coreTracing from "@azure/core-tracing"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +import { ArtifactsClientContext } from "../artifactsClientContext"; +import { PollerLike, PollOperationState, LroEngine } from "@azure/core-lro"; +import { LroImpl } from "../lroImpl"; +import { + KqlScriptResource, + KqlScriptCreateOrUpdateOptionalParams, + KqlScriptCreateOrUpdateResponse, + KqlScriptGetByNameOptionalParams, + KqlScriptGetByNameResponse, + KqlScriptDeleteByNameOptionalParams, + ArtifactRenameRequest, + KqlScriptRenameOptionalParams +} from "../models"; + +/** Class containing KqlScriptOperations operations. */ +export class KqlScriptOperationsImpl implements KqlScriptOperations { + private readonly client: ArtifactsClientContext; + + /** + * Initialize a new instance of the class KqlScriptOperations class. + * @param client Reference to the service client + */ + constructor(client: ArtifactsClientContext) { + this.client = client; + } + + /** + * Creates or updates a KQL Script + * @param kqlScriptName KQL script name + * @param kqlScript KQL script + * @param options The options parameters. 
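+   * A hedged call-shape sketch (it assumes the group is exposed as
+   * `kqlScriptOperations` on `ArtifactsClient`, and the script body shown
+   * is illustrative only):
+   *
+   *   const poller = await client.kqlScriptOperations.beginCreateOrUpdate(
+   *     "myScript",
+   *     { properties: { content: { query: "StormEvents | take 10" } } }
+   *   );
+   *   const created = await poller.pollUntilDone();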
+ */ + async beginCreateOrUpdate( + kqlScriptName: string, + kqlScript: KqlScriptResource, + options?: KqlScriptCreateOrUpdateOptionalParams + ): Promise< + PollerLike< + PollOperationState, + KqlScriptCreateOrUpdateResponse + > + > { + const { span } = createSpan( + "ArtifactsClient-beginCreateOrUpdate", + options || {} + ); + const directSendOperation = async ( + args: coreClient.OperationArguments, + spec: coreClient.OperationSpec + ): Promise => { + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as KqlScriptCreateOrUpdateResponse; + } catch (error) { + span.setStatus({ + code: coreTracing.SpanStatusCode.UNSET, + message: error.message + }); + throw error; + } finally { + span.end(); + } + }; + const sendOperation = async ( + args: coreClient.OperationArguments, + spec: coreClient.OperationSpec + ) => { + let currentRawResponse: + | coreClient.FullOperationResponse + | undefined = undefined; + const providedCallback = args.options?.onResponse; + const callback: coreClient.RawResponseCallback = ( + rawResponse: coreClient.FullOperationResponse, + flatResponse: unknown + ) => { + currentRawResponse = rawResponse; + providedCallback?.(rawResponse, flatResponse); + }; + const updatedArgs = { + ...args, + options: { + ...args.options, + onResponse: callback + } + }; + const flatResponse = await directSendOperation(updatedArgs, spec); + return { + flatResponse, + rawResponse: { + statusCode: currentRawResponse!.status, + body: currentRawResponse!.parsedBody, + headers: currentRawResponse!.headers.toJSON() + } + }; + }; + + const lro = new LroImpl( + sendOperation, + { kqlScriptName, kqlScript, options }, + createOrUpdateOperationSpec + ); + return new LroEngine(lro, { + resumeFrom: options?.resumeFrom, + intervalInMs: options?.updateIntervalInMs + }); + } + + /** + * Creates or updates a KQL Script + * @param kqlScriptName KQL script name + * @param kqlScript KQL script + * @param options The options parameters. + */ + async beginCreateOrUpdateAndWait( + kqlScriptName: string, + kqlScript: KqlScriptResource, + options?: KqlScriptCreateOrUpdateOptionalParams + ): Promise { + const poller = await this.beginCreateOrUpdate( + kqlScriptName, + kqlScript, + options + ); + return poller.pollUntilDone(); + } + + /** + * Get KQL script by name + * @param kqlScriptName KQL script name + * @param options The options parameters. + */ + async getByName( + kqlScriptName: string, + options?: KqlScriptGetByNameOptionalParams + ): Promise { + const { span } = createSpan("ArtifactsClient-getByName", options || {}); + try { + const result = await this.client.sendOperationRequest( + { kqlScriptName, options }, + getByNameOperationSpec + ); + return result as KqlScriptGetByNameResponse; + } catch (error) { + span.setStatus({ + code: coreTracing.SpanStatusCode.UNSET, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + /** + * Delete KQL script by name + * @param kqlScriptName KQL script name + * @param options The options parameters. 
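+   * Usage sketch, under the same assumed client surface as the
+   * create-or-update example above:
+   *
+   *   await client.kqlScriptOperations.beginDeleteByNameAndWait("myScript");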
+ */ + async beginDeleteByName( + kqlScriptName: string, + options?: KqlScriptDeleteByNameOptionalParams + ): Promise, void>> { + const { span } = createSpan( + "ArtifactsClient-beginDeleteByName", + options || {} + ); + const directSendOperation = async ( + args: coreClient.OperationArguments, + spec: coreClient.OperationSpec + ): Promise => { + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as void; + } catch (error) { + span.setStatus({ + code: coreTracing.SpanStatusCode.UNSET, + message: error.message + }); + throw error; + } finally { + span.end(); + } + }; + const sendOperation = async ( + args: coreClient.OperationArguments, + spec: coreClient.OperationSpec + ) => { + let currentRawResponse: + | coreClient.FullOperationResponse + | undefined = undefined; + const providedCallback = args.options?.onResponse; + const callback: coreClient.RawResponseCallback = ( + rawResponse: coreClient.FullOperationResponse, + flatResponse: unknown + ) => { + currentRawResponse = rawResponse; + providedCallback?.(rawResponse, flatResponse); + }; + const updatedArgs = { + ...args, + options: { + ...args.options, + onResponse: callback + } + }; + const flatResponse = await directSendOperation(updatedArgs, spec); + return { + flatResponse, + rawResponse: { + statusCode: currentRawResponse!.status, + body: currentRawResponse!.parsedBody, + headers: currentRawResponse!.headers.toJSON() + } + }; + }; + + const lro = new LroImpl( + sendOperation, + { kqlScriptName, options }, + deleteByNameOperationSpec + ); + return new LroEngine(lro, { + resumeFrom: options?.resumeFrom, + intervalInMs: options?.updateIntervalInMs + }); + } + + /** + * Delete KQL script by name + * @param kqlScriptName KQL script name + * @param options The options parameters. + */ + async beginDeleteByNameAndWait( + kqlScriptName: string, + options?: KqlScriptDeleteByNameOptionalParams + ): Promise { + const poller = await this.beginDeleteByName(kqlScriptName, options); + return poller.pollUntilDone(); + } + + /** + * Rename KQL script + * @param kqlScriptName KQL script name + * @param renameRequest Rename request + * @param options The options parameters. 
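+   * Usage sketch (it assumes `ArtifactRenameRequest` carries the target
+   * name as `newName`; client property name as assumed above):
+   *
+   *   await client.kqlScriptOperations.beginRenameAndWait("myScript", {
+   *     newName: "myScriptV2"
+   *   });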
+ */ + async beginRename( + kqlScriptName: string, + renameRequest: ArtifactRenameRequest, + options?: KqlScriptRenameOptionalParams + ): Promise, void>> { + const { span } = createSpan("ArtifactsClient-beginRename", options || {}); + const directSendOperation = async ( + args: coreClient.OperationArguments, + spec: coreClient.OperationSpec + ): Promise => { + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as void; + } catch (error) { + span.setStatus({ + code: coreTracing.SpanStatusCode.UNSET, + message: error.message + }); + throw error; + } finally { + span.end(); + } + }; + const sendOperation = async ( + args: coreClient.OperationArguments, + spec: coreClient.OperationSpec + ) => { + let currentRawResponse: + | coreClient.FullOperationResponse + | undefined = undefined; + const providedCallback = args.options?.onResponse; + const callback: coreClient.RawResponseCallback = ( + rawResponse: coreClient.FullOperationResponse, + flatResponse: unknown + ) => { + currentRawResponse = rawResponse; + providedCallback?.(rawResponse, flatResponse); + }; + const updatedArgs = { + ...args, + options: { + ...args.options, + onResponse: callback + } + }; + const flatResponse = await directSendOperation(updatedArgs, spec); + return { + flatResponse, + rawResponse: { + statusCode: currentRawResponse!.status, + body: currentRawResponse!.parsedBody, + headers: currentRawResponse!.headers.toJSON() + } + }; + }; + + const lro = new LroImpl( + sendOperation, + { kqlScriptName, renameRequest, options }, + renameOperationSpec + ); + return new LroEngine(lro, { + resumeFrom: options?.resumeFrom, + intervalInMs: options?.updateIntervalInMs + }); + } + + /** + * Rename KQL script + * @param kqlScriptName KQL script name + * @param renameRequest Rename request + * @param options The options parameters. 
+ */ + async beginRenameAndWait( + kqlScriptName: string, + renameRequest: ArtifactRenameRequest, + options?: KqlScriptRenameOptionalParams + ): Promise { + const poller = await this.beginRename( + kqlScriptName, + renameRequest, + options + ); + return poller.pollUntilDone(); + } +} +// Operation Specifications +const serializer = coreClient.createSerializer(Mappers, /* isXml */ false); + +const createOrUpdateOperationSpec: coreClient.OperationSpec = { + path: "/kqlScripts/{kqlScriptName}", + httpMethod: "PUT", + responses: { + 200: { + bodyMapper: Mappers.KqlScriptResource + }, + 201: { + bodyMapper: Mappers.KqlScriptResource + }, + 202: { + bodyMapper: Mappers.KqlScriptResource + }, + 204: { + bodyMapper: Mappers.KqlScriptResource + }, + default: { + bodyMapper: Mappers.ErrorContract + } + }, + requestBody: Parameters.kqlScript, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.kqlScriptName], + headerParameters: [Parameters.accept, Parameters.contentType], + mediaType: "json", + serializer +}; +const getByNameOperationSpec: coreClient.OperationSpec = { + path: "/kqlScripts/{kqlScriptName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.KqlScriptResource + }, + default: { + bodyMapper: Mappers.ErrorContract + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.kqlScriptName], + headerParameters: [Parameters.accept], + serializer +}; +const deleteByNameOperationSpec: coreClient.OperationSpec = { + path: "/kqlScripts/{kqlScriptName}", + httpMethod: "DELETE", + responses: { + 200: {}, + 201: {}, + 202: {}, + 204: {}, + default: { + bodyMapper: Mappers.ErrorContract + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.kqlScriptName], + headerParameters: [Parameters.accept], + serializer +}; +const renameOperationSpec: coreClient.OperationSpec = { + path: "/kqlScripts/{kqlScriptName}/rename", + httpMethod: "POST", + responses: { + 200: {}, + 201: {}, + 202: {}, + 204: {}, + default: { + bodyMapper: Mappers.ErrorContract + } + }, + requestBody: Parameters.renameRequest, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.kqlScriptName], + headerParameters: [Parameters.accept, Parameters.contentType], + mediaType: "json", + serializer +}; diff --git a/sdk/synapse/synapse-artifacts/src/operations/kqlScripts.ts b/sdk/synapse/synapse-artifacts/src/operations/kqlScripts.ts new file mode 100644 index 000000000000..5d2020b9625c --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operations/kqlScripts.ts @@ -0,0 +1,166 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+ */ + +import { createSpan } from "../tracing"; +import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import { KqlScripts } from "../operationsInterfaces"; +import * as coreClient from "@azure/core-client"; +import * as coreTracing from "@azure/core-tracing"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +import { ArtifactsClientContext } from "../artifactsClientContext"; +import { + KqlScriptResource, + KqlScriptsGetAllNextOptionalParams, + KqlScriptsGetAllOptionalParams, + KqlScriptsGetAllResponse, + KqlScriptsGetAllNextResponse +} from "../models"; + +/// +/** Class containing KqlScripts operations. */ +export class KqlScriptsImpl implements KqlScripts { + private readonly client: ArtifactsClientContext; + + /** + * Initialize a new instance of the class KqlScripts class. + * @param client Reference to the service client + */ + constructor(client: ArtifactsClientContext) { + this.client = client; + } + + /** + * Get all KQL scripts + * @param options The options parameters. + */ + public listAll( + options?: KqlScriptsGetAllOptionalParams + ): PagedAsyncIterableIterator { + const iter = this.getAllPagingAll(options); + return { + next() { + return iter.next(); + }, + [Symbol.asyncIterator]() { + return this; + }, + byPage: () => { + return this.getAllPagingPage(options); + } + }; + } + + private async *getAllPagingPage( + options?: KqlScriptsGetAllOptionalParams + ): AsyncIterableIterator { + let result = await this._getAll(options); + yield result.value || []; + let continuationToken = result.nextLink; + while (continuationToken) { + result = await this._getAllNext(continuationToken, options); + continuationToken = result.nextLink; + yield result.value || []; + } + } + + private async *getAllPagingAll( + options?: KqlScriptsGetAllOptionalParams + ): AsyncIterableIterator { + for await (const page of this.getAllPagingPage(options)) { + yield* page; + } + } + + /** + * Get all KQL scripts + * @param options The options parameters. + */ + private async _getAll( + options?: KqlScriptsGetAllOptionalParams + ): Promise { + const { span } = createSpan("ArtifactsClient-_getAll", options || {}); + try { + const result = await this.client.sendOperationRequest( + { options }, + getAllOperationSpec + ); + return result as KqlScriptsGetAllResponse; + } catch (error) { + span.setStatus({ + code: coreTracing.SpanStatusCode.UNSET, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + /** + * GetAllNext + * @param nextLink The nextLink from the previous successful call to the GetAll method. + * @param options The options parameters. 
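+   * The continuation link is substituted into the `{nextLink}` path as-is:
+   * the shared `nextLink` URL parameter is declared with `skipEncoding: true`,
+   * so the service-supplied link is not re-encoded.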
+   */
+  private async _getAllNext(
+    nextLink: string,
+    options?: KqlScriptsGetAllNextOptionalParams
+  ): Promise<KqlScriptsGetAllNextResponse> {
+    const { span } = createSpan("ArtifactsClient-_getAllNext", options || {});
+    try {
+      const result = await this.client.sendOperationRequest(
+        { nextLink, options },
+        getAllNextOperationSpec
+      );
+      return result as KqlScriptsGetAllNextResponse;
+    } catch (error) {
+      span.setStatus({
+        code: coreTracing.SpanStatusCode.UNSET,
+        message: error.message
+      });
+      throw error;
+    } finally {
+      span.end();
+    }
+  }
+}
+// Operation Specifications
+const serializer = coreClient.createSerializer(Mappers, /* isXml */ false);
+
+const getAllOperationSpec: coreClient.OperationSpec = {
+  path: "/kqlScripts",
+  httpMethod: "GET",
+  responses: {
+    200: {
+      bodyMapper: Mappers.KqlScriptsResourceCollectionResponse
+    },
+    default: {
+      bodyMapper: Mappers.ErrorContract
+    }
+  },
+  queryParameters: [Parameters.apiVersion],
+  urlParameters: [Parameters.endpoint],
+  headerParameters: [Parameters.accept],
+  serializer
+};
+const getAllNextOperationSpec: coreClient.OperationSpec = {
+  path: "{nextLink}",
+  httpMethod: "GET",
+  responses: {
+    200: {
+      bodyMapper: Mappers.KqlScriptsResourceCollectionResponse
+    },
+    default: {
+      bodyMapper: Mappers.ErrorContract
+    }
+  },
+  queryParameters: [Parameters.apiVersion],
+  urlParameters: [Parameters.endpoint, Parameters.nextLink],
+  headerParameters: [Parameters.accept],
+  serializer
+};
diff --git a/sdk/synapse/synapse-artifacts/src/operations/library.ts b/sdk/synapse/synapse-artifacts/src/operations/library.ts
index 9d81a81ac545..3a2b13309db1 100644
--- a/sdk/synapse/synapse-artifacts/src/operations/library.ts
+++ b/sdk/synapse/synapse-artifacts/src/operations/library.ts
@@ -7,7 +7,6 @@
  */
 
 import { createSpan } from "../tracing";
-import "@azure/core-paging";
 import { PagedAsyncIterableIterator } from "@azure/core-paging";
 import { Library } from "../operationsInterfaces";
 import * as coreClient from "@azure/core-client";
@@ -35,7 +34,7 @@ import {
 } from "../models";
 
 /// <reference lib="esnext.asynciterable" />
-/** Class representing a Library. */
+/** Class containing Library operations.
*/ export class LibraryImpl implements Library { private readonly client: ArtifactsClientContext; @@ -499,10 +498,10 @@ const listOperationSpec: coreClient.OperationSpec = { bodyMapper: Mappers.LibraryListResponse }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint], headerParameters: [Parameters.accept], serializer @@ -516,10 +515,10 @@ const flushOperationSpec: coreClient.OperationSpec = { 202: {}, 204: {}, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.libraryName], headerParameters: [Parameters.accept], serializer @@ -535,10 +534,10 @@ const getOperationResultOperationSpec: coreClient.OperationSpec = { bodyMapper: Mappers.OperationResult }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.operationId], headerParameters: [Parameters.accept], serializer @@ -552,10 +551,10 @@ const deleteOperationSpec: coreClient.OperationSpec = { 202: {}, 204: {}, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.libraryName], headerParameters: [Parameters.accept], serializer @@ -569,10 +568,10 @@ const getOperationSpec: coreClient.OperationSpec = { }, 304: {}, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.libraryName], headerParameters: [Parameters.accept], serializer @@ -586,10 +585,10 @@ const createOperationSpec: coreClient.OperationSpec = { 202: {}, 204: {}, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.libraryName], headerParameters: [Parameters.accept], serializer @@ -600,11 +599,11 @@ const appendOperationSpec: coreClient.OperationSpec = { responses: { 201: {}, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, requestBody: Parameters.content, - queryParameters: [Parameters.apiVersion, Parameters.comp], + queryParameters: [Parameters.apiVersion1, Parameters.comp], urlParameters: [Parameters.endpoint, Parameters.libraryName], headerParameters: [ Parameters.contentType1, @@ -622,10 +621,10 @@ const listNextOperationSpec: coreClient.OperationSpec = { bodyMapper: Mappers.LibraryListResponse }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.nextLink], headerParameters: [Parameters.accept], serializer diff --git a/sdk/synapse/synapse-artifacts/src/operations/linkedServiceOperations.ts b/sdk/synapse/synapse-artifacts/src/operations/linkedServiceOperations.ts index 314541f60807..7e616198cf79 100644 --- 
a/sdk/synapse/synapse-artifacts/src/operations/linkedServiceOperations.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/linkedServiceOperations.ts @@ -7,7 +7,6 @@ */ import { createSpan } from "../tracing"; -import "@azure/core-paging"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; import { LinkedServiceOperations } from "../operationsInterfaces"; import * as coreClient from "@azure/core-client"; @@ -19,21 +18,21 @@ import { PollerLike, PollOperationState, LroEngine } from "@azure/core-lro"; import { LroImpl } from "../lroImpl"; import { LinkedServiceResource, - LinkedServiceOperationsGetLinkedServicesByWorkspaceNextOptionalParams, - LinkedServiceOperationsGetLinkedServicesByWorkspaceOptionalParams, - LinkedServiceOperationsGetLinkedServicesByWorkspaceResponse, - LinkedServiceOperationsCreateOrUpdateLinkedServiceOptionalParams, - LinkedServiceOperationsCreateOrUpdateLinkedServiceResponse, - LinkedServiceOperationsGetLinkedServiceOptionalParams, - LinkedServiceOperationsGetLinkedServiceResponse, - LinkedServiceOperationsDeleteLinkedServiceOptionalParams, + LinkedServiceGetLinkedServicesByWorkspaceNextOptionalParams, + LinkedServiceGetLinkedServicesByWorkspaceOptionalParams, + LinkedServiceGetLinkedServicesByWorkspaceResponse, + LinkedServiceCreateOrUpdateLinkedServiceOptionalParams, + LinkedServiceCreateOrUpdateLinkedServiceResponse, + LinkedServiceGetLinkedServiceOptionalParams, + LinkedServiceGetLinkedServiceResponse, + LinkedServiceDeleteLinkedServiceOptionalParams, ArtifactRenameRequest, - LinkedServiceOperationsRenameLinkedServiceOptionalParams, - LinkedServiceOperationsGetLinkedServicesByWorkspaceNextResponse + LinkedServiceRenameLinkedServiceOptionalParams, + LinkedServiceGetLinkedServicesByWorkspaceNextResponse } from "../models"; /// -/** Class representing a LinkedServiceOperations. */ +/** Class containing LinkedServiceOperations operations. */ export class LinkedServiceOperationsImpl implements LinkedServiceOperations { private readonly client: ArtifactsClientContext; @@ -50,7 +49,7 @@ export class LinkedServiceOperationsImpl implements LinkedServiceOperations { * @param options The options parameters. */ public listLinkedServicesByWorkspace( - options?: LinkedServiceOperationsGetLinkedServicesByWorkspaceOptionalParams + options?: LinkedServiceGetLinkedServicesByWorkspaceOptionalParams ): PagedAsyncIterableIterator { const iter = this.getLinkedServicesByWorkspacePagingAll(options); return { @@ -67,7 +66,7 @@ export class LinkedServiceOperationsImpl implements LinkedServiceOperations { } private async *getLinkedServicesByWorkspacePagingPage( - options?: LinkedServiceOperationsGetLinkedServicesByWorkspaceOptionalParams + options?: LinkedServiceGetLinkedServicesByWorkspaceOptionalParams ): AsyncIterableIterator { let result = await this._getLinkedServicesByWorkspace(options); yield result.value || []; @@ -83,7 +82,7 @@ export class LinkedServiceOperationsImpl implements LinkedServiceOperations { } private async *getLinkedServicesByWorkspacePagingAll( - options?: LinkedServiceOperationsGetLinkedServicesByWorkspaceOptionalParams + options?: LinkedServiceGetLinkedServicesByWorkspaceOptionalParams ): AsyncIterableIterator { for await (const page of this.getLinkedServicesByWorkspacePagingPage( options @@ -97,8 +96,8 @@ export class LinkedServiceOperationsImpl implements LinkedServiceOperations { * @param options The options parameters. 
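   *
   * Illustrative sketch (not part of the generated code): the public
   * `listLinkedServicesByWorkspace` wrapper drives this method page by page.
   * Assumes `ops` is an already-constructed LinkedServiceOperationsImpl.
   *
   *   for await (const page of ops.listLinkedServicesByWorkspace().byPage()) {
   *     console.log(page.map((ls) => ls.name));
   *   }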
*/ private async _getLinkedServicesByWorkspace( - options?: LinkedServiceOperationsGetLinkedServicesByWorkspaceOptionalParams - ): Promise { + options?: LinkedServiceGetLinkedServicesByWorkspaceOptionalParams + ): Promise { const { span } = createSpan( "ArtifactsClient-_getLinkedServicesByWorkspace", options || {} @@ -108,7 +107,7 @@ export class LinkedServiceOperationsImpl implements LinkedServiceOperations { { options }, getLinkedServicesByWorkspaceOperationSpec ); - return result as LinkedServiceOperationsGetLinkedServicesByWorkspaceResponse; + return result as LinkedServiceGetLinkedServicesByWorkspaceResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -129,13 +128,11 @@ export class LinkedServiceOperationsImpl implements LinkedServiceOperations { async beginCreateOrUpdateLinkedService( linkedServiceName: string, linkedService: LinkedServiceResource, - options?: LinkedServiceOperationsCreateOrUpdateLinkedServiceOptionalParams + options?: LinkedServiceCreateOrUpdateLinkedServiceOptionalParams ): Promise< PollerLike< - PollOperationState< - LinkedServiceOperationsCreateOrUpdateLinkedServiceResponse - >, - LinkedServiceOperationsCreateOrUpdateLinkedServiceResponse + PollOperationState, + LinkedServiceCreateOrUpdateLinkedServiceResponse > > { const { span } = createSpan( @@ -145,10 +142,10 @@ export class LinkedServiceOperationsImpl implements LinkedServiceOperations { const directSendOperation = async ( args: coreClient.OperationArguments, spec: coreClient.OperationSpec - ): Promise => { + ): Promise => { try { const result = await this.client.sendOperationRequest(args, spec); - return result as LinkedServiceOperationsCreateOrUpdateLinkedServiceResponse; + return result as LinkedServiceCreateOrUpdateLinkedServiceResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -212,8 +209,8 @@ export class LinkedServiceOperationsImpl implements LinkedServiceOperations { async beginCreateOrUpdateLinkedServiceAndWait( linkedServiceName: string, linkedService: LinkedServiceResource, - options?: LinkedServiceOperationsCreateOrUpdateLinkedServiceOptionalParams - ): Promise { + options?: LinkedServiceCreateOrUpdateLinkedServiceOptionalParams + ): Promise { const poller = await this.beginCreateOrUpdateLinkedService( linkedServiceName, linkedService, @@ -229,8 +226,8 @@ export class LinkedServiceOperationsImpl implements LinkedServiceOperations { */ async getLinkedService( linkedServiceName: string, - options?: LinkedServiceOperationsGetLinkedServiceOptionalParams - ): Promise { + options?: LinkedServiceGetLinkedServiceOptionalParams + ): Promise { const { span } = createSpan( "ArtifactsClient-getLinkedService", options || {} @@ -240,7 +237,7 @@ export class LinkedServiceOperationsImpl implements LinkedServiceOperations { { linkedServiceName, options }, getLinkedServiceOperationSpec ); - return result as LinkedServiceOperationsGetLinkedServiceResponse; + return result as LinkedServiceGetLinkedServiceResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -259,7 +256,7 @@ export class LinkedServiceOperationsImpl implements LinkedServiceOperations { */ async beginDeleteLinkedService( linkedServiceName: string, - options?: LinkedServiceOperationsDeleteLinkedServiceOptionalParams + options?: LinkedServiceDeleteLinkedServiceOptionalParams ): Promise, void>> { const { span } = createSpan( "ArtifactsClient-beginDeleteLinkedService", @@ -333,7 +330,7 @@ export class LinkedServiceOperationsImpl implements 
LinkedServiceOperations { */ async beginDeleteLinkedServiceAndWait( linkedServiceName: string, - options?: LinkedServiceOperationsDeleteLinkedServiceOptionalParams + options?: LinkedServiceDeleteLinkedServiceOptionalParams ): Promise { const poller = await this.beginDeleteLinkedService( linkedServiceName, @@ -351,7 +348,7 @@ export class LinkedServiceOperationsImpl implements LinkedServiceOperations { async beginRenameLinkedService( linkedServiceName: string, request: ArtifactRenameRequest, - options?: LinkedServiceOperationsRenameLinkedServiceOptionalParams + options?: LinkedServiceRenameLinkedServiceOptionalParams ): Promise, void>> { const { span } = createSpan( "ArtifactsClient-beginRenameLinkedService", @@ -427,7 +424,7 @@ export class LinkedServiceOperationsImpl implements LinkedServiceOperations { async beginRenameLinkedServiceAndWait( linkedServiceName: string, request: ArtifactRenameRequest, - options?: LinkedServiceOperationsRenameLinkedServiceOptionalParams + options?: LinkedServiceRenameLinkedServiceOptionalParams ): Promise { const poller = await this.beginRenameLinkedService( linkedServiceName, @@ -445,8 +442,8 @@ export class LinkedServiceOperationsImpl implements LinkedServiceOperations { */ private async _getLinkedServicesByWorkspaceNext( nextLink: string, - options?: LinkedServiceOperationsGetLinkedServicesByWorkspaceNextOptionalParams - ): Promise { + options?: LinkedServiceGetLinkedServicesByWorkspaceNextOptionalParams + ): Promise { const { span } = createSpan( "ArtifactsClient-_getLinkedServicesByWorkspaceNext", options || {} @@ -456,7 +453,7 @@ export class LinkedServiceOperationsImpl implements LinkedServiceOperations { { nextLink, options }, getLinkedServicesByWorkspaceNextOperationSpec ); - return result as LinkedServiceOperationsGetLinkedServicesByWorkspaceNextResponse; + return result as LinkedServiceGetLinkedServicesByWorkspaceNextResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -479,10 +476,10 @@ const getLinkedServicesByWorkspaceOperationSpec: coreClient.OperationSpec = { bodyMapper: Mappers.LinkedServiceListResponse }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint], headerParameters: [Parameters.accept], serializer @@ -504,11 +501,11 @@ const createOrUpdateLinkedServiceOperationSpec: coreClient.OperationSpec = { bodyMapper: Mappers.LinkedServiceResource }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, requestBody: Parameters.linkedService, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.linkedServiceName], headerParameters: [ Parameters.accept, @@ -527,10 +524,10 @@ const getLinkedServiceOperationSpec: coreClient.OperationSpec = { }, 304: {}, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.linkedServiceName], headerParameters: [Parameters.accept, Parameters.ifNoneMatch], serializer @@ -544,10 +541,10 @@ const deleteLinkedServiceOperationSpec: coreClient.OperationSpec = { 202: {}, 204: {}, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + 
queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.linkedServiceName], headerParameters: [Parameters.accept], serializer @@ -561,11 +558,11 @@ const renameLinkedServiceOperationSpec: coreClient.OperationSpec = { 202: {}, 204: {}, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, requestBody: Parameters.request, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.linkedServiceName], headerParameters: [Parameters.accept, Parameters.contentType], mediaType: "json", @@ -579,10 +576,10 @@ const getLinkedServicesByWorkspaceNextOperationSpec: coreClient.OperationSpec = bodyMapper: Mappers.LinkedServiceListResponse }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.nextLink], headerParameters: [Parameters.accept], serializer diff --git a/sdk/synapse/synapse-artifacts/src/operations/notebookOperationResult.ts b/sdk/synapse/synapse-artifacts/src/operations/notebookOperationResult.ts new file mode 100644 index 000000000000..7298d1b1b726 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operations/notebookOperationResult.ts @@ -0,0 +1,76 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ + +import { createSpan } from "../tracing"; +import { NotebookOperationResult } from "../operationsInterfaces"; +import * as coreClient from "@azure/core-client"; +import * as coreTracing from "@azure/core-tracing"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +import { ArtifactsClientContext } from "../artifactsClientContext"; +import { NotebookOperationResultGetOptionalParams } from "../models"; + +/** Class containing NotebookOperationResult operations. */ +export class NotebookOperationResultImpl implements NotebookOperationResult { + private readonly client: ArtifactsClientContext; + + /** + * Initialize a new instance of the class NotebookOperationResult class. + * @param client Reference to the service client + */ + constructor(client: ArtifactsClientContext) { + this.client = client; + } + + /** + * Get notebook operation result + * @param operationId Operation ID. + * @param options The options parameters. 
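+   *
+   * Illustrative sketch (not part of the generated code), assuming `context` is
+   * an existing ArtifactsClientContext and `operationId` came from an earlier
+   * notebook operation; `get` resolves void on success and throws the mapped
+   * ErrorContract on failure:
+   *
+   *   const operationResult = new NotebookOperationResultImpl(context);
+   *   await operationResult.get(operationId);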
+   */
+  async get(
+    operationId: string,
+    options?: NotebookOperationResultGetOptionalParams
+  ): Promise<void> {
+    const { span } = createSpan("ArtifactsClient-get", options || {});
+    try {
+      const result = await this.client.sendOperationRequest(
+        { operationId, options },
+        getOperationSpec
+      );
+      return result as void;
+    } catch (error) {
+      span.setStatus({
+        code: coreTracing.SpanStatusCode.UNSET,
+        message: error.message
+      });
+      throw error;
+    } finally {
+      span.end();
+    }
+  }
+}
+// Operation Specifications
+const serializer = coreClient.createSerializer(Mappers, /* isXml */ false);
+
+const getOperationSpec: coreClient.OperationSpec = {
+  path: "/notebookOperationResults/{operationId}",
+  httpMethod: "GET",
+  responses: {
+    200: {},
+    201: {},
+    202: {},
+    204: {},
+    default: {
+      bodyMapper: Mappers.ErrorContract
+    }
+  },
+  queryParameters: [Parameters.apiVersion1],
+  urlParameters: [Parameters.endpoint, Parameters.operationId],
+  headerParameters: [Parameters.accept],
+  serializer
+};
diff --git a/sdk/synapse/synapse-artifacts/src/operations/notebookOperations.ts b/sdk/synapse/synapse-artifacts/src/operations/notebookOperations.ts
index f054a14cfdaa..effb7decc910 100644
--- a/sdk/synapse/synapse-artifacts/src/operations/notebookOperations.ts
+++ b/sdk/synapse/synapse-artifacts/src/operations/notebookOperations.ts
@@ -7,7 +7,6 @@
  */
 
 import { createSpan } from "../tracing";
-import "@azure/core-paging";
 import { PagedAsyncIterableIterator } from "@azure/core-paging";
 import { NotebookOperations } from "../operationsInterfaces";
 import * as coreClient from "@azure/core-client";
@@ -19,25 +18,25 @@ import { PollerLike, PollOperationState, LroEngine } from "@azure/core-lro";
 import { LroImpl } from "../lroImpl";
 import {
   NotebookResource,
-  NotebookOperationsGetNotebooksByWorkspaceNextOptionalParams,
-  NotebookOperationsGetNotebooksByWorkspaceOptionalParams,
-  NotebookOperationsGetNotebookSummaryByWorkSpaceNextOptionalParams,
-  NotebookOperationsGetNotebookSummaryByWorkSpaceOptionalParams,
-  NotebookOperationsGetNotebooksByWorkspaceResponse,
-  NotebookOperationsGetNotebookSummaryByWorkSpaceResponse,
-  NotebookOperationsCreateOrUpdateNotebookOptionalParams,
-  NotebookOperationsCreateOrUpdateNotebookResponse,
-  NotebookOperationsGetNotebookOptionalParams,
-  NotebookOperationsGetNotebookResponse,
-  NotebookOperationsDeleteNotebookOptionalParams,
+  NotebookGetNotebooksByWorkspaceNextOptionalParams,
+  NotebookGetNotebooksByWorkspaceOptionalParams,
+  NotebookGetNotebookSummaryByWorkSpaceNextOptionalParams,
+  NotebookGetNotebookSummaryByWorkSpaceOptionalParams,
+  NotebookGetNotebooksByWorkspaceResponse,
+  NotebookGetNotebookSummaryByWorkSpaceResponse,
+  NotebookCreateOrUpdateNotebookOptionalParams,
+  NotebookCreateOrUpdateNotebookResponse,
+  NotebookGetNotebookOptionalParams,
+  NotebookGetNotebookResponse,
+  NotebookDeleteNotebookOptionalParams,
   ArtifactRenameRequest,
-  NotebookOperationsRenameNotebookOptionalParams,
-  NotebookOperationsGetNotebooksByWorkspaceNextResponse,
-  NotebookOperationsGetNotebookSummaryByWorkSpaceNextResponse
+  NotebookRenameNotebookOptionalParams,
+  NotebookGetNotebooksByWorkspaceNextResponse,
+  NotebookGetNotebookSummaryByWorkSpaceNextResponse
 } from "../models";
 
 /// <reference lib="esnext.asynciterable" />
-/** Class representing a NotebookOperations. */
+/** Class containing NotebookOperations operations.
*/ export class NotebookOperationsImpl implements NotebookOperations { private readonly client: ArtifactsClientContext; @@ -54,7 +53,7 @@ export class NotebookOperationsImpl implements NotebookOperations { * @param options The options parameters. */ public listNotebooksByWorkspace( - options?: NotebookOperationsGetNotebooksByWorkspaceOptionalParams + options?: NotebookGetNotebooksByWorkspaceOptionalParams ): PagedAsyncIterableIterator { const iter = this.getNotebooksByWorkspacePagingAll(options); return { @@ -71,7 +70,7 @@ export class NotebookOperationsImpl implements NotebookOperations { } private async *getNotebooksByWorkspacePagingPage( - options?: NotebookOperationsGetNotebooksByWorkspaceOptionalParams + options?: NotebookGetNotebooksByWorkspaceOptionalParams ): AsyncIterableIterator { let result = await this._getNotebooksByWorkspace(options); yield result.value || []; @@ -87,7 +86,7 @@ export class NotebookOperationsImpl implements NotebookOperations { } private async *getNotebooksByWorkspacePagingAll( - options?: NotebookOperationsGetNotebooksByWorkspaceOptionalParams + options?: NotebookGetNotebooksByWorkspaceOptionalParams ): AsyncIterableIterator { for await (const page of this.getNotebooksByWorkspacePagingPage(options)) { yield* page; @@ -99,7 +98,7 @@ export class NotebookOperationsImpl implements NotebookOperations { * @param options The options parameters. */ public listNotebookSummaryByWorkSpace( - options?: NotebookOperationsGetNotebookSummaryByWorkSpaceOptionalParams + options?: NotebookGetNotebookSummaryByWorkSpaceOptionalParams ): PagedAsyncIterableIterator { const iter = this.getNotebookSummaryByWorkSpacePagingAll(options); return { @@ -116,7 +115,7 @@ export class NotebookOperationsImpl implements NotebookOperations { } private async *getNotebookSummaryByWorkSpacePagingPage( - options?: NotebookOperationsGetNotebookSummaryByWorkSpaceOptionalParams + options?: NotebookGetNotebookSummaryByWorkSpaceOptionalParams ): AsyncIterableIterator { let result = await this._getNotebookSummaryByWorkSpace(options); yield result.value || []; @@ -132,7 +131,7 @@ export class NotebookOperationsImpl implements NotebookOperations { } private async *getNotebookSummaryByWorkSpacePagingAll( - options?: NotebookOperationsGetNotebookSummaryByWorkSpaceOptionalParams + options?: NotebookGetNotebookSummaryByWorkSpaceOptionalParams ): AsyncIterableIterator { for await (const page of this.getNotebookSummaryByWorkSpacePagingPage( options @@ -146,8 +145,8 @@ export class NotebookOperationsImpl implements NotebookOperations { * @param options The options parameters. */ private async _getNotebooksByWorkspace( - options?: NotebookOperationsGetNotebooksByWorkspaceOptionalParams - ): Promise { + options?: NotebookGetNotebooksByWorkspaceOptionalParams + ): Promise { const { span } = createSpan( "ArtifactsClient-_getNotebooksByWorkspace", options || {} @@ -157,7 +156,7 @@ export class NotebookOperationsImpl implements NotebookOperations { { options }, getNotebooksByWorkspaceOperationSpec ); - return result as NotebookOperationsGetNotebooksByWorkspaceResponse; + return result as NotebookGetNotebooksByWorkspaceResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -174,8 +173,8 @@ export class NotebookOperationsImpl implements NotebookOperations { * @param options The options parameters. 
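   *
   * Illustrative sketch (not part of the generated code), assuming `ops` is an
   * already-constructed NotebookOperationsImpl:
   *
   *   for await (const notebook of ops.listNotebookSummaryByWorkSpace()) {
   *     console.log(notebook.name);
   *   }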
*/ private async _getNotebookSummaryByWorkSpace( - options?: NotebookOperationsGetNotebookSummaryByWorkSpaceOptionalParams - ): Promise { + options?: NotebookGetNotebookSummaryByWorkSpaceOptionalParams + ): Promise { const { span } = createSpan( "ArtifactsClient-_getNotebookSummaryByWorkSpace", options || {} @@ -185,7 +184,7 @@ export class NotebookOperationsImpl implements NotebookOperations { { options }, getNotebookSummaryByWorkSpaceOperationSpec ); - return result as NotebookOperationsGetNotebookSummaryByWorkSpaceResponse; + return result as NotebookGetNotebookSummaryByWorkSpaceResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -206,11 +205,11 @@ export class NotebookOperationsImpl implements NotebookOperations { async beginCreateOrUpdateNotebook( notebookName: string, notebook: NotebookResource, - options?: NotebookOperationsCreateOrUpdateNotebookOptionalParams + options?: NotebookCreateOrUpdateNotebookOptionalParams ): Promise< PollerLike< - PollOperationState, - NotebookOperationsCreateOrUpdateNotebookResponse + PollOperationState, + NotebookCreateOrUpdateNotebookResponse > > { const { span } = createSpan( @@ -220,10 +219,10 @@ export class NotebookOperationsImpl implements NotebookOperations { const directSendOperation = async ( args: coreClient.OperationArguments, spec: coreClient.OperationSpec - ): Promise => { + ): Promise => { try { const result = await this.client.sendOperationRequest(args, spec); - return result as NotebookOperationsCreateOrUpdateNotebookResponse; + return result as NotebookCreateOrUpdateNotebookResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -287,8 +286,8 @@ export class NotebookOperationsImpl implements NotebookOperations { async beginCreateOrUpdateNotebookAndWait( notebookName: string, notebook: NotebookResource, - options?: NotebookOperationsCreateOrUpdateNotebookOptionalParams - ): Promise { + options?: NotebookCreateOrUpdateNotebookOptionalParams + ): Promise { const poller = await this.beginCreateOrUpdateNotebook( notebookName, notebook, @@ -304,15 +303,15 @@ export class NotebookOperationsImpl implements NotebookOperations { */ async getNotebook( notebookName: string, - options?: NotebookOperationsGetNotebookOptionalParams - ): Promise { + options?: NotebookGetNotebookOptionalParams + ): Promise { const { span } = createSpan("ArtifactsClient-getNotebook", options || {}); try { const result = await this.client.sendOperationRequest( { notebookName, options }, getNotebookOperationSpec ); - return result as NotebookOperationsGetNotebookResponse; + return result as NotebookGetNotebookResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -331,7 +330,7 @@ export class NotebookOperationsImpl implements NotebookOperations { */ async beginDeleteNotebook( notebookName: string, - options?: NotebookOperationsDeleteNotebookOptionalParams + options?: NotebookDeleteNotebookOptionalParams ): Promise, void>> { const { span } = createSpan( "ArtifactsClient-beginDeleteNotebook", @@ -405,7 +404,7 @@ export class NotebookOperationsImpl implements NotebookOperations { */ async beginDeleteNotebookAndWait( notebookName: string, - options?: NotebookOperationsDeleteNotebookOptionalParams + options?: NotebookDeleteNotebookOptionalParams ): Promise { const poller = await this.beginDeleteNotebook(notebookName, options); return poller.pollUntilDone(); @@ -420,7 +419,7 @@ export class NotebookOperationsImpl implements NotebookOperations { async beginRenameNotebook( 
notebookName: string, request: ArtifactRenameRequest, - options?: NotebookOperationsRenameNotebookOptionalParams + options?: NotebookRenameNotebookOptionalParams ): Promise, void>> { const { span } = createSpan( "ArtifactsClient-beginRenameNotebook", @@ -496,7 +495,7 @@ export class NotebookOperationsImpl implements NotebookOperations { async beginRenameNotebookAndWait( notebookName: string, request: ArtifactRenameRequest, - options?: NotebookOperationsRenameNotebookOptionalParams + options?: NotebookRenameNotebookOptionalParams ): Promise { const poller = await this.beginRenameNotebook( notebookName, @@ -514,8 +513,8 @@ export class NotebookOperationsImpl implements NotebookOperations { */ private async _getNotebooksByWorkspaceNext( nextLink: string, - options?: NotebookOperationsGetNotebooksByWorkspaceNextOptionalParams - ): Promise { + options?: NotebookGetNotebooksByWorkspaceNextOptionalParams + ): Promise { const { span } = createSpan( "ArtifactsClient-_getNotebooksByWorkspaceNext", options || {} @@ -525,7 +524,7 @@ export class NotebookOperationsImpl implements NotebookOperations { { nextLink, options }, getNotebooksByWorkspaceNextOperationSpec ); - return result as NotebookOperationsGetNotebooksByWorkspaceNextResponse; + return result as NotebookGetNotebooksByWorkspaceNextResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -545,8 +544,8 @@ export class NotebookOperationsImpl implements NotebookOperations { */ private async _getNotebookSummaryByWorkSpaceNext( nextLink: string, - options?: NotebookOperationsGetNotebookSummaryByWorkSpaceNextOptionalParams - ): Promise { + options?: NotebookGetNotebookSummaryByWorkSpaceNextOptionalParams + ): Promise { const { span } = createSpan( "ArtifactsClient-_getNotebookSummaryByWorkSpaceNext", options || {} @@ -556,7 +555,7 @@ export class NotebookOperationsImpl implements NotebookOperations { { nextLink, options }, getNotebookSummaryByWorkSpaceNextOperationSpec ); - return result as NotebookOperationsGetNotebookSummaryByWorkSpaceNextResponse; + return result as NotebookGetNotebookSummaryByWorkSpaceNextResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -579,26 +578,26 @@ const getNotebooksByWorkspaceOperationSpec: coreClient.OperationSpec = { bodyMapper: Mappers.NotebookListResponse }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint], headerParameters: [Parameters.accept], serializer }; const getNotebookSummaryByWorkSpaceOperationSpec: coreClient.OperationSpec = { - path: "/notebooks/summary", + path: "/notebooksSummary", httpMethod: "GET", responses: { 200: { bodyMapper: Mappers.NotebookListResponse }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint], headerParameters: [Parameters.accept], serializer @@ -620,11 +619,11 @@ const createOrUpdateNotebookOperationSpec: coreClient.OperationSpec = { bodyMapper: Mappers.NotebookResource }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, requestBody: Parameters.notebook, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.notebookName], headerParameters: [ 
Parameters.accept, @@ -643,10 +642,10 @@ const getNotebookOperationSpec: coreClient.OperationSpec = { }, 304: {}, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.notebookName], headerParameters: [Parameters.accept, Parameters.ifNoneMatch], serializer @@ -660,10 +659,10 @@ const deleteNotebookOperationSpec: coreClient.OperationSpec = { 202: {}, 204: {}, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.notebookName], headerParameters: [Parameters.accept], serializer @@ -677,11 +676,11 @@ const renameNotebookOperationSpec: coreClient.OperationSpec = { 202: {}, 204: {}, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, requestBody: Parameters.request, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.notebookName], headerParameters: [Parameters.accept, Parameters.contentType], mediaType: "json", @@ -695,10 +694,10 @@ const getNotebooksByWorkspaceNextOperationSpec: coreClient.OperationSpec = { bodyMapper: Mappers.NotebookListResponse }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.nextLink], headerParameters: [Parameters.accept], serializer @@ -711,10 +710,10 @@ const getNotebookSummaryByWorkSpaceNextOperationSpec: coreClient.OperationSpec = bodyMapper: Mappers.NotebookListResponse }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.nextLink], headerParameters: [Parameters.accept], serializer diff --git a/sdk/synapse/synapse-artifacts/src/operations/pipelineOperations.ts b/sdk/synapse/synapse-artifacts/src/operations/pipelineOperations.ts index 100b60d9f67a..908503e12ab6 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/pipelineOperations.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/pipelineOperations.ts @@ -7,7 +7,6 @@ */ import { createSpan } from "../tracing"; -import "@azure/core-paging"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; import { PipelineOperations } from "../operationsInterfaces"; import * as coreClient from "@azure/core-client"; @@ -19,23 +18,23 @@ import { PollerLike, PollOperationState, LroEngine } from "@azure/core-lro"; import { LroImpl } from "../lroImpl"; import { PipelineResource, - PipelineOperationsGetPipelinesByWorkspaceNextOptionalParams, - PipelineOperationsGetPipelinesByWorkspaceOptionalParams, - PipelineOperationsGetPipelinesByWorkspaceResponse, - PipelineOperationsCreateOrUpdatePipelineOptionalParams, - PipelineOperationsCreateOrUpdatePipelineResponse, - PipelineOperationsGetPipelineOptionalParams, - PipelineOperationsGetPipelineResponse, - PipelineOperationsDeletePipelineOptionalParams, + PipelineGetPipelinesByWorkspaceNextOptionalParams, + PipelineGetPipelinesByWorkspaceOptionalParams, + PipelineGetPipelinesByWorkspaceResponse, + 
PipelineCreateOrUpdatePipelineOptionalParams, + PipelineCreateOrUpdatePipelineResponse, + PipelineGetPipelineOptionalParams, + PipelineGetPipelineResponse, + PipelineDeletePipelineOptionalParams, ArtifactRenameRequest, - PipelineOperationsRenamePipelineOptionalParams, - PipelineOperationsCreatePipelineRunOptionalParams, - PipelineOperationsCreatePipelineRunResponse, - PipelineOperationsGetPipelinesByWorkspaceNextResponse + PipelineRenamePipelineOptionalParams, + PipelineCreatePipelineRunOptionalParams, + PipelineCreatePipelineRunResponse, + PipelineGetPipelinesByWorkspaceNextResponse } from "../models"; /// -/** Class representing a PipelineOperations. */ +/** Class containing PipelineOperations operations. */ export class PipelineOperationsImpl implements PipelineOperations { private readonly client: ArtifactsClientContext; @@ -52,7 +51,7 @@ export class PipelineOperationsImpl implements PipelineOperations { * @param options The options parameters. */ public listPipelinesByWorkspace( - options?: PipelineOperationsGetPipelinesByWorkspaceOptionalParams + options?: PipelineGetPipelinesByWorkspaceOptionalParams ): PagedAsyncIterableIterator { const iter = this.getPipelinesByWorkspacePagingAll(options); return { @@ -69,7 +68,7 @@ export class PipelineOperationsImpl implements PipelineOperations { } private async *getPipelinesByWorkspacePagingPage( - options?: PipelineOperationsGetPipelinesByWorkspaceOptionalParams + options?: PipelineGetPipelinesByWorkspaceOptionalParams ): AsyncIterableIterator { let result = await this._getPipelinesByWorkspace(options); yield result.value || []; @@ -85,7 +84,7 @@ export class PipelineOperationsImpl implements PipelineOperations { } private async *getPipelinesByWorkspacePagingAll( - options?: PipelineOperationsGetPipelinesByWorkspaceOptionalParams + options?: PipelineGetPipelinesByWorkspaceOptionalParams ): AsyncIterableIterator { for await (const page of this.getPipelinesByWorkspacePagingPage(options)) { yield* page; @@ -97,8 +96,8 @@ export class PipelineOperationsImpl implements PipelineOperations { * @param options The options parameters. 
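   *
   * Illustrative sketch (not part of the generated code), assuming `ops` is an
   * already-constructed PipelineOperationsImpl:
   *
   *   for await (const pipeline of ops.listPipelinesByWorkspace()) {
   *     console.log(pipeline.name);
   *   }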
*/ private async _getPipelinesByWorkspace( - options?: PipelineOperationsGetPipelinesByWorkspaceOptionalParams - ): Promise { + options?: PipelineGetPipelinesByWorkspaceOptionalParams + ): Promise { const { span } = createSpan( "ArtifactsClient-_getPipelinesByWorkspace", options || {} @@ -108,7 +107,7 @@ export class PipelineOperationsImpl implements PipelineOperations { { options }, getPipelinesByWorkspaceOperationSpec ); - return result as PipelineOperationsGetPipelinesByWorkspaceResponse; + return result as PipelineGetPipelinesByWorkspaceResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -129,11 +128,11 @@ export class PipelineOperationsImpl implements PipelineOperations { async beginCreateOrUpdatePipeline( pipelineName: string, pipeline: PipelineResource, - options?: PipelineOperationsCreateOrUpdatePipelineOptionalParams + options?: PipelineCreateOrUpdatePipelineOptionalParams ): Promise< PollerLike< - PollOperationState, - PipelineOperationsCreateOrUpdatePipelineResponse + PollOperationState, + PipelineCreateOrUpdatePipelineResponse > > { const { span } = createSpan( @@ -143,10 +142,10 @@ export class PipelineOperationsImpl implements PipelineOperations { const directSendOperation = async ( args: coreClient.OperationArguments, spec: coreClient.OperationSpec - ): Promise => { + ): Promise => { try { const result = await this.client.sendOperationRequest(args, spec); - return result as PipelineOperationsCreateOrUpdatePipelineResponse; + return result as PipelineCreateOrUpdatePipelineResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -210,8 +209,8 @@ export class PipelineOperationsImpl implements PipelineOperations { async beginCreateOrUpdatePipelineAndWait( pipelineName: string, pipeline: PipelineResource, - options?: PipelineOperationsCreateOrUpdatePipelineOptionalParams - ): Promise { + options?: PipelineCreateOrUpdatePipelineOptionalParams + ): Promise { const poller = await this.beginCreateOrUpdatePipeline( pipelineName, pipeline, @@ -227,15 +226,15 @@ export class PipelineOperationsImpl implements PipelineOperations { */ async getPipeline( pipelineName: string, - options?: PipelineOperationsGetPipelineOptionalParams - ): Promise { + options?: PipelineGetPipelineOptionalParams + ): Promise { const { span } = createSpan("ArtifactsClient-getPipeline", options || {}); try { const result = await this.client.sendOperationRequest( { pipelineName, options }, getPipelineOperationSpec ); - return result as PipelineOperationsGetPipelineResponse; + return result as PipelineGetPipelineResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -254,7 +253,7 @@ export class PipelineOperationsImpl implements PipelineOperations { */ async beginDeletePipeline( pipelineName: string, - options?: PipelineOperationsDeletePipelineOptionalParams + options?: PipelineDeletePipelineOptionalParams ): Promise, void>> { const { span } = createSpan( "ArtifactsClient-beginDeletePipeline", @@ -328,7 +327,7 @@ export class PipelineOperationsImpl implements PipelineOperations { */ async beginDeletePipelineAndWait( pipelineName: string, - options?: PipelineOperationsDeletePipelineOptionalParams + options?: PipelineDeletePipelineOptionalParams ): Promise { const poller = await this.beginDeletePipeline(pipelineName, options); return poller.pollUntilDone(); @@ -343,7 +342,7 @@ export class PipelineOperationsImpl implements PipelineOperations { async beginRenamePipeline( pipelineName: string, request: 
ArtifactRenameRequest, - options?: PipelineOperationsRenamePipelineOptionalParams + options?: PipelineRenamePipelineOptionalParams ): Promise, void>> { const { span } = createSpan( "ArtifactsClient-beginRenamePipeline", @@ -419,7 +418,7 @@ export class PipelineOperationsImpl implements PipelineOperations { async beginRenamePipelineAndWait( pipelineName: string, request: ArtifactRenameRequest, - options?: PipelineOperationsRenamePipelineOptionalParams + options?: PipelineRenamePipelineOptionalParams ): Promise { const poller = await this.beginRenamePipeline( pipelineName, @@ -436,8 +435,8 @@ export class PipelineOperationsImpl implements PipelineOperations { */ async createPipelineRun( pipelineName: string, - options?: PipelineOperationsCreatePipelineRunOptionalParams - ): Promise { + options?: PipelineCreatePipelineRunOptionalParams + ): Promise { const { span } = createSpan( "ArtifactsClient-createPipelineRun", options || {} @@ -447,7 +446,7 @@ export class PipelineOperationsImpl implements PipelineOperations { { pipelineName, options }, createPipelineRunOperationSpec ); - return result as PipelineOperationsCreatePipelineRunResponse; + return result as PipelineCreatePipelineRunResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -467,8 +466,8 @@ export class PipelineOperationsImpl implements PipelineOperations { */ private async _getPipelinesByWorkspaceNext( nextLink: string, - options?: PipelineOperationsGetPipelinesByWorkspaceNextOptionalParams - ): Promise { + options?: PipelineGetPipelinesByWorkspaceNextOptionalParams + ): Promise { const { span } = createSpan( "ArtifactsClient-_getPipelinesByWorkspaceNext", options || {} @@ -478,7 +477,7 @@ export class PipelineOperationsImpl implements PipelineOperations { { nextLink, options }, getPipelinesByWorkspaceNextOperationSpec ); - return result as PipelineOperationsGetPipelinesByWorkspaceNextResponse; + return result as PipelineGetPipelinesByWorkspaceNextResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -501,10 +500,10 @@ const getPipelinesByWorkspaceOperationSpec: coreClient.OperationSpec = { bodyMapper: Mappers.PipelineListResponse }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint], headerParameters: [Parameters.accept], serializer @@ -526,11 +525,11 @@ const createOrUpdatePipelineOperationSpec: coreClient.OperationSpec = { bodyMapper: Mappers.PipelineResource }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, requestBody: Parameters.pipeline, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.pipelineName], headerParameters: [ Parameters.accept, @@ -549,10 +548,10 @@ const getPipelineOperationSpec: coreClient.OperationSpec = { }, 304: {}, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.pipelineName], headerParameters: [Parameters.accept, Parameters.ifNoneMatch], serializer @@ -566,10 +565,10 @@ const deletePipelineOperationSpec: coreClient.OperationSpec = { 202: {}, 204: {}, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - 
queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.pipelineName], headerParameters: [Parameters.accept], serializer @@ -583,11 +582,11 @@ const renamePipelineOperationSpec: coreClient.OperationSpec = { 202: {}, 204: {}, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, requestBody: Parameters.request, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.pipelineName], headerParameters: [Parameters.accept, Parameters.contentType], mediaType: "json", @@ -601,12 +600,12 @@ const createPipelineRunOperationSpec: coreClient.OperationSpec = { bodyMapper: Mappers.CreateRunResponse }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, requestBody: Parameters.parameters, queryParameters: [ - Parameters.apiVersion, + Parameters.apiVersion1, Parameters.referencePipelineRunId, Parameters.isRecovery, Parameters.startActivityName @@ -624,10 +623,10 @@ const getPipelinesByWorkspaceNextOperationSpec: coreClient.OperationSpec = { bodyMapper: Mappers.PipelineListResponse }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.nextLink], headerParameters: [Parameters.accept], serializer diff --git a/sdk/synapse/synapse-artifacts/src/operations/pipelineRunOperations.ts b/sdk/synapse/synapse-artifacts/src/operations/pipelineRunOperations.ts index 20e675915d45..952f86ff683b 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/pipelineRunOperations.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/pipelineRunOperations.ts @@ -15,16 +15,16 @@ import * as Parameters from "../models/parameters"; import { ArtifactsClientContext } from "../artifactsClientContext"; import { RunFilterParameters, - PipelineRunOperationsQueryPipelineRunsByWorkspaceOptionalParams, - PipelineRunOperationsQueryPipelineRunsByWorkspaceResponse, - PipelineRunOperationsGetPipelineRunOptionalParams, - PipelineRunOperationsGetPipelineRunResponse, - PipelineRunOperationsQueryActivityRunsOptionalParams, - PipelineRunOperationsQueryActivityRunsResponse, - PipelineRunOperationsCancelPipelineRunOptionalParams + PipelineRunQueryPipelineRunsByWorkspaceOptionalParams, + PipelineRunQueryPipelineRunsByWorkspaceResponse, + PipelineRunGetPipelineRunOptionalParams, + PipelineRunGetPipelineRunResponse, + PipelineRunQueryActivityRunsOptionalParams, + PipelineRunQueryActivityRunsResponse, + PipelineRunCancelPipelineRunOptionalParams } from "../models"; -/** Class representing a PipelineRunOperations. */ +/** Class containing PipelineRunOperations operations. 
*/ export class PipelineRunOperationsImpl implements PipelineRunOperations { private readonly client: ArtifactsClientContext; @@ -43,8 +43,8 @@ export class PipelineRunOperationsImpl implements PipelineRunOperations { */ async queryPipelineRunsByWorkspace( filterParameters: RunFilterParameters, - options?: PipelineRunOperationsQueryPipelineRunsByWorkspaceOptionalParams - ): Promise { + options?: PipelineRunQueryPipelineRunsByWorkspaceOptionalParams + ): Promise { const { span } = createSpan( "ArtifactsClient-queryPipelineRunsByWorkspace", options || {} @@ -54,7 +54,7 @@ export class PipelineRunOperationsImpl implements PipelineRunOperations { { filterParameters, options }, queryPipelineRunsByWorkspaceOperationSpec ); - return result as PipelineRunOperationsQueryPipelineRunsByWorkspaceResponse; + return result as PipelineRunQueryPipelineRunsByWorkspaceResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -73,8 +73,8 @@ export class PipelineRunOperationsImpl implements PipelineRunOperations { */ async getPipelineRun( runId: string, - options?: PipelineRunOperationsGetPipelineRunOptionalParams - ): Promise { + options?: PipelineRunGetPipelineRunOptionalParams + ): Promise { const { span } = createSpan( "ArtifactsClient-getPipelineRun", options || {} @@ -84,7 +84,7 @@ export class PipelineRunOperationsImpl implements PipelineRunOperations { { runId, options }, getPipelineRunOperationSpec ); - return result as PipelineRunOperationsGetPipelineRunResponse; + return result as PipelineRunGetPipelineRunResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -107,8 +107,8 @@ export class PipelineRunOperationsImpl implements PipelineRunOperations { pipelineName: string, runId: string, filterParameters: RunFilterParameters, - options?: PipelineRunOperationsQueryActivityRunsOptionalParams - ): Promise { + options?: PipelineRunQueryActivityRunsOptionalParams + ): Promise { const { span } = createSpan( "ArtifactsClient-queryActivityRuns", options || {} @@ -118,7 +118,7 @@ export class PipelineRunOperationsImpl implements PipelineRunOperations { { pipelineName, runId, filterParameters, options }, queryActivityRunsOperationSpec ); - return result as PipelineRunOperationsQueryActivityRunsResponse; + return result as PipelineRunQueryActivityRunsResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -137,7 +137,7 @@ export class PipelineRunOperationsImpl implements PipelineRunOperations { */ async cancelPipelineRun( runId: string, - options?: PipelineRunOperationsCancelPipelineRunOptionalParams + options?: PipelineRunCancelPipelineRunOptionalParams ): Promise { const { span } = createSpan( "ArtifactsClient-cancelPipelineRun", @@ -171,11 +171,11 @@ const queryPipelineRunsByWorkspaceOperationSpec: coreClient.OperationSpec = { bodyMapper: Mappers.PipelineRunsQueryResponse }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, requestBody: Parameters.filterParameters, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint], headerParameters: [Parameters.accept, Parameters.contentType], mediaType: "json", @@ -189,10 +189,10 @@ const getPipelineRunOperationSpec: coreClient.OperationSpec = { bodyMapper: Mappers.PipelineRun }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], 
   urlParameters: [Parameters.endpoint, Parameters.runId],
   headerParameters: [Parameters.accept],
   serializer
@@ -205,11 +205,11 @@ const queryActivityRunsOperationSpec: coreClient.OperationSpec = {
       bodyMapper: Mappers.ActivityRunsQueryResponse
     },
     default: {
-      bodyMapper: Mappers.CloudError
+      bodyMapper: Mappers.CloudErrorAutoGenerated
     }
   },
   requestBody: Parameters.filterParameters,
-  queryParameters: [Parameters.apiVersion],
+  queryParameters: [Parameters.apiVersion1],
   urlParameters: [
     Parameters.endpoint,
     Parameters.pipelineName,
@@ -225,10 +225,10 @@ const cancelPipelineRunOperationSpec: coreClient.OperationSpec = {
   responses: {
     200: {},
     default: {
-      bodyMapper: Mappers.CloudError
+      bodyMapper: Mappers.CloudErrorAutoGenerated
     }
   },
-  queryParameters: [Parameters.apiVersion, Parameters.isRecursive],
+  queryParameters: [Parameters.apiVersion1, Parameters.isRecursive],
   urlParameters: [Parameters.endpoint, Parameters.runId],
   headerParameters: [Parameters.accept],
   serializer
diff --git a/sdk/synapse/synapse-artifacts/src/operations/sparkConfigurationOperations.ts b/sdk/synapse/synapse-artifacts/src/operations/sparkConfigurationOperations.ts
new file mode 100644
index 000000000000..3cf210a2807e
--- /dev/null
+++ b/sdk/synapse/synapse-artifacts/src/operations/sparkConfigurationOperations.ts
@@ -0,0 +1,589 @@
+/*
+ * Copyright (c) Microsoft Corporation.
+ * Licensed under the MIT License.
+ *
+ * Code generated by Microsoft (R) AutoRest Code Generator.
+ * Changes may cause incorrect behavior and will be lost if the code is regenerated.
+ */
+
+import { createSpan } from "../tracing";
+import { PagedAsyncIterableIterator } from "@azure/core-paging";
+import { SparkConfigurationOperations } from "../operationsInterfaces";
+import * as coreClient from "@azure/core-client";
+import * as coreTracing from "@azure/core-tracing";
+import * as Mappers from "../models/mappers";
+import * as Parameters from "../models/parameters";
+import { ArtifactsClientContext } from "../artifactsClientContext";
+import { PollerLike, PollOperationState, LroEngine } from "@azure/core-lro";
+import { LroImpl } from "../lroImpl";
+import {
+  SparkConfigurationResource,
+  SparkConfigurationGetSparkConfigurationsByWorkspaceNextOptionalParams,
+  SparkConfigurationGetSparkConfigurationsByWorkspaceOptionalParams,
+  SparkConfigurationGetSparkConfigurationsByWorkspaceResponse,
+  SparkConfigurationCreateOrUpdateSparkConfigurationOptionalParams,
+  SparkConfigurationCreateOrUpdateSparkConfigurationResponse,
+  SparkConfigurationGetSparkConfigurationOptionalParams,
+  SparkConfigurationGetSparkConfigurationResponse,
+  SparkConfigurationDeleteSparkConfigurationOptionalParams,
+  ArtifactRenameRequest,
+  SparkConfigurationRenameSparkConfigurationOptionalParams,
+  SparkConfigurationGetSparkConfigurationsByWorkspaceNextResponse
+} from "../models";
+
+/// <reference lib="esnext.asynciterable" />
+/** Class containing SparkConfigurationOperations operations. */
+export class SparkConfigurationOperationsImpl
+  implements SparkConfigurationOperations {
+  private readonly client: ArtifactsClientContext;
+
+  /**
+   * Initialize a new instance of the class SparkConfigurationOperations class.
+   * @param client Reference to the service client
+   */
+  constructor(client: ArtifactsClientContext) {
+    this.client = client;
+  }
+
+  /**
+   * Lists sparkconfigurations.
+   * @param options The options parameters.
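+   *
+   * Illustrative sketch (not part of the generated code), assuming `context` is
+   * an existing ArtifactsClientContext:
+   *
+   *   const ops = new SparkConfigurationOperationsImpl(context);
+   *   for await (const config of ops.listSparkConfigurationsByWorkspace()) {
+   *     console.log(config.name);
+   *   }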
+   */
+  public listSparkConfigurationsByWorkspace(
+    options?: SparkConfigurationGetSparkConfigurationsByWorkspaceOptionalParams
+  ): PagedAsyncIterableIterator<SparkConfigurationResource> {
+    const iter = this.getSparkConfigurationsByWorkspacePagingAll(options);
+    return {
+      next() {
+        return iter.next();
+      },
+      [Symbol.asyncIterator]() {
+        return this;
+      },
+      byPage: () => {
+        return this.getSparkConfigurationsByWorkspacePagingPage(options);
+      }
+    };
+  }
+
+  private async *getSparkConfigurationsByWorkspacePagingPage(
+    options?: SparkConfigurationGetSparkConfigurationsByWorkspaceOptionalParams
+  ): AsyncIterableIterator<SparkConfigurationResource[]> {
+    let result = await this._getSparkConfigurationsByWorkspace(options);
+    yield result.value || [];
+    let continuationToken = result.nextLink;
+    while (continuationToken) {
+      result = await this._getSparkConfigurationsByWorkspaceNext(
+        continuationToken,
+        options
+      );
+      continuationToken = result.nextLink;
+      yield result.value || [];
+    }
+  }
+
+  private async *getSparkConfigurationsByWorkspacePagingAll(
+    options?: SparkConfigurationGetSparkConfigurationsByWorkspaceOptionalParams
+  ): AsyncIterableIterator<SparkConfigurationResource> {
+    for await (const page of this.getSparkConfigurationsByWorkspacePagingPage(
+      options
+    )) {
+      yield* page;
+    }
+  }
+
+  /**
+   * Lists sparkconfigurations.
+   * @param options The options parameters.
+   */
+  private async _getSparkConfigurationsByWorkspace(
+    options?: SparkConfigurationGetSparkConfigurationsByWorkspaceOptionalParams
+  ): Promise<SparkConfigurationGetSparkConfigurationsByWorkspaceResponse> {
+    const { span } = createSpan(
+      "ArtifactsClient-_getSparkConfigurationsByWorkspace",
+      options || {}
+    );
+    try {
+      const result = await this.client.sendOperationRequest(
+        { options },
+        getSparkConfigurationsByWorkspaceOperationSpec
+      );
+      return result as SparkConfigurationGetSparkConfigurationsByWorkspaceResponse;
+    } catch (error) {
+      span.setStatus({
+        code: coreTracing.SpanStatusCode.UNSET,
+        message: error.message
+      });
+      throw error;
+    } finally {
+      span.end();
+    }
+  }
+
+  /**
+   * Creates or updates a sparkconfiguration.
+   * @param sparkConfigurationName The spark Configuration name.
+   * @param sparkConfiguration SparkConfiguration resource definition.
+   * @param options The options parameters.
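+   *
+   * Illustrative sketch (not part of the generated code): poll explicitly, or use
+   * the *AndWait variant below. `resource` is a hypothetical
+   * SparkConfigurationResource value supplied by the caller.
+   *
+   *   const poller = await ops.beginCreateOrUpdateSparkConfiguration(
+   *     "mySparkConfig",
+   *     resource
+   *   );
+   *   const created = await poller.pollUntilDone();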
+ */ + async beginCreateOrUpdateSparkConfiguration( + sparkConfigurationName: string, + sparkConfiguration: SparkConfigurationResource, + options?: SparkConfigurationCreateOrUpdateSparkConfigurationOptionalParams + ): Promise< + PollerLike< + PollOperationState< + SparkConfigurationCreateOrUpdateSparkConfigurationResponse + >, + SparkConfigurationCreateOrUpdateSparkConfigurationResponse + > + > { + const { span } = createSpan( + "ArtifactsClient-beginCreateOrUpdateSparkConfiguration", + options || {} + ); + const directSendOperation = async ( + args: coreClient.OperationArguments, + spec: coreClient.OperationSpec + ): Promise => { + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as SparkConfigurationCreateOrUpdateSparkConfigurationResponse; + } catch (error) { + span.setStatus({ + code: coreTracing.SpanStatusCode.UNSET, + message: error.message + }); + throw error; + } finally { + span.end(); + } + }; + const sendOperation = async ( + args: coreClient.OperationArguments, + spec: coreClient.OperationSpec + ) => { + let currentRawResponse: + | coreClient.FullOperationResponse + | undefined = undefined; + const providedCallback = args.options?.onResponse; + const callback: coreClient.RawResponseCallback = ( + rawResponse: coreClient.FullOperationResponse, + flatResponse: unknown + ) => { + currentRawResponse = rawResponse; + providedCallback?.(rawResponse, flatResponse); + }; + const updatedArgs = { + ...args, + options: { + ...args.options, + onResponse: callback + } + }; + const flatResponse = await directSendOperation(updatedArgs, spec); + return { + flatResponse, + rawResponse: { + statusCode: currentRawResponse!.status, + body: currentRawResponse!.parsedBody, + headers: currentRawResponse!.headers.toJSON() + } + }; + }; + + const lro = new LroImpl( + sendOperation, + { sparkConfigurationName, sparkConfiguration, options }, + createOrUpdateSparkConfigurationOperationSpec + ); + return new LroEngine(lro, { + resumeFrom: options?.resumeFrom, + intervalInMs: options?.updateIntervalInMs + }); + } + + /** + * Creates or updates a sparkconfiguration. + * @param sparkConfigurationName The spark Configuration name. + * @param sparkConfiguration SparkConfiguration resource definition. + * @param options The options parameters. + */ + async beginCreateOrUpdateSparkConfigurationAndWait( + sparkConfigurationName: string, + sparkConfiguration: SparkConfigurationResource, + options?: SparkConfigurationCreateOrUpdateSparkConfigurationOptionalParams + ): Promise { + const poller = await this.beginCreateOrUpdateSparkConfiguration( + sparkConfigurationName, + sparkConfiguration, + options + ); + return poller.pollUntilDone(); + } + + /** + * Gets a sparkConfiguration. + * @param sparkConfigurationName The spark Configuration name. + * @param options The options parameters. + */ + async getSparkConfiguration( + sparkConfigurationName: string, + options?: SparkConfigurationGetSparkConfigurationOptionalParams + ): Promise { + const { span } = createSpan( + "ArtifactsClient-getSparkConfiguration", + options || {} + ); + try { + const result = await this.client.sendOperationRequest( + { sparkConfigurationName, options }, + getSparkConfigurationOperationSpec + ); + return result as SparkConfigurationGetSparkConfigurationResponse; + } catch (error) { + span.setStatus({ + code: coreTracing.SpanStatusCode.UNSET, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + /** + * Deletes a sparkConfiguration. 
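+   * The delete is exposed as a long-running operation: callers can await the
+   * returned poller's `pollUntilDone()`, or persist its state and resume later
+   * through `options.resumeFrom` (see the `LroEngine` wiring below).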
+ * @param sparkConfigurationName The spark Configuration name. + * @param options The options parameters. + */ + async beginDeleteSparkConfiguration( + sparkConfigurationName: string, + options?: SparkConfigurationDeleteSparkConfigurationOptionalParams + ): Promise, void>> { + const { span } = createSpan( + "ArtifactsClient-beginDeleteSparkConfiguration", + options || {} + ); + const directSendOperation = async ( + args: coreClient.OperationArguments, + spec: coreClient.OperationSpec + ): Promise => { + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as void; + } catch (error) { + span.setStatus({ + code: coreTracing.SpanStatusCode.UNSET, + message: error.message + }); + throw error; + } finally { + span.end(); + } + }; + const sendOperation = async ( + args: coreClient.OperationArguments, + spec: coreClient.OperationSpec + ) => { + let currentRawResponse: + | coreClient.FullOperationResponse + | undefined = undefined; + const providedCallback = args.options?.onResponse; + const callback: coreClient.RawResponseCallback = ( + rawResponse: coreClient.FullOperationResponse, + flatResponse: unknown + ) => { + currentRawResponse = rawResponse; + providedCallback?.(rawResponse, flatResponse); + }; + const updatedArgs = { + ...args, + options: { + ...args.options, + onResponse: callback + } + }; + const flatResponse = await directSendOperation(updatedArgs, spec); + return { + flatResponse, + rawResponse: { + statusCode: currentRawResponse!.status, + body: currentRawResponse!.parsedBody, + headers: currentRawResponse!.headers.toJSON() + } + }; + }; + + const lro = new LroImpl( + sendOperation, + { sparkConfigurationName, options }, + deleteSparkConfigurationOperationSpec + ); + return new LroEngine(lro, { + resumeFrom: options?.resumeFrom, + intervalInMs: options?.updateIntervalInMs + }); + } + + /** + * Deletes a sparkConfiguration. + * @param sparkConfigurationName The spark Configuration name. + * @param options The options parameters. + */ + async beginDeleteSparkConfigurationAndWait( + sparkConfigurationName: string, + options?: SparkConfigurationDeleteSparkConfigurationOptionalParams + ): Promise { + const poller = await this.beginDeleteSparkConfiguration( + sparkConfigurationName, + options + ); + return poller.pollUntilDone(); + } + + /** + * Renames a sparkConfiguration. + * @param sparkConfigurationName The spark Configuration name. + * @param request proposed new name. + * @param options The options parameters. 
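+   * @example
+   * Illustrative sketch (same assumed `client` as in the listing example
+   * above):
+   * ```ts
+   * const poller = await client.sparkConfigurationOperations.beginRenameSparkConfiguration(
+   *   "mySparkConfig",
+   *   { newName: "mySparkConfigV2" }
+   * );
+   * await poller.pollUntilDone();
+   * ```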
+ */ + async beginRenameSparkConfiguration( + sparkConfigurationName: string, + request: ArtifactRenameRequest, + options?: SparkConfigurationRenameSparkConfigurationOptionalParams + ): Promise, void>> { + const { span } = createSpan( + "ArtifactsClient-beginRenameSparkConfiguration", + options || {} + ); + const directSendOperation = async ( + args: coreClient.OperationArguments, + spec: coreClient.OperationSpec + ): Promise => { + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as void; + } catch (error) { + span.setStatus({ + code: coreTracing.SpanStatusCode.UNSET, + message: error.message + }); + throw error; + } finally { + span.end(); + } + }; + const sendOperation = async ( + args: coreClient.OperationArguments, + spec: coreClient.OperationSpec + ) => { + let currentRawResponse: + | coreClient.FullOperationResponse + | undefined = undefined; + const providedCallback = args.options?.onResponse; + const callback: coreClient.RawResponseCallback = ( + rawResponse: coreClient.FullOperationResponse, + flatResponse: unknown + ) => { + currentRawResponse = rawResponse; + providedCallback?.(rawResponse, flatResponse); + }; + const updatedArgs = { + ...args, + options: { + ...args.options, + onResponse: callback + } + }; + const flatResponse = await directSendOperation(updatedArgs, spec); + return { + flatResponse, + rawResponse: { + statusCode: currentRawResponse!.status, + body: currentRawResponse!.parsedBody, + headers: currentRawResponse!.headers.toJSON() + } + }; + }; + + const lro = new LroImpl( + sendOperation, + { sparkConfigurationName, request, options }, + renameSparkConfigurationOperationSpec + ); + return new LroEngine(lro, { + resumeFrom: options?.resumeFrom, + intervalInMs: options?.updateIntervalInMs + }); + } + + /** + * Renames a sparkConfiguration. + * @param sparkConfigurationName The spark Configuration name. + * @param request proposed new name. + * @param options The options parameters. + */ + async beginRenameSparkConfigurationAndWait( + sparkConfigurationName: string, + request: ArtifactRenameRequest, + options?: SparkConfigurationRenameSparkConfigurationOptionalParams + ): Promise { + const poller = await this.beginRenameSparkConfiguration( + sparkConfigurationName, + request, + options + ); + return poller.pollUntilDone(); + } + + /** + * GetSparkConfigurationsByWorkspaceNext + * @param nextLink The nextLink from the previous successful call to the + * GetSparkConfigurationsByWorkspace method. + * @param options The options parameters. 
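+   * The `nextLink` value is treated as an opaque, fully formed URL returned by
+   * the service; the operation spec below substitutes it directly as the
+   * request path (`path: "{nextLink}"`).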
+ */ + private async _getSparkConfigurationsByWorkspaceNext( + nextLink: string, + options?: SparkConfigurationGetSparkConfigurationsByWorkspaceNextOptionalParams + ): Promise { + const { span } = createSpan( + "ArtifactsClient-_getSparkConfigurationsByWorkspaceNext", + options || {} + ); + try { + const result = await this.client.sendOperationRequest( + { nextLink, options }, + getSparkConfigurationsByWorkspaceNextOperationSpec + ); + return result as SparkConfigurationGetSparkConfigurationsByWorkspaceNextResponse; + } catch (error) { + span.setStatus({ + code: coreTracing.SpanStatusCode.UNSET, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } +} +// Operation Specifications +const serializer = coreClient.createSerializer(Mappers, /* isXml */ false); + +const getSparkConfigurationsByWorkspaceOperationSpec: coreClient.OperationSpec = { + path: "/sparkconfigurations", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.SparkConfigurationListResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept], + serializer +}; +const createOrUpdateSparkConfigurationOperationSpec: coreClient.OperationSpec = { + path: "/sparkconfigurations/{sparkConfigurationName}", + httpMethod: "PUT", + responses: { + 200: { + bodyMapper: Mappers.SparkConfigurationResource + }, + 201: { + bodyMapper: Mappers.SparkConfigurationResource + }, + 202: { + bodyMapper: Mappers.SparkConfigurationResource + }, + 204: { + bodyMapper: Mappers.SparkConfigurationResource + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.sparkConfiguration, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.sparkConfigurationName], + headerParameters: [ + Parameters.accept, + Parameters.contentType, + Parameters.ifMatch + ], + mediaType: "json", + serializer +}; +const getSparkConfigurationOperationSpec: coreClient.OperationSpec = { + path: "/sparkconfigurations/{sparkConfigurationName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.SparkConfigurationResource + }, + 304: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.sparkConfigurationName], + headerParameters: [Parameters.accept, Parameters.ifNoneMatch], + serializer +}; +const deleteSparkConfigurationOperationSpec: coreClient.OperationSpec = { + path: "/sparkconfigurations/{sparkConfigurationName}", + httpMethod: "DELETE", + responses: { + 200: {}, + 201: {}, + 202: {}, + 204: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.sparkConfigurationName], + headerParameters: [Parameters.accept], + serializer +}; +const renameSparkConfigurationOperationSpec: coreClient.OperationSpec = { + path: "/sparkconfigurations/{sparkConfigurationName}/rename", + httpMethod: "POST", + responses: { + 200: {}, + 201: {}, + 202: {}, + 204: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.request, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.sparkConfigurationName], + headerParameters: [Parameters.accept, Parameters.contentType], + mediaType: "json", + serializer +}; +const getSparkConfigurationsByWorkspaceNextOperationSpec: coreClient.OperationSpec = { 
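+  // Paging spec: the whole continuation URL stands in for the request path.
+  // The `nextLink` URL parameter is presumably declared with `skipEncoding` in
+  // models/parameters.ts (that file is outside this diff), so the URL is not
+  // re-encoded when substituted.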
+ path: "{nextLink}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.SparkConfigurationListResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.nextLink], + headerParameters: [Parameters.accept], + serializer +}; diff --git a/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinitionOperations.ts b/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinitionOperations.ts index ddf6982b3399..e164fe82fb36 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinitionOperations.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinitionOperations.ts @@ -7,7 +7,6 @@ */ import { createSpan } from "../tracing"; -import "@azure/core-paging"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; import { SparkJobDefinitionOperations } from "../operationsInterfaces"; import * as coreClient from "@azure/core-client"; @@ -19,25 +18,25 @@ import { PollerLike, PollOperationState, LroEngine } from "@azure/core-lro"; import { LroImpl } from "../lroImpl"; import { SparkJobDefinitionResource, - SparkJobDefinitionOperationsGetSparkJobDefinitionsByWorkspaceNextOptionalParams, - SparkJobDefinitionOperationsGetSparkJobDefinitionsByWorkspaceOptionalParams, - SparkJobDefinitionOperationsGetSparkJobDefinitionsByWorkspaceResponse, - SparkJobDefinitionOperationsCreateOrUpdateSparkJobDefinitionOptionalParams, - SparkJobDefinitionOperationsCreateOrUpdateSparkJobDefinitionResponse, - SparkJobDefinitionOperationsGetSparkJobDefinitionOptionalParams, - SparkJobDefinitionOperationsGetSparkJobDefinitionResponse, - SparkJobDefinitionOperationsDeleteSparkJobDefinitionOptionalParams, - SparkJobDefinitionOperationsExecuteSparkJobDefinitionOptionalParams, - SparkJobDefinitionOperationsExecuteSparkJobDefinitionResponse, + SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceNextOptionalParams, + SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceOptionalParams, + SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceResponse, + SparkJobDefinitionCreateOrUpdateSparkJobDefinitionOptionalParams, + SparkJobDefinitionCreateOrUpdateSparkJobDefinitionResponse, + SparkJobDefinitionGetSparkJobDefinitionOptionalParams, + SparkJobDefinitionGetSparkJobDefinitionResponse, + SparkJobDefinitionDeleteSparkJobDefinitionOptionalParams, + SparkJobDefinitionExecuteSparkJobDefinitionOptionalParams, + SparkJobDefinitionExecuteSparkJobDefinitionResponse, ArtifactRenameRequest, - SparkJobDefinitionOperationsRenameSparkJobDefinitionOptionalParams, - SparkJobDefinitionOperationsDebugSparkJobDefinitionOptionalParams, - SparkJobDefinitionOperationsDebugSparkJobDefinitionResponse, - SparkJobDefinitionOperationsGetSparkJobDefinitionsByWorkspaceNextResponse + SparkJobDefinitionRenameSparkJobDefinitionOptionalParams, + SparkJobDefinitionDebugSparkJobDefinitionOptionalParams, + SparkJobDefinitionDebugSparkJobDefinitionResponse, + SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceNextResponse } from "../models"; /// -/** Class representing a SparkJobDefinitionOperations. */ +/** Class containing SparkJobDefinitionOperations operations. */ export class SparkJobDefinitionOperationsImpl implements SparkJobDefinitionOperations { private readonly client: ArtifactsClientContext; @@ -55,7 +54,7 @@ export class SparkJobDefinitionOperationsImpl * @param options The options parameters. 
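+   * @example
+   * Illustrative sketch (not part of the generated source; assumes an
+   * `ArtifactsClient` named `client` with this group exposed as
+   * `sparkJobDefinitionOperations` — property name assumed):
+   * ```ts
+   * for await (const jobDef of client.sparkJobDefinitionOperations.listSparkJobDefinitionsByWorkspace()) {
+   *   console.log(jobDef.name);
+   * }
+   * ```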
*/ public listSparkJobDefinitionsByWorkspace( - options?: SparkJobDefinitionOperationsGetSparkJobDefinitionsByWorkspaceOptionalParams + options?: SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceOptionalParams ): PagedAsyncIterableIterator { const iter = this.getSparkJobDefinitionsByWorkspacePagingAll(options); return { @@ -72,7 +71,7 @@ export class SparkJobDefinitionOperationsImpl } private async *getSparkJobDefinitionsByWorkspacePagingPage( - options?: SparkJobDefinitionOperationsGetSparkJobDefinitionsByWorkspaceOptionalParams + options?: SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceOptionalParams ): AsyncIterableIterator { let result = await this._getSparkJobDefinitionsByWorkspace(options); yield result.value || []; @@ -88,7 +87,7 @@ export class SparkJobDefinitionOperationsImpl } private async *getSparkJobDefinitionsByWorkspacePagingAll( - options?: SparkJobDefinitionOperationsGetSparkJobDefinitionsByWorkspaceOptionalParams + options?: SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceOptionalParams ): AsyncIterableIterator { for await (const page of this.getSparkJobDefinitionsByWorkspacePagingPage( options @@ -102,10 +101,8 @@ export class SparkJobDefinitionOperationsImpl * @param options The options parameters. */ private async _getSparkJobDefinitionsByWorkspace( - options?: SparkJobDefinitionOperationsGetSparkJobDefinitionsByWorkspaceOptionalParams - ): Promise< - SparkJobDefinitionOperationsGetSparkJobDefinitionsByWorkspaceResponse - > { + options?: SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceOptionalParams + ): Promise { const { span } = createSpan( "ArtifactsClient-_getSparkJobDefinitionsByWorkspace", options || {} @@ -115,7 +112,7 @@ export class SparkJobDefinitionOperationsImpl { options }, getSparkJobDefinitionsByWorkspaceOperationSpec ); - return result as SparkJobDefinitionOperationsGetSparkJobDefinitionsByWorkspaceResponse; + return result as SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -136,13 +133,13 @@ export class SparkJobDefinitionOperationsImpl async beginCreateOrUpdateSparkJobDefinition( sparkJobDefinitionName: string, sparkJobDefinition: SparkJobDefinitionResource, - options?: SparkJobDefinitionOperationsCreateOrUpdateSparkJobDefinitionOptionalParams + options?: SparkJobDefinitionCreateOrUpdateSparkJobDefinitionOptionalParams ): Promise< PollerLike< PollOperationState< - SparkJobDefinitionOperationsCreateOrUpdateSparkJobDefinitionResponse + SparkJobDefinitionCreateOrUpdateSparkJobDefinitionResponse >, - SparkJobDefinitionOperationsCreateOrUpdateSparkJobDefinitionResponse + SparkJobDefinitionCreateOrUpdateSparkJobDefinitionResponse > > { const { span } = createSpan( @@ -152,10 +149,10 @@ export class SparkJobDefinitionOperationsImpl const directSendOperation = async ( args: coreClient.OperationArguments, spec: coreClient.OperationSpec - ): Promise => { + ): Promise => { try { const result = await this.client.sendOperationRequest(args, spec); - return result as SparkJobDefinitionOperationsCreateOrUpdateSparkJobDefinitionResponse; + return result as SparkJobDefinitionCreateOrUpdateSparkJobDefinitionResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -219,10 +216,8 @@ export class SparkJobDefinitionOperationsImpl async beginCreateOrUpdateSparkJobDefinitionAndWait( sparkJobDefinitionName: string, sparkJobDefinition: SparkJobDefinitionResource, - options?: 
SparkJobDefinitionOperationsCreateOrUpdateSparkJobDefinitionOptionalParams - ): Promise< - SparkJobDefinitionOperationsCreateOrUpdateSparkJobDefinitionResponse - > { + options?: SparkJobDefinitionCreateOrUpdateSparkJobDefinitionOptionalParams + ): Promise { const poller = await this.beginCreateOrUpdateSparkJobDefinition( sparkJobDefinitionName, sparkJobDefinition, @@ -238,8 +233,8 @@ export class SparkJobDefinitionOperationsImpl */ async getSparkJobDefinition( sparkJobDefinitionName: string, - options?: SparkJobDefinitionOperationsGetSparkJobDefinitionOptionalParams - ): Promise { + options?: SparkJobDefinitionGetSparkJobDefinitionOptionalParams + ): Promise { const { span } = createSpan( "ArtifactsClient-getSparkJobDefinition", options || {} @@ -249,7 +244,7 @@ export class SparkJobDefinitionOperationsImpl { sparkJobDefinitionName, options }, getSparkJobDefinitionOperationSpec ); - return result as SparkJobDefinitionOperationsGetSparkJobDefinitionResponse; + return result as SparkJobDefinitionGetSparkJobDefinitionResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -268,7 +263,7 @@ export class SparkJobDefinitionOperationsImpl */ async beginDeleteSparkJobDefinition( sparkJobDefinitionName: string, - options?: SparkJobDefinitionOperationsDeleteSparkJobDefinitionOptionalParams + options?: SparkJobDefinitionDeleteSparkJobDefinitionOptionalParams ): Promise, void>> { const { span } = createSpan( "ArtifactsClient-beginDeleteSparkJobDefinition", @@ -342,7 +337,7 @@ export class SparkJobDefinitionOperationsImpl */ async beginDeleteSparkJobDefinitionAndWait( sparkJobDefinitionName: string, - options?: SparkJobDefinitionOperationsDeleteSparkJobDefinitionOptionalParams + options?: SparkJobDefinitionDeleteSparkJobDefinitionOptionalParams ): Promise { const poller = await this.beginDeleteSparkJobDefinition( sparkJobDefinitionName, @@ -358,13 +353,11 @@ export class SparkJobDefinitionOperationsImpl */ async beginExecuteSparkJobDefinition( sparkJobDefinitionName: string, - options?: SparkJobDefinitionOperationsExecuteSparkJobDefinitionOptionalParams + options?: SparkJobDefinitionExecuteSparkJobDefinitionOptionalParams ): Promise< PollerLike< - PollOperationState< - SparkJobDefinitionOperationsExecuteSparkJobDefinitionResponse - >, - SparkJobDefinitionOperationsExecuteSparkJobDefinitionResponse + PollOperationState, + SparkJobDefinitionExecuteSparkJobDefinitionResponse > > { const { span } = createSpan( @@ -374,10 +367,10 @@ export class SparkJobDefinitionOperationsImpl const directSendOperation = async ( args: coreClient.OperationArguments, spec: coreClient.OperationSpec - ): Promise => { + ): Promise => { try { const result = await this.client.sendOperationRequest(args, spec); - return result as SparkJobDefinitionOperationsExecuteSparkJobDefinitionResponse; + return result as SparkJobDefinitionExecuteSparkJobDefinitionResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -440,8 +433,8 @@ export class SparkJobDefinitionOperationsImpl */ async beginExecuteSparkJobDefinitionAndWait( sparkJobDefinitionName: string, - options?: SparkJobDefinitionOperationsExecuteSparkJobDefinitionOptionalParams - ): Promise { + options?: SparkJobDefinitionExecuteSparkJobDefinitionOptionalParams + ): Promise { const poller = await this.beginExecuteSparkJobDefinition( sparkJobDefinitionName, options @@ -458,7 +451,7 @@ export class SparkJobDefinitionOperationsImpl async beginRenameSparkJobDefinition( sparkJobDefinitionName: string, request: 
ArtifactRenameRequest, - options?: SparkJobDefinitionOperationsRenameSparkJobDefinitionOptionalParams + options?: SparkJobDefinitionRenameSparkJobDefinitionOptionalParams ): Promise, void>> { const { span } = createSpan( "ArtifactsClient-beginRenameSparkJobDefinition", @@ -534,7 +527,7 @@ export class SparkJobDefinitionOperationsImpl async beginRenameSparkJobDefinitionAndWait( sparkJobDefinitionName: string, request: ArtifactRenameRequest, - options?: SparkJobDefinitionOperationsRenameSparkJobDefinitionOptionalParams + options?: SparkJobDefinitionRenameSparkJobDefinitionOptionalParams ): Promise { const poller = await this.beginRenameSparkJobDefinition( sparkJobDefinitionName, @@ -551,13 +544,11 @@ export class SparkJobDefinitionOperationsImpl */ async beginDebugSparkJobDefinition( sparkJobDefinitionAzureResource: SparkJobDefinitionResource, - options?: SparkJobDefinitionOperationsDebugSparkJobDefinitionOptionalParams + options?: SparkJobDefinitionDebugSparkJobDefinitionOptionalParams ): Promise< PollerLike< - PollOperationState< - SparkJobDefinitionOperationsDebugSparkJobDefinitionResponse - >, - SparkJobDefinitionOperationsDebugSparkJobDefinitionResponse + PollOperationState, + SparkJobDefinitionDebugSparkJobDefinitionResponse > > { const { span } = createSpan( @@ -567,10 +558,10 @@ export class SparkJobDefinitionOperationsImpl const directSendOperation = async ( args: coreClient.OperationArguments, spec: coreClient.OperationSpec - ): Promise => { + ): Promise => { try { const result = await this.client.sendOperationRequest(args, spec); - return result as SparkJobDefinitionOperationsDebugSparkJobDefinitionResponse; + return result as SparkJobDefinitionDebugSparkJobDefinitionResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -633,8 +624,8 @@ export class SparkJobDefinitionOperationsImpl */ async beginDebugSparkJobDefinitionAndWait( sparkJobDefinitionAzureResource: SparkJobDefinitionResource, - options?: SparkJobDefinitionOperationsDebugSparkJobDefinitionOptionalParams - ): Promise { + options?: SparkJobDefinitionDebugSparkJobDefinitionOptionalParams + ): Promise { const poller = await this.beginDebugSparkJobDefinition( sparkJobDefinitionAzureResource, options @@ -650,10 +641,8 @@ export class SparkJobDefinitionOperationsImpl */ private async _getSparkJobDefinitionsByWorkspaceNext( nextLink: string, - options?: SparkJobDefinitionOperationsGetSparkJobDefinitionsByWorkspaceNextOptionalParams - ): Promise< - SparkJobDefinitionOperationsGetSparkJobDefinitionsByWorkspaceNextResponse - > { + options?: SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceNextOptionalParams + ): Promise { const { span } = createSpan( "ArtifactsClient-_getSparkJobDefinitionsByWorkspaceNext", options || {} @@ -663,7 +652,7 @@ export class SparkJobDefinitionOperationsImpl { nextLink, options }, getSparkJobDefinitionsByWorkspaceNextOperationSpec ); - return result as SparkJobDefinitionOperationsGetSparkJobDefinitionsByWorkspaceNextResponse; + return result as SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceNextResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -686,10 +675,10 @@ const getSparkJobDefinitionsByWorkspaceOperationSpec: coreClient.OperationSpec = bodyMapper: Mappers.SparkJobDefinitionsListResponse }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint], 
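+  // Note: `apiVersion1` is the query-string constant that the pre-existing
+  // operation groups switch to in this regeneration, while the newly added
+  // groups (e.g. the sparkconfigurations specs above) use the separate
+  // `apiVersion` constant. The concrete version strings are defined in
+  // models/parameters.ts, outside this hunk.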
headerParameters: [Parameters.accept], serializer @@ -711,11 +700,11 @@ const createOrUpdateSparkJobDefinitionOperationSpec: coreClient.OperationSpec = bodyMapper: Mappers.SparkJobDefinitionResource }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, requestBody: Parameters.sparkJobDefinition, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.sparkJobDefinitionName], headerParameters: [ Parameters.accept, @@ -734,10 +723,10 @@ const getSparkJobDefinitionOperationSpec: coreClient.OperationSpec = { }, 304: {}, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.sparkJobDefinitionName], headerParameters: [Parameters.accept, Parameters.ifNoneMatch], serializer @@ -751,10 +740,10 @@ const deleteSparkJobDefinitionOperationSpec: coreClient.OperationSpec = { 202: {}, 204: {}, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.sparkJobDefinitionName], headerParameters: [Parameters.accept], serializer @@ -776,10 +765,10 @@ const executeSparkJobDefinitionOperationSpec: coreClient.OperationSpec = { bodyMapper: Mappers.SparkBatchJob }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.sparkJobDefinitionName], headerParameters: [Parameters.accept], serializer @@ -793,11 +782,11 @@ const renameSparkJobDefinitionOperationSpec: coreClient.OperationSpec = { 202: {}, 204: {}, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, requestBody: Parameters.request, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.sparkJobDefinitionName], headerParameters: [Parameters.accept, Parameters.contentType], mediaType: "json", @@ -820,11 +809,11 @@ const debugSparkJobDefinitionOperationSpec: coreClient.OperationSpec = { bodyMapper: Mappers.SparkBatchJob }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, requestBody: Parameters.sparkJobDefinitionAzureResource, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint], headerParameters: [Parameters.accept, Parameters.contentType], mediaType: "json", @@ -838,10 +827,10 @@ const getSparkJobDefinitionsByWorkspaceNextOperationSpec: coreClient.OperationSp bodyMapper: Mappers.SparkJobDefinitionsListResponse }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.nextLink], headerParameters: [Parameters.accept], serializer diff --git a/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts b/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts index 04e49084ded1..f6ac8911cb69 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts +++ 
b/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts @@ -20,7 +20,7 @@ import { SqlPoolsGetResponse } from "../models"; -/** Class representing a SqlPools. */ +/** Class containing SqlPools operations. */ export class SqlPoolsImpl implements SqlPools { private readonly client: ArtifactsClientContext; @@ -98,7 +98,7 @@ const listOperationSpec: coreClient.OperationSpec = { bodyMapper: Mappers.ErrorContract } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint], headerParameters: [Parameters.accept], serializer @@ -114,7 +114,7 @@ const getOperationSpec: coreClient.OperationSpec = { bodyMapper: Mappers.ErrorContract } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.sqlPoolName], headerParameters: [Parameters.accept], serializer diff --git a/sdk/synapse/synapse-artifacts/src/operations/sqlScriptOperations.ts b/sdk/synapse/synapse-artifacts/src/operations/sqlScriptOperations.ts index f7353615eb5a..25ab8d10ede4 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/sqlScriptOperations.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/sqlScriptOperations.ts @@ -7,7 +7,6 @@ */ import { createSpan } from "../tracing"; -import "@azure/core-paging"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; import { SqlScriptOperations } from "../operationsInterfaces"; import * as coreClient from "@azure/core-client"; @@ -19,21 +18,21 @@ import { PollerLike, PollOperationState, LroEngine } from "@azure/core-lro"; import { LroImpl } from "../lroImpl"; import { SqlScriptResource, - SqlScriptOperationsGetSqlScriptsByWorkspaceNextOptionalParams, - SqlScriptOperationsGetSqlScriptsByWorkspaceOptionalParams, - SqlScriptOperationsGetSqlScriptsByWorkspaceResponse, - SqlScriptOperationsCreateOrUpdateSqlScriptOptionalParams, - SqlScriptOperationsCreateOrUpdateSqlScriptResponse, - SqlScriptOperationsGetSqlScriptOptionalParams, - SqlScriptOperationsGetSqlScriptResponse, - SqlScriptOperationsDeleteSqlScriptOptionalParams, + SqlScriptGetSqlScriptsByWorkspaceNextOptionalParams, + SqlScriptGetSqlScriptsByWorkspaceOptionalParams, + SqlScriptGetSqlScriptsByWorkspaceResponse, + SqlScriptCreateOrUpdateSqlScriptOptionalParams, + SqlScriptCreateOrUpdateSqlScriptResponse, + SqlScriptGetSqlScriptOptionalParams, + SqlScriptGetSqlScriptResponse, + SqlScriptDeleteSqlScriptOptionalParams, ArtifactRenameRequest, - SqlScriptOperationsRenameSqlScriptOptionalParams, - SqlScriptOperationsGetSqlScriptsByWorkspaceNextResponse + SqlScriptRenameSqlScriptOptionalParams, + SqlScriptGetSqlScriptsByWorkspaceNextResponse } from "../models"; /// -/** Class representing a SqlScriptOperations. */ +/** Class containing SqlScriptOperations operations. */ export class SqlScriptOperationsImpl implements SqlScriptOperations { private readonly client: ArtifactsClientContext; @@ -50,7 +49,7 @@ export class SqlScriptOperationsImpl implements SqlScriptOperations { * @param options The options parameters. 
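+   * @example
+   * Illustrative sketch (not generated code; assumes an `ArtifactsClient`
+   * named `client` with this group exposed as `sqlScriptOperations` —
+   * property name assumed):
+   * ```ts
+   * for await (const script of client.sqlScriptOperations.listSqlScriptsByWorkspace()) {
+   *   console.log(script.name);
+   * }
+   * ```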
*/ public listSqlScriptsByWorkspace( - options?: SqlScriptOperationsGetSqlScriptsByWorkspaceOptionalParams + options?: SqlScriptGetSqlScriptsByWorkspaceOptionalParams ): PagedAsyncIterableIterator { const iter = this.getSqlScriptsByWorkspacePagingAll(options); return { @@ -67,7 +66,7 @@ export class SqlScriptOperationsImpl implements SqlScriptOperations { } private async *getSqlScriptsByWorkspacePagingPage( - options?: SqlScriptOperationsGetSqlScriptsByWorkspaceOptionalParams + options?: SqlScriptGetSqlScriptsByWorkspaceOptionalParams ): AsyncIterableIterator { let result = await this._getSqlScriptsByWorkspace(options); yield result.value || []; @@ -83,7 +82,7 @@ export class SqlScriptOperationsImpl implements SqlScriptOperations { } private async *getSqlScriptsByWorkspacePagingAll( - options?: SqlScriptOperationsGetSqlScriptsByWorkspaceOptionalParams + options?: SqlScriptGetSqlScriptsByWorkspaceOptionalParams ): AsyncIterableIterator { for await (const page of this.getSqlScriptsByWorkspacePagingPage(options)) { yield* page; @@ -95,8 +94,8 @@ export class SqlScriptOperationsImpl implements SqlScriptOperations { * @param options The options parameters. */ private async _getSqlScriptsByWorkspace( - options?: SqlScriptOperationsGetSqlScriptsByWorkspaceOptionalParams - ): Promise { + options?: SqlScriptGetSqlScriptsByWorkspaceOptionalParams + ): Promise { const { span } = createSpan( "ArtifactsClient-_getSqlScriptsByWorkspace", options || {} @@ -106,7 +105,7 @@ export class SqlScriptOperationsImpl implements SqlScriptOperations { { options }, getSqlScriptsByWorkspaceOperationSpec ); - return result as SqlScriptOperationsGetSqlScriptsByWorkspaceResponse; + return result as SqlScriptGetSqlScriptsByWorkspaceResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -127,11 +126,11 @@ export class SqlScriptOperationsImpl implements SqlScriptOperations { async beginCreateOrUpdateSqlScript( sqlScriptName: string, sqlScript: SqlScriptResource, - options?: SqlScriptOperationsCreateOrUpdateSqlScriptOptionalParams + options?: SqlScriptCreateOrUpdateSqlScriptOptionalParams ): Promise< PollerLike< - PollOperationState, - SqlScriptOperationsCreateOrUpdateSqlScriptResponse + PollOperationState, + SqlScriptCreateOrUpdateSqlScriptResponse > > { const { span } = createSpan( @@ -141,10 +140,10 @@ export class SqlScriptOperationsImpl implements SqlScriptOperations { const directSendOperation = async ( args: coreClient.OperationArguments, spec: coreClient.OperationSpec - ): Promise => { + ): Promise => { try { const result = await this.client.sendOperationRequest(args, spec); - return result as SqlScriptOperationsCreateOrUpdateSqlScriptResponse; + return result as SqlScriptCreateOrUpdateSqlScriptResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -208,8 +207,8 @@ export class SqlScriptOperationsImpl implements SqlScriptOperations { async beginCreateOrUpdateSqlScriptAndWait( sqlScriptName: string, sqlScript: SqlScriptResource, - options?: SqlScriptOperationsCreateOrUpdateSqlScriptOptionalParams - ): Promise { + options?: SqlScriptCreateOrUpdateSqlScriptOptionalParams + ): Promise { const poller = await this.beginCreateOrUpdateSqlScript( sqlScriptName, sqlScript, @@ -225,15 +224,15 @@ export class SqlScriptOperationsImpl implements SqlScriptOperations { */ async getSqlScript( sqlScriptName: string, - options?: SqlScriptOperationsGetSqlScriptOptionalParams - ): Promise { + options?: SqlScriptGetSqlScriptOptionalParams + ): Promise { const { span } = 
createSpan("ArtifactsClient-getSqlScript", options || {}); try { const result = await this.client.sendOperationRequest( { sqlScriptName, options }, getSqlScriptOperationSpec ); - return result as SqlScriptOperationsGetSqlScriptResponse; + return result as SqlScriptGetSqlScriptResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -252,7 +251,7 @@ export class SqlScriptOperationsImpl implements SqlScriptOperations { */ async beginDeleteSqlScript( sqlScriptName: string, - options?: SqlScriptOperationsDeleteSqlScriptOptionalParams + options?: SqlScriptDeleteSqlScriptOptionalParams ): Promise, void>> { const { span } = createSpan( "ArtifactsClient-beginDeleteSqlScript", @@ -326,7 +325,7 @@ export class SqlScriptOperationsImpl implements SqlScriptOperations { */ async beginDeleteSqlScriptAndWait( sqlScriptName: string, - options?: SqlScriptOperationsDeleteSqlScriptOptionalParams + options?: SqlScriptDeleteSqlScriptOptionalParams ): Promise { const poller = await this.beginDeleteSqlScript(sqlScriptName, options); return poller.pollUntilDone(); @@ -341,7 +340,7 @@ export class SqlScriptOperationsImpl implements SqlScriptOperations { async beginRenameSqlScript( sqlScriptName: string, request: ArtifactRenameRequest, - options?: SqlScriptOperationsRenameSqlScriptOptionalParams + options?: SqlScriptRenameSqlScriptOptionalParams ): Promise, void>> { const { span } = createSpan( "ArtifactsClient-beginRenameSqlScript", @@ -417,7 +416,7 @@ export class SqlScriptOperationsImpl implements SqlScriptOperations { async beginRenameSqlScriptAndWait( sqlScriptName: string, request: ArtifactRenameRequest, - options?: SqlScriptOperationsRenameSqlScriptOptionalParams + options?: SqlScriptRenameSqlScriptOptionalParams ): Promise { const poller = await this.beginRenameSqlScript( sqlScriptName, @@ -435,8 +434,8 @@ export class SqlScriptOperationsImpl implements SqlScriptOperations { */ private async _getSqlScriptsByWorkspaceNext( nextLink: string, - options?: SqlScriptOperationsGetSqlScriptsByWorkspaceNextOptionalParams - ): Promise { + options?: SqlScriptGetSqlScriptsByWorkspaceNextOptionalParams + ): Promise { const { span } = createSpan( "ArtifactsClient-_getSqlScriptsByWorkspaceNext", options || {} @@ -446,7 +445,7 @@ export class SqlScriptOperationsImpl implements SqlScriptOperations { { nextLink, options }, getSqlScriptsByWorkspaceNextOperationSpec ); - return result as SqlScriptOperationsGetSqlScriptsByWorkspaceNextResponse; + return result as SqlScriptGetSqlScriptsByWorkspaceNextResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -469,10 +468,10 @@ const getSqlScriptsByWorkspaceOperationSpec: coreClient.OperationSpec = { bodyMapper: Mappers.SqlScriptsListResponse }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint], headerParameters: [Parameters.accept], serializer @@ -494,11 +493,11 @@ const createOrUpdateSqlScriptOperationSpec: coreClient.OperationSpec = { bodyMapper: Mappers.SqlScriptResource }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, requestBody: Parameters.sqlScript, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.sqlScriptName], headerParameters: [ Parameters.accept, @@ -517,10 +516,10 @@ const getSqlScriptOperationSpec: 
coreClient.OperationSpec = { }, 304: {}, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.sqlScriptName], headerParameters: [Parameters.accept, Parameters.ifNoneMatch], serializer @@ -534,10 +533,10 @@ const deleteSqlScriptOperationSpec: coreClient.OperationSpec = { 202: {}, 204: {}, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.sqlScriptName], headerParameters: [Parameters.accept], serializer @@ -551,11 +550,11 @@ const renameSqlScriptOperationSpec: coreClient.OperationSpec = { 202: {}, 204: {}, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, requestBody: Parameters.request, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.sqlScriptName], headerParameters: [Parameters.accept, Parameters.contentType], mediaType: "json", @@ -569,10 +568,10 @@ const getSqlScriptsByWorkspaceNextOperationSpec: coreClient.OperationSpec = { bodyMapper: Mappers.SqlScriptsListResponse }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.nextLink], headerParameters: [Parameters.accept], serializer diff --git a/sdk/synapse/synapse-artifacts/src/operations/triggerOperations.ts b/sdk/synapse/synapse-artifacts/src/operations/triggerOperations.ts index 90ca51dcdbaf..30fe7947809c 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/triggerOperations.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/triggerOperations.ts @@ -7,7 +7,6 @@ */ import { createSpan } from "../tracing"; -import "@azure/core-paging"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; import { TriggerOperations } from "../operationsInterfaces"; import * as coreClient from "@azure/core-client"; @@ -19,27 +18,27 @@ import { PollerLike, PollOperationState, LroEngine } from "@azure/core-lro"; import { LroImpl } from "../lroImpl"; import { TriggerResource, - TriggerOperationsGetTriggersByWorkspaceNextOptionalParams, - TriggerOperationsGetTriggersByWorkspaceOptionalParams, - TriggerOperationsGetTriggersByWorkspaceResponse, - TriggerOperationsCreateOrUpdateTriggerOptionalParams, - TriggerOperationsCreateOrUpdateTriggerResponse, - TriggerOperationsGetTriggerOptionalParams, - TriggerOperationsGetTriggerResponse, - TriggerOperationsDeleteTriggerOptionalParams, - TriggerOperationsSubscribeTriggerToEventsOptionalParams, - TriggerOperationsSubscribeTriggerToEventsResponse, - TriggerOperationsGetEventSubscriptionStatusOptionalParams, - TriggerOperationsGetEventSubscriptionStatusResponse, - TriggerOperationsUnsubscribeTriggerFromEventsOptionalParams, - TriggerOperationsUnsubscribeTriggerFromEventsResponse, - TriggerOperationsStartTriggerOptionalParams, - TriggerOperationsStopTriggerOptionalParams, - TriggerOperationsGetTriggersByWorkspaceNextResponse + TriggerGetTriggersByWorkspaceNextOptionalParams, + TriggerGetTriggersByWorkspaceOptionalParams, + TriggerGetTriggersByWorkspaceResponse, + TriggerCreateOrUpdateTriggerOptionalParams, + 
TriggerCreateOrUpdateTriggerResponse, + TriggerGetTriggerOptionalParams, + TriggerGetTriggerResponse, + TriggerDeleteTriggerOptionalParams, + TriggerSubscribeTriggerToEventsOptionalParams, + TriggerSubscribeTriggerToEventsResponse, + TriggerGetEventSubscriptionStatusOptionalParams, + TriggerGetEventSubscriptionStatusResponse, + TriggerUnsubscribeTriggerFromEventsOptionalParams, + TriggerUnsubscribeTriggerFromEventsResponse, + TriggerStartTriggerOptionalParams, + TriggerStopTriggerOptionalParams, + TriggerGetTriggersByWorkspaceNextResponse } from "../models"; /// -/** Class representing a TriggerOperations. */ +/** Class containing TriggerOperations operations. */ export class TriggerOperationsImpl implements TriggerOperations { private readonly client: ArtifactsClientContext; @@ -56,7 +55,7 @@ export class TriggerOperationsImpl implements TriggerOperations { * @param options The options parameters. */ public listTriggersByWorkspace( - options?: TriggerOperationsGetTriggersByWorkspaceOptionalParams + options?: TriggerGetTriggersByWorkspaceOptionalParams ): PagedAsyncIterableIterator { const iter = this.getTriggersByWorkspacePagingAll(options); return { @@ -73,7 +72,7 @@ export class TriggerOperationsImpl implements TriggerOperations { } private async *getTriggersByWorkspacePagingPage( - options?: TriggerOperationsGetTriggersByWorkspaceOptionalParams + options?: TriggerGetTriggersByWorkspaceOptionalParams ): AsyncIterableIterator { let result = await this._getTriggersByWorkspace(options); yield result.value || []; @@ -89,7 +88,7 @@ export class TriggerOperationsImpl implements TriggerOperations { } private async *getTriggersByWorkspacePagingAll( - options?: TriggerOperationsGetTriggersByWorkspaceOptionalParams + options?: TriggerGetTriggersByWorkspaceOptionalParams ): AsyncIterableIterator { for await (const page of this.getTriggersByWorkspacePagingPage(options)) { yield* page; @@ -101,8 +100,8 @@ export class TriggerOperationsImpl implements TriggerOperations { * @param options The options parameters. 
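+   * Fetches the first page on behalf of the public `listTriggersByWorkspace`
+   * iterator; subsequent pages are fetched through
+   * `_getTriggersByWorkspaceNext` using the returned `nextLink`.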
*/ private async _getTriggersByWorkspace( - options?: TriggerOperationsGetTriggersByWorkspaceOptionalParams - ): Promise { + options?: TriggerGetTriggersByWorkspaceOptionalParams + ): Promise { const { span } = createSpan( "ArtifactsClient-_getTriggersByWorkspace", options || {} @@ -112,7 +111,7 @@ export class TriggerOperationsImpl implements TriggerOperations { { options }, getTriggersByWorkspaceOperationSpec ); - return result as TriggerOperationsGetTriggersByWorkspaceResponse; + return result as TriggerGetTriggersByWorkspaceResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -133,11 +132,11 @@ export class TriggerOperationsImpl implements TriggerOperations { async beginCreateOrUpdateTrigger( triggerName: string, trigger: TriggerResource, - options?: TriggerOperationsCreateOrUpdateTriggerOptionalParams + options?: TriggerCreateOrUpdateTriggerOptionalParams ): Promise< PollerLike< - PollOperationState, - TriggerOperationsCreateOrUpdateTriggerResponse + PollOperationState, + TriggerCreateOrUpdateTriggerResponse > > { const { span } = createSpan( @@ -147,10 +146,10 @@ export class TriggerOperationsImpl implements TriggerOperations { const directSendOperation = async ( args: coreClient.OperationArguments, spec: coreClient.OperationSpec - ): Promise => { + ): Promise => { try { const result = await this.client.sendOperationRequest(args, spec); - return result as TriggerOperationsCreateOrUpdateTriggerResponse; + return result as TriggerCreateOrUpdateTriggerResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -214,8 +213,8 @@ export class TriggerOperationsImpl implements TriggerOperations { async beginCreateOrUpdateTriggerAndWait( triggerName: string, trigger: TriggerResource, - options?: TriggerOperationsCreateOrUpdateTriggerOptionalParams - ): Promise { + options?: TriggerCreateOrUpdateTriggerOptionalParams + ): Promise { const poller = await this.beginCreateOrUpdateTrigger( triggerName, trigger, @@ -231,15 +230,15 @@ export class TriggerOperationsImpl implements TriggerOperations { */ async getTrigger( triggerName: string, - options?: TriggerOperationsGetTriggerOptionalParams - ): Promise { + options?: TriggerGetTriggerOptionalParams + ): Promise { const { span } = createSpan("ArtifactsClient-getTrigger", options || {}); try { const result = await this.client.sendOperationRequest( { triggerName, options }, getTriggerOperationSpec ); - return result as TriggerOperationsGetTriggerResponse; + return result as TriggerGetTriggerResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -258,7 +257,7 @@ export class TriggerOperationsImpl implements TriggerOperations { */ async beginDeleteTrigger( triggerName: string, - options?: TriggerOperationsDeleteTriggerOptionalParams + options?: TriggerDeleteTriggerOptionalParams ): Promise, void>> { const { span } = createSpan( "ArtifactsClient-beginDeleteTrigger", @@ -332,7 +331,7 @@ export class TriggerOperationsImpl implements TriggerOperations { */ async beginDeleteTriggerAndWait( triggerName: string, - options?: TriggerOperationsDeleteTriggerOptionalParams + options?: TriggerDeleteTriggerOptionalParams ): Promise { const poller = await this.beginDeleteTrigger(triggerName, options); return poller.pollUntilDone(); @@ -345,11 +344,11 @@ export class TriggerOperationsImpl implements TriggerOperations { */ async beginSubscribeTriggerToEvents( triggerName: string, - options?: TriggerOperationsSubscribeTriggerToEventsOptionalParams + options?: 
TriggerSubscribeTriggerToEventsOptionalParams ): Promise< PollerLike< - PollOperationState, - TriggerOperationsSubscribeTriggerToEventsResponse + PollOperationState, + TriggerSubscribeTriggerToEventsResponse > > { const { span } = createSpan( @@ -359,10 +358,10 @@ export class TriggerOperationsImpl implements TriggerOperations { const directSendOperation = async ( args: coreClient.OperationArguments, spec: coreClient.OperationSpec - ): Promise => { + ): Promise => { try { const result = await this.client.sendOperationRequest(args, spec); - return result as TriggerOperationsSubscribeTriggerToEventsResponse; + return result as TriggerSubscribeTriggerToEventsResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -424,8 +423,8 @@ export class TriggerOperationsImpl implements TriggerOperations { */ async beginSubscribeTriggerToEventsAndWait( triggerName: string, - options?: TriggerOperationsSubscribeTriggerToEventsOptionalParams - ): Promise { + options?: TriggerSubscribeTriggerToEventsOptionalParams + ): Promise { const poller = await this.beginSubscribeTriggerToEvents( triggerName, options @@ -440,8 +439,8 @@ export class TriggerOperationsImpl implements TriggerOperations { */ async getEventSubscriptionStatus( triggerName: string, - options?: TriggerOperationsGetEventSubscriptionStatusOptionalParams - ): Promise { + options?: TriggerGetEventSubscriptionStatusOptionalParams + ): Promise { const { span } = createSpan( "ArtifactsClient-getEventSubscriptionStatus", options || {} @@ -451,7 +450,7 @@ export class TriggerOperationsImpl implements TriggerOperations { { triggerName, options }, getEventSubscriptionStatusOperationSpec ); - return result as TriggerOperationsGetEventSubscriptionStatusResponse; + return result as TriggerGetEventSubscriptionStatusResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -470,11 +469,11 @@ export class TriggerOperationsImpl implements TriggerOperations { */ async beginUnsubscribeTriggerFromEvents( triggerName: string, - options?: TriggerOperationsUnsubscribeTriggerFromEventsOptionalParams + options?: TriggerUnsubscribeTriggerFromEventsOptionalParams ): Promise< PollerLike< - PollOperationState, - TriggerOperationsUnsubscribeTriggerFromEventsResponse + PollOperationState, + TriggerUnsubscribeTriggerFromEventsResponse > > { const { span } = createSpan( @@ -484,10 +483,10 @@ export class TriggerOperationsImpl implements TriggerOperations { const directSendOperation = async ( args: coreClient.OperationArguments, spec: coreClient.OperationSpec - ): Promise => { + ): Promise => { try { const result = await this.client.sendOperationRequest(args, spec); - return result as TriggerOperationsUnsubscribeTriggerFromEventsResponse; + return result as TriggerUnsubscribeTriggerFromEventsResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -549,8 +548,8 @@ export class TriggerOperationsImpl implements TriggerOperations { */ async beginUnsubscribeTriggerFromEventsAndWait( triggerName: string, - options?: TriggerOperationsUnsubscribeTriggerFromEventsOptionalParams - ): Promise { + options?: TriggerUnsubscribeTriggerFromEventsOptionalParams + ): Promise { const poller = await this.beginUnsubscribeTriggerFromEvents( triggerName, options @@ -565,7 +564,7 @@ export class TriggerOperationsImpl implements TriggerOperations { */ async beginStartTrigger( triggerName: string, - options?: TriggerOperationsStartTriggerOptionalParams + options?: TriggerStartTriggerOptionalParams ): Promise, 
void>> { const { span } = createSpan( "ArtifactsClient-beginStartTrigger", @@ -639,7 +638,7 @@ export class TriggerOperationsImpl implements TriggerOperations { */ async beginStartTriggerAndWait( triggerName: string, - options?: TriggerOperationsStartTriggerOptionalParams + options?: TriggerStartTriggerOptionalParams ): Promise { const poller = await this.beginStartTrigger(triggerName, options); return poller.pollUntilDone(); @@ -652,7 +651,7 @@ export class TriggerOperationsImpl implements TriggerOperations { */ async beginStopTrigger( triggerName: string, - options?: TriggerOperationsStopTriggerOptionalParams + options?: TriggerStopTriggerOptionalParams ): Promise, void>> { const { span } = createSpan( "ArtifactsClient-beginStopTrigger", @@ -726,7 +725,7 @@ export class TriggerOperationsImpl implements TriggerOperations { */ async beginStopTriggerAndWait( triggerName: string, - options?: TriggerOperationsStopTriggerOptionalParams + options?: TriggerStopTriggerOptionalParams ): Promise { const poller = await this.beginStopTrigger(triggerName, options); return poller.pollUntilDone(); @@ -739,8 +738,8 @@ export class TriggerOperationsImpl implements TriggerOperations { */ private async _getTriggersByWorkspaceNext( nextLink: string, - options?: TriggerOperationsGetTriggersByWorkspaceNextOptionalParams - ): Promise { + options?: TriggerGetTriggersByWorkspaceNextOptionalParams + ): Promise { const { span } = createSpan( "ArtifactsClient-_getTriggersByWorkspaceNext", options || {} @@ -750,7 +749,7 @@ export class TriggerOperationsImpl implements TriggerOperations { { nextLink, options }, getTriggersByWorkspaceNextOperationSpec ); - return result as TriggerOperationsGetTriggersByWorkspaceNextResponse; + return result as TriggerGetTriggersByWorkspaceNextResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -773,10 +772,10 @@ const getTriggersByWorkspaceOperationSpec: coreClient.OperationSpec = { bodyMapper: Mappers.TriggerListResponse }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint], headerParameters: [Parameters.accept], serializer @@ -798,11 +797,11 @@ const createOrUpdateTriggerOperationSpec: coreClient.OperationSpec = { bodyMapper: Mappers.TriggerResource }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, requestBody: Parameters.trigger, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.triggerName], headerParameters: [ Parameters.accept, @@ -821,10 +820,10 @@ const getTriggerOperationSpec: coreClient.OperationSpec = { }, 304: {}, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.triggerName], headerParameters: [Parameters.accept, Parameters.ifNoneMatch], serializer @@ -838,10 +837,10 @@ const deleteTriggerOperationSpec: coreClient.OperationSpec = { 202: {}, 204: {}, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.triggerName], headerParameters: [Parameters.accept], serializer @@ -863,10 +862,10 
@@ const subscribeTriggerToEventsOperationSpec: coreClient.OperationSpec = { bodyMapper: Mappers.TriggerSubscriptionOperationStatus }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.triggerName], headerParameters: [Parameters.accept], serializer @@ -879,10 +878,10 @@ const getEventSubscriptionStatusOperationSpec: coreClient.OperationSpec = { bodyMapper: Mappers.TriggerSubscriptionOperationStatus }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.triggerName], headerParameters: [Parameters.accept], serializer @@ -904,10 +903,10 @@ const unsubscribeTriggerFromEventsOperationSpec: coreClient.OperationSpec = { bodyMapper: Mappers.TriggerSubscriptionOperationStatus }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.triggerName], headerParameters: [Parameters.accept], serializer @@ -921,10 +920,10 @@ const startTriggerOperationSpec: coreClient.OperationSpec = { 202: {}, 204: {}, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.triggerName], headerParameters: [Parameters.accept], serializer @@ -938,10 +937,10 @@ const stopTriggerOperationSpec: coreClient.OperationSpec = { 202: {}, 204: {}, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.triggerName], headerParameters: [Parameters.accept], serializer @@ -954,10 +953,10 @@ const getTriggersByWorkspaceNextOperationSpec: coreClient.OperationSpec = { bodyMapper: Mappers.TriggerListResponse }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint, Parameters.nextLink], headerParameters: [Parameters.accept], serializer diff --git a/sdk/synapse/synapse-artifacts/src/operations/triggerRunOperations.ts b/sdk/synapse/synapse-artifacts/src/operations/triggerRunOperations.ts index 4efb6d4b4a47..cdbbf2a5bb8b 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/triggerRunOperations.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/triggerRunOperations.ts @@ -14,14 +14,14 @@ import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; import { ArtifactsClientContext } from "../artifactsClientContext"; import { - TriggerRunOperationsRerunTriggerInstanceOptionalParams, - TriggerRunOperationsCancelTriggerInstanceOptionalParams, + TriggerRunRerunTriggerInstanceOptionalParams, + TriggerRunCancelTriggerInstanceOptionalParams, RunFilterParameters, - TriggerRunOperationsQueryTriggerRunsByWorkspaceOptionalParams, - TriggerRunOperationsQueryTriggerRunsByWorkspaceResponse + TriggerRunQueryTriggerRunsByWorkspaceOptionalParams, + 
TriggerRunQueryTriggerRunsByWorkspaceResponse } from "../models"; -/** Class representing a TriggerRunOperations. */ +/** Class containing TriggerRunOperations operations. */ export class TriggerRunOperationsImpl implements TriggerRunOperations { private readonly client: ArtifactsClientContext; @@ -42,7 +42,7 @@ export class TriggerRunOperationsImpl implements TriggerRunOperations { async rerunTriggerInstance( triggerName: string, runId: string, - options?: TriggerRunOperationsRerunTriggerInstanceOptionalParams + options?: TriggerRunRerunTriggerInstanceOptionalParams ): Promise { const { span } = createSpan( "ArtifactsClient-rerunTriggerInstance", @@ -74,7 +74,7 @@ export class TriggerRunOperationsImpl implements TriggerRunOperations { async cancelTriggerInstance( triggerName: string, runId: string, - options?: TriggerRunOperationsCancelTriggerInstanceOptionalParams + options?: TriggerRunCancelTriggerInstanceOptionalParams ): Promise { const { span } = createSpan( "ArtifactsClient-cancelTriggerInstance", @@ -104,8 +104,8 @@ export class TriggerRunOperationsImpl implements TriggerRunOperations { */ async queryTriggerRunsByWorkspace( filterParameters: RunFilterParameters, - options?: TriggerRunOperationsQueryTriggerRunsByWorkspaceOptionalParams - ): Promise { + options?: TriggerRunQueryTriggerRunsByWorkspaceOptionalParams + ): Promise { const { span } = createSpan( "ArtifactsClient-queryTriggerRunsByWorkspace", options || {} @@ -115,7 +115,7 @@ export class TriggerRunOperationsImpl implements TriggerRunOperations { { filterParameters, options }, queryTriggerRunsByWorkspaceOperationSpec ); - return result as TriggerRunOperationsQueryTriggerRunsByWorkspaceResponse; + return result as TriggerRunQueryTriggerRunsByWorkspaceResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -136,10 +136,10 @@ const rerunTriggerInstanceOperationSpec: coreClient.OperationSpec = { responses: { 200: {}, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [ Parameters.endpoint, Parameters.runId, @@ -154,10 +154,10 @@ const cancelTriggerInstanceOperationSpec: coreClient.OperationSpec = { responses: { 200: {}, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [ Parameters.endpoint, Parameters.runId, @@ -174,11 +174,11 @@ const queryTriggerRunsByWorkspaceOperationSpec: coreClient.OperationSpec = { bodyMapper: Mappers.TriggerRunsQueryResponse }, default: { - bodyMapper: Mappers.CloudError + bodyMapper: Mappers.CloudErrorAutoGenerated } }, requestBody: Parameters.filterParameters, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint], headerParameters: [Parameters.accept, Parameters.contentType], mediaType: "json", diff --git a/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts b/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts index 91d67b61b950..e4529e4981b5 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts @@ -19,7 +19,7 @@ import { WorkspaceGitRepoManagementGetGitHubAccessTokenResponse } from "../models"; -/** Class representing a 
WorkspaceGitRepoManagement. */ +/** Class containing WorkspaceGitRepoManagement operations. */ export class WorkspaceGitRepoManagementImpl implements WorkspaceGitRepoManagement { private readonly client: ArtifactsClientContext; @@ -74,7 +74,7 @@ const getGitHubAccessTokenOperationSpec: coreClient.OperationSpec = { } }, requestBody: Parameters.gitHubAccessTokenRequest, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint], headerParameters: [ Parameters.accept, diff --git a/sdk/synapse/synapse-artifacts/src/operations/workspaceOperations.ts b/sdk/synapse/synapse-artifacts/src/operations/workspaceOperations.ts index cc9a8d559438..da2ab96a5e3d 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/workspaceOperations.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/workspaceOperations.ts @@ -13,12 +13,9 @@ import * as coreTracing from "@azure/core-tracing"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; import { ArtifactsClientContext } from "../artifactsClientContext"; -import { - WorkspaceOperationsGetOptionalParams, - WorkspaceOperationsGetResponse -} from "../models"; +import { WorkspaceGetOptionalParams, WorkspaceGetResponse } from "../models"; -/** Class representing a WorkspaceOperations. */ +/** Class containing WorkspaceOperations operations. */ export class WorkspaceOperationsImpl implements WorkspaceOperations { private readonly client: ArtifactsClientContext; @@ -35,15 +32,15 @@ export class WorkspaceOperationsImpl implements WorkspaceOperations { * @param options The options parameters. */ async get( - options?: WorkspaceOperationsGetOptionalParams - ): Promise { + options?: WorkspaceGetOptionalParams + ): Promise { const { span } = createSpan("ArtifactsClient-get", options || {}); try { const result = await this.client.sendOperationRequest( { options }, getOperationSpec ); - return result as WorkspaceOperationsGetResponse; + return result as WorkspaceGetResponse; } catch (error) { span.setStatus({ code: coreTracing.SpanStatusCode.UNSET, @@ -69,7 +66,7 @@ const getOperationSpec: coreClient.OperationSpec = { bodyMapper: Mappers.ErrorContract } }, - queryParameters: [Parameters.apiVersion], + queryParameters: [Parameters.apiVersion1], urlParameters: [Parameters.endpoint], headerParameters: [Parameters.accept], serializer diff --git a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/dataFlowDebugSession.ts b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/dataFlowDebugSession.ts index d2875320878b..a300b1e20179 100644 --- a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/dataFlowDebugSession.ts +++ b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/dataFlowDebugSession.ts @@ -6,7 +6,6 @@ * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ -import "@azure/core-paging"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; import { PollerLike, PollOperationState } from "@azure/core-lro"; import { diff --git a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/dataFlowOperations.ts b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/dataFlowOperations.ts index cad3f7092679..37a59ccfeb71 100644 --- a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/dataFlowOperations.ts +++ b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/dataFlowOperations.ts @@ -6,19 +6,18 @@ * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
*/ -import "@azure/core-paging"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; import { PollerLike, PollOperationState } from "@azure/core-lro"; import { DataFlowResource, - DataFlowOperationsGetDataFlowsByWorkspaceOptionalParams, - DataFlowOperationsCreateOrUpdateDataFlowOptionalParams, - DataFlowOperationsCreateOrUpdateDataFlowResponse, - DataFlowOperationsGetDataFlowOptionalParams, - DataFlowOperationsGetDataFlowResponse, - DataFlowOperationsDeleteDataFlowOptionalParams, + DataFlowGetDataFlowsByWorkspaceOptionalParams, + DataFlowCreateOrUpdateDataFlowOptionalParams, + DataFlowCreateOrUpdateDataFlowResponse, + DataFlowGetDataFlowOptionalParams, + DataFlowGetDataFlowResponse, + DataFlowDeleteDataFlowOptionalParams, ArtifactRenameRequest, - DataFlowOperationsRenameDataFlowOptionalParams + DataFlowRenameDataFlowOptionalParams } from "../models"; /// @@ -29,7 +28,7 @@ export interface DataFlowOperations { * @param options The options parameters. */ listDataFlowsByWorkspace( - options?: DataFlowOperationsGetDataFlowsByWorkspaceOptionalParams + options?: DataFlowGetDataFlowsByWorkspaceOptionalParams ): PagedAsyncIterableIterator; /** * Creates or updates a data flow. @@ -40,11 +39,11 @@ export interface DataFlowOperations { beginCreateOrUpdateDataFlow( dataFlowName: string, dataFlow: DataFlowResource, - options?: DataFlowOperationsCreateOrUpdateDataFlowOptionalParams + options?: DataFlowCreateOrUpdateDataFlowOptionalParams ): Promise< PollerLike< - PollOperationState, - DataFlowOperationsCreateOrUpdateDataFlowResponse + PollOperationState, + DataFlowCreateOrUpdateDataFlowResponse > >; /** @@ -56,8 +55,8 @@ export interface DataFlowOperations { beginCreateOrUpdateDataFlowAndWait( dataFlowName: string, dataFlow: DataFlowResource, - options?: DataFlowOperationsCreateOrUpdateDataFlowOptionalParams - ): Promise; + options?: DataFlowCreateOrUpdateDataFlowOptionalParams + ): Promise; /** * Gets a data flow. * @param dataFlowName The data flow name. @@ -65,8 +64,8 @@ export interface DataFlowOperations { */ getDataFlow( dataFlowName: string, - options?: DataFlowOperationsGetDataFlowOptionalParams - ): Promise; + options?: DataFlowGetDataFlowOptionalParams + ): Promise; /** * Deletes a data flow. * @param dataFlowName The data flow name. @@ -74,7 +73,7 @@ export interface DataFlowOperations { */ beginDeleteDataFlow( dataFlowName: string, - options?: DataFlowOperationsDeleteDataFlowOptionalParams + options?: DataFlowDeleteDataFlowOptionalParams ): Promise, void>>; /** * Deletes a data flow. @@ -83,7 +82,7 @@ export interface DataFlowOperations { */ beginDeleteDataFlowAndWait( dataFlowName: string, - options?: DataFlowOperationsDeleteDataFlowOptionalParams + options?: DataFlowDeleteDataFlowOptionalParams ): Promise; /** * Renames a dataflow. @@ -94,7 +93,7 @@ export interface DataFlowOperations { beginRenameDataFlow( dataFlowName: string, request: ArtifactRenameRequest, - options?: DataFlowOperationsRenameDataFlowOptionalParams + options?: DataFlowRenameDataFlowOptionalParams ): Promise, void>>; /** * Renames a dataflow. 
@@ -105,6 +104,6 @@ export interface DataFlowOperations { beginRenameDataFlowAndWait( dataFlowName: string, request: ArtifactRenameRequest, - options?: DataFlowOperationsRenameDataFlowOptionalParams + options?: DataFlowRenameDataFlowOptionalParams ): Promise; } diff --git a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/datasetOperations.ts b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/datasetOperations.ts index 0393875f527d..53e00b450cc9 100644 --- a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/datasetOperations.ts +++ b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/datasetOperations.ts @@ -6,19 +6,18 @@ * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ -import "@azure/core-paging"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; import { PollerLike, PollOperationState } from "@azure/core-lro"; import { DatasetResource, - DatasetOperationsGetDatasetsByWorkspaceOptionalParams, - DatasetOperationsCreateOrUpdateDatasetOptionalParams, - DatasetOperationsCreateOrUpdateDatasetResponse, - DatasetOperationsGetDatasetOptionalParams, - DatasetOperationsGetDatasetResponse, - DatasetOperationsDeleteDatasetOptionalParams, + DatasetGetDatasetsByWorkspaceOptionalParams, + DatasetCreateOrUpdateDatasetOptionalParams, + DatasetCreateOrUpdateDatasetResponse, + DatasetGetDatasetOptionalParams, + DatasetGetDatasetResponse, + DatasetDeleteDatasetOptionalParams, ArtifactRenameRequest, - DatasetOperationsRenameDatasetOptionalParams + DatasetRenameDatasetOptionalParams } from "../models"; /// @@ -29,7 +28,7 @@ export interface DatasetOperations { * @param options The options parameters. */ listDatasetsByWorkspace( - options?: DatasetOperationsGetDatasetsByWorkspaceOptionalParams + options?: DatasetGetDatasetsByWorkspaceOptionalParams ): PagedAsyncIterableIterator; /** * Creates or updates a dataset. @@ -40,11 +39,11 @@ export interface DatasetOperations { beginCreateOrUpdateDataset( datasetName: string, dataset: DatasetResource, - options?: DatasetOperationsCreateOrUpdateDatasetOptionalParams + options?: DatasetCreateOrUpdateDatasetOptionalParams ): Promise< PollerLike< - PollOperationState, - DatasetOperationsCreateOrUpdateDatasetResponse + PollOperationState, + DatasetCreateOrUpdateDatasetResponse > >; /** @@ -56,8 +55,8 @@ export interface DatasetOperations { beginCreateOrUpdateDatasetAndWait( datasetName: string, dataset: DatasetResource, - options?: DatasetOperationsCreateOrUpdateDatasetOptionalParams - ): Promise; + options?: DatasetCreateOrUpdateDatasetOptionalParams + ): Promise; /** * Gets a dataset. * @param datasetName The dataset name. @@ -65,8 +64,8 @@ export interface DatasetOperations { */ getDataset( datasetName: string, - options?: DatasetOperationsGetDatasetOptionalParams - ): Promise; + options?: DatasetGetDatasetOptionalParams + ): Promise; /** * Deletes a dataset. * @param datasetName The dataset name. @@ -74,7 +73,7 @@ export interface DatasetOperations { */ beginDeleteDataset( datasetName: string, - options?: DatasetOperationsDeleteDatasetOptionalParams + options?: DatasetDeleteDatasetOptionalParams ): Promise, void>>; /** * Deletes a dataset. @@ -83,7 +82,7 @@ export interface DatasetOperations { */ beginDeleteDatasetAndWait( datasetName: string, - options?: DatasetOperationsDeleteDatasetOptionalParams + options?: DatasetDeleteDatasetOptionalParams ): Promise; /** * Renames a dataset. 
@@ -94,7 +93,7 @@ export interface DatasetOperations { beginRenameDataset( datasetName: string, request: ArtifactRenameRequest, - options?: DatasetOperationsRenameDatasetOptionalParams + options?: DatasetRenameDatasetOptionalParams ): Promise<PollerLike<PollOperationState<void>, void>>; /** * Renames a dataset. @@ -105,6 +104,6 @@ beginRenameDatasetAndWait( datasetName: string, request: ArtifactRenameRequest, - options?: DatasetOperationsRenameDatasetOptionalParams + options?: DatasetRenameDatasetOptionalParams ): Promise<void>; } diff --git a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/index.ts b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/index.ts index 44e9f4f8cb66..8fb93b0261f2 100644 --- a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/index.ts +++ b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/index.ts @@ -6,20 +6,24 @@ * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ -export * from "./linkedServiceOperations"; +export * from "./kqlScripts"; +export * from "./kqlScriptOperations"; +export * from "./sparkConfigurationOperations"; +export * from "./bigDataPools"; +export * from "./dataFlowOperations"; +export * from "./dataFlowDebugSession"; export * from "./datasetOperations"; +export * from "./workspaceGitRepoManagement"; +export * from "./integrationRuntimes"; +export * from "./library"; +export * from "./linkedServiceOperations"; +export * from "./notebookOperations"; +export * from "./notebookOperationResult"; export * from "./pipelineOperations"; export * from "./pipelineRunOperations"; +export * from "./sparkJobDefinitionOperations"; +export * from "./sqlPools"; +export * from "./sqlScriptOperations"; export * from "./triggerOperations"; export * from "./triggerRunOperations"; -export * from "./dataFlowOperations"; -export * from "./dataFlowDebugSession"; -export * from "./sqlScriptOperations"; -export * from "./sparkJobDefinitionOperations"; -export * from "./notebookOperations"; export * from "./workspaceOperations"; -export * from "./sqlPools"; -export * from "./bigDataPools"; -export * from "./integrationRuntimes"; -export * from "./library"; -export * from "./workspaceGitRepoManagement"; diff --git a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/kqlScriptOperations.ts b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/kqlScriptOperations.ts new file mode 100644 index 000000000000..405b9fd22b0b --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/kqlScriptOperations.ts @@ -0,0 +1,99 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ + +import { PollerLike, PollOperationState } from "@azure/core-lro"; +import { + KqlScriptResource, + KqlScriptCreateOrUpdateOptionalParams, + KqlScriptCreateOrUpdateResponse, + KqlScriptGetByNameOptionalParams, + KqlScriptGetByNameResponse, + KqlScriptDeleteByNameOptionalParams, + ArtifactRenameRequest, + KqlScriptRenameOptionalParams +} from "../models"; + +/** Interface representing a KqlScriptOperations. */ +export interface KqlScriptOperations { + /** + * Creates or updates a KQL Script + * @param kqlScriptName KQL script name + * @param kqlScript KQL script + * @param options The options parameters.
+ */ + beginCreateOrUpdate( + kqlScriptName: string, + kqlScript: KqlScriptResource, + options?: KqlScriptCreateOrUpdateOptionalParams + ): Promise< + PollerLike< + PollOperationState<KqlScriptCreateOrUpdateResponse>, + KqlScriptCreateOrUpdateResponse + > + >; + /** + * Creates or updates a KQL Script + * @param kqlScriptName KQL script name + * @param kqlScript KQL script + * @param options The options parameters. + */ + beginCreateOrUpdateAndWait( + kqlScriptName: string, + kqlScript: KqlScriptResource, + options?: KqlScriptCreateOrUpdateOptionalParams + ): Promise<KqlScriptCreateOrUpdateResponse>; + /** + * Get KQL script by name + * @param kqlScriptName KQL script name + * @param options The options parameters. + */ + getByName( + kqlScriptName: string, + options?: KqlScriptGetByNameOptionalParams + ): Promise<KqlScriptGetByNameResponse>; + /** + * Delete KQL script by name + * @param kqlScriptName KQL script name + * @param options The options parameters. + */ + beginDeleteByName( + kqlScriptName: string, + options?: KqlScriptDeleteByNameOptionalParams + ): Promise<PollerLike<PollOperationState<void>, void>>; + /** + * Delete KQL script by name + * @param kqlScriptName KQL script name + * @param options The options parameters. + */ + beginDeleteByNameAndWait( + kqlScriptName: string, + options?: KqlScriptDeleteByNameOptionalParams + ): Promise<void>; + /** + * Rename KQL script + * @param kqlScriptName KQL script name + * @param renameRequest Rename request + * @param options The options parameters. + */ + beginRename( + kqlScriptName: string, + renameRequest: ArtifactRenameRequest, + options?: KqlScriptRenameOptionalParams + ): Promise<PollerLike<PollOperationState<void>, void>>; + /** + * Rename KQL script + * @param kqlScriptName KQL script name + * @param renameRequest Rename request + * @param options The options parameters. + */ + beginRenameAndWait( + kqlScriptName: string, + renameRequest: ArtifactRenameRequest, + options?: KqlScriptRenameOptionalParams + ): Promise<void>; +} diff --git a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/kqlScripts.ts b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/kqlScripts.ts new file mode 100644 index 000000000000..bacfff4538b3 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/kqlScripts.ts @@ -0,0 +1,22 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ + +import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import { KqlScriptResource, KqlScriptsGetAllOptionalParams } from "../models"; + +/// <reference lib="esnext.asynciterable" /> +/** Interface representing a KqlScripts. */ +export interface KqlScripts { + /** + * Get all KQL scripts + * @param options The options parameters. + */ + listAll( + options?: KqlScriptsGetAllOptionalParams + ): PagedAsyncIterableIterator<KqlScriptResource>; +} diff --git a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/library.ts b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/library.ts index 91db2d886524..664f100a49fd 100644 --- a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/library.ts +++ b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/library.ts @@ -6,7 +6,6 @@ * Changes may cause incorrect behavior and will be lost if the code is regenerated.
*/ -import "@azure/core-paging"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; import * as coreRestPipeline from "@azure/core-rest-pipeline"; import { PollerLike, PollOperationState } from "@azure/core-lro"; diff --git a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/linkedServiceOperations.ts b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/linkedServiceOperations.ts index e11bf9026be1..97a69258b1c1 100644 --- a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/linkedServiceOperations.ts +++ b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/linkedServiceOperations.ts @@ -6,19 +6,18 @@ * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ -import "@azure/core-paging"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; import { PollerLike, PollOperationState } from "@azure/core-lro"; import { LinkedServiceResource, - LinkedServiceOperationsGetLinkedServicesByWorkspaceOptionalParams, - LinkedServiceOperationsCreateOrUpdateLinkedServiceOptionalParams, - LinkedServiceOperationsCreateOrUpdateLinkedServiceResponse, - LinkedServiceOperationsGetLinkedServiceOptionalParams, - LinkedServiceOperationsGetLinkedServiceResponse, - LinkedServiceOperationsDeleteLinkedServiceOptionalParams, + LinkedServiceGetLinkedServicesByWorkspaceOptionalParams, + LinkedServiceCreateOrUpdateLinkedServiceOptionalParams, + LinkedServiceCreateOrUpdateLinkedServiceResponse, + LinkedServiceGetLinkedServiceOptionalParams, + LinkedServiceGetLinkedServiceResponse, + LinkedServiceDeleteLinkedServiceOptionalParams, ArtifactRenameRequest, - LinkedServiceOperationsRenameLinkedServiceOptionalParams + LinkedServiceRenameLinkedServiceOptionalParams } from "../models"; /// @@ -29,7 +28,7 @@ export interface LinkedServiceOperations { * @param options The options parameters. */ listLinkedServicesByWorkspace( - options?: LinkedServiceOperationsGetLinkedServicesByWorkspaceOptionalParams + options?: LinkedServiceGetLinkedServicesByWorkspaceOptionalParams ): PagedAsyncIterableIterator; /** * Creates or updates a linked service. @@ -40,13 +39,11 @@ export interface LinkedServiceOperations { beginCreateOrUpdateLinkedService( linkedServiceName: string, linkedService: LinkedServiceResource, - options?: LinkedServiceOperationsCreateOrUpdateLinkedServiceOptionalParams + options?: LinkedServiceCreateOrUpdateLinkedServiceOptionalParams ): Promise< PollerLike< - PollOperationState< - LinkedServiceOperationsCreateOrUpdateLinkedServiceResponse - >, - LinkedServiceOperationsCreateOrUpdateLinkedServiceResponse + PollOperationState, + LinkedServiceCreateOrUpdateLinkedServiceResponse > >; /** @@ -58,8 +55,8 @@ export interface LinkedServiceOperations { beginCreateOrUpdateLinkedServiceAndWait( linkedServiceName: string, linkedService: LinkedServiceResource, - options?: LinkedServiceOperationsCreateOrUpdateLinkedServiceOptionalParams - ): Promise; + options?: LinkedServiceCreateOrUpdateLinkedServiceOptionalParams + ): Promise; /** * Gets a linked service. * @param linkedServiceName The linked service name. @@ -67,8 +64,8 @@ export interface LinkedServiceOperations { */ getLinkedService( linkedServiceName: string, - options?: LinkedServiceOperationsGetLinkedServiceOptionalParams - ): Promise; + options?: LinkedServiceGetLinkedServiceOptionalParams + ): Promise; /** * Deletes a linked service. * @param linkedServiceName The linked service name. 
@@ -76,7 +73,7 @@ export interface LinkedServiceOperations { */ beginDeleteLinkedService( linkedServiceName: string, - options?: LinkedServiceOperationsDeleteLinkedServiceOptionalParams + options?: LinkedServiceDeleteLinkedServiceOptionalParams ): Promise, void>>; /** * Deletes a linked service. @@ -85,7 +82,7 @@ export interface LinkedServiceOperations { */ beginDeleteLinkedServiceAndWait( linkedServiceName: string, - options?: LinkedServiceOperationsDeleteLinkedServiceOptionalParams + options?: LinkedServiceDeleteLinkedServiceOptionalParams ): Promise; /** * Renames a linked service. @@ -96,7 +93,7 @@ export interface LinkedServiceOperations { beginRenameLinkedService( linkedServiceName: string, request: ArtifactRenameRequest, - options?: LinkedServiceOperationsRenameLinkedServiceOptionalParams + options?: LinkedServiceRenameLinkedServiceOptionalParams ): Promise, void>>; /** * Renames a linked service. @@ -107,6 +104,6 @@ export interface LinkedServiceOperations { beginRenameLinkedServiceAndWait( linkedServiceName: string, request: ArtifactRenameRequest, - options?: LinkedServiceOperationsRenameLinkedServiceOptionalParams + options?: LinkedServiceRenameLinkedServiceOptionalParams ): Promise; } diff --git a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/notebookOperationResult.ts b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/notebookOperationResult.ts new file mode 100644 index 000000000000..9b587927a9c4 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/notebookOperationResult.ts @@ -0,0 +1,22 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ + +import { NotebookOperationResultGetOptionalParams } from "../models"; + +/** Interface representing a NotebookOperationResult. */ +export interface NotebookOperationResult { + /** + * Get notebook operation result + * @param operationId Operation ID. + * @param options The options parameters. + */ + get( + operationId: string, + options?: NotebookOperationResultGetOptionalParams + ): Promise; +} diff --git a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/notebookOperations.ts b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/notebookOperations.ts index 6ab6ca2317b7..4d650b5b805d 100644 --- a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/notebookOperations.ts +++ b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/notebookOperations.ts @@ -6,20 +6,19 @@ * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
*/ -import "@azure/core-paging"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; import { PollerLike, PollOperationState } from "@azure/core-lro"; import { NotebookResource, - NotebookOperationsGetNotebooksByWorkspaceOptionalParams, - NotebookOperationsGetNotebookSummaryByWorkSpaceOptionalParams, - NotebookOperationsCreateOrUpdateNotebookOptionalParams, - NotebookOperationsCreateOrUpdateNotebookResponse, - NotebookOperationsGetNotebookOptionalParams, - NotebookOperationsGetNotebookResponse, - NotebookOperationsDeleteNotebookOptionalParams, + NotebookGetNotebooksByWorkspaceOptionalParams, + NotebookGetNotebookSummaryByWorkSpaceOptionalParams, + NotebookCreateOrUpdateNotebookOptionalParams, + NotebookCreateOrUpdateNotebookResponse, + NotebookGetNotebookOptionalParams, + NotebookGetNotebookResponse, + NotebookDeleteNotebookOptionalParams, ArtifactRenameRequest, - NotebookOperationsRenameNotebookOptionalParams + NotebookRenameNotebookOptionalParams } from "../models"; /// @@ -30,14 +29,14 @@ export interface NotebookOperations { * @param options The options parameters. */ listNotebooksByWorkspace( - options?: NotebookOperationsGetNotebooksByWorkspaceOptionalParams + options?: NotebookGetNotebooksByWorkspaceOptionalParams ): PagedAsyncIterableIterator; /** * Lists a summary of Notebooks. * @param options The options parameters. */ listNotebookSummaryByWorkSpace( - options?: NotebookOperationsGetNotebookSummaryByWorkSpaceOptionalParams + options?: NotebookGetNotebookSummaryByWorkSpaceOptionalParams ): PagedAsyncIterableIterator; /** * Creates or updates a Note Book. @@ -48,11 +47,11 @@ export interface NotebookOperations { beginCreateOrUpdateNotebook( notebookName: string, notebook: NotebookResource, - options?: NotebookOperationsCreateOrUpdateNotebookOptionalParams + options?: NotebookCreateOrUpdateNotebookOptionalParams ): Promise< PollerLike< - PollOperationState, - NotebookOperationsCreateOrUpdateNotebookResponse + PollOperationState, + NotebookCreateOrUpdateNotebookResponse > >; /** @@ -64,8 +63,8 @@ export interface NotebookOperations { beginCreateOrUpdateNotebookAndWait( notebookName: string, notebook: NotebookResource, - options?: NotebookOperationsCreateOrUpdateNotebookOptionalParams - ): Promise; + options?: NotebookCreateOrUpdateNotebookOptionalParams + ): Promise; /** * Gets a Note Book. * @param notebookName The notebook name. @@ -73,8 +72,8 @@ export interface NotebookOperations { */ getNotebook( notebookName: string, - options?: NotebookOperationsGetNotebookOptionalParams - ): Promise; + options?: NotebookGetNotebookOptionalParams + ): Promise; /** * Deletes a Note book. * @param notebookName The notebook name. @@ -82,7 +81,7 @@ export interface NotebookOperations { */ beginDeleteNotebook( notebookName: string, - options?: NotebookOperationsDeleteNotebookOptionalParams + options?: NotebookDeleteNotebookOptionalParams ): Promise, void>>; /** * Deletes a Note book. @@ -91,7 +90,7 @@ export interface NotebookOperations { */ beginDeleteNotebookAndWait( notebookName: string, - options?: NotebookOperationsDeleteNotebookOptionalParams + options?: NotebookDeleteNotebookOptionalParams ): Promise; /** * Renames a notebook. @@ -102,7 +101,7 @@ export interface NotebookOperations { beginRenameNotebook( notebookName: string, request: ArtifactRenameRequest, - options?: NotebookOperationsRenameNotebookOptionalParams + options?: NotebookRenameNotebookOptionalParams ): Promise, void>>; /** * Renames a notebook. 
@@ -113,6 +112,6 @@ export interface NotebookOperations { beginRenameNotebookAndWait( notebookName: string, request: ArtifactRenameRequest, - options?: NotebookOperationsRenameNotebookOptionalParams + options?: NotebookRenameNotebookOptionalParams ): Promise; } diff --git a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/pipelineOperations.ts b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/pipelineOperations.ts index e0d54d226026..b0886471082f 100644 --- a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/pipelineOperations.ts +++ b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/pipelineOperations.ts @@ -6,21 +6,20 @@ * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ -import "@azure/core-paging"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; import { PollerLike, PollOperationState } from "@azure/core-lro"; import { PipelineResource, - PipelineOperationsGetPipelinesByWorkspaceOptionalParams, - PipelineOperationsCreateOrUpdatePipelineOptionalParams, - PipelineOperationsCreateOrUpdatePipelineResponse, - PipelineOperationsGetPipelineOptionalParams, - PipelineOperationsGetPipelineResponse, - PipelineOperationsDeletePipelineOptionalParams, + PipelineGetPipelinesByWorkspaceOptionalParams, + PipelineCreateOrUpdatePipelineOptionalParams, + PipelineCreateOrUpdatePipelineResponse, + PipelineGetPipelineOptionalParams, + PipelineGetPipelineResponse, + PipelineDeletePipelineOptionalParams, ArtifactRenameRequest, - PipelineOperationsRenamePipelineOptionalParams, - PipelineOperationsCreatePipelineRunOptionalParams, - PipelineOperationsCreatePipelineRunResponse + PipelineRenamePipelineOptionalParams, + PipelineCreatePipelineRunOptionalParams, + PipelineCreatePipelineRunResponse } from "../models"; /// @@ -31,7 +30,7 @@ export interface PipelineOperations { * @param options The options parameters. */ listPipelinesByWorkspace( - options?: PipelineOperationsGetPipelinesByWorkspaceOptionalParams + options?: PipelineGetPipelinesByWorkspaceOptionalParams ): PagedAsyncIterableIterator; /** * Creates or updates a pipeline. @@ -42,11 +41,11 @@ export interface PipelineOperations { beginCreateOrUpdatePipeline( pipelineName: string, pipeline: PipelineResource, - options?: PipelineOperationsCreateOrUpdatePipelineOptionalParams + options?: PipelineCreateOrUpdatePipelineOptionalParams ): Promise< PollerLike< - PollOperationState, - PipelineOperationsCreateOrUpdatePipelineResponse + PollOperationState, + PipelineCreateOrUpdatePipelineResponse > >; /** @@ -58,8 +57,8 @@ export interface PipelineOperations { beginCreateOrUpdatePipelineAndWait( pipelineName: string, pipeline: PipelineResource, - options?: PipelineOperationsCreateOrUpdatePipelineOptionalParams - ): Promise; + options?: PipelineCreateOrUpdatePipelineOptionalParams + ): Promise; /** * Gets a pipeline. * @param pipelineName The pipeline name. @@ -67,8 +66,8 @@ export interface PipelineOperations { */ getPipeline( pipelineName: string, - options?: PipelineOperationsGetPipelineOptionalParams - ): Promise; + options?: PipelineGetPipelineOptionalParams + ): Promise; /** * Deletes a pipeline. * @param pipelineName The pipeline name. @@ -76,7 +75,7 @@ export interface PipelineOperations { */ beginDeletePipeline( pipelineName: string, - options?: PipelineOperationsDeletePipelineOptionalParams + options?: PipelineDeletePipelineOptionalParams ): Promise, void>>; /** * Deletes a pipeline. 
@@ -85,7 +84,7 @@ export interface PipelineOperations { */ beginDeletePipelineAndWait( pipelineName: string, - options?: PipelineOperationsDeletePipelineOptionalParams + options?: PipelineDeletePipelineOptionalParams ): Promise; /** * Renames a pipeline. @@ -96,7 +95,7 @@ export interface PipelineOperations { beginRenamePipeline( pipelineName: string, request: ArtifactRenameRequest, - options?: PipelineOperationsRenamePipelineOptionalParams + options?: PipelineRenamePipelineOptionalParams ): Promise, void>>; /** * Renames a pipeline. @@ -107,7 +106,7 @@ export interface PipelineOperations { beginRenamePipelineAndWait( pipelineName: string, request: ArtifactRenameRequest, - options?: PipelineOperationsRenamePipelineOptionalParams + options?: PipelineRenamePipelineOptionalParams ): Promise; /** * Creates a run of a pipeline. @@ -116,6 +115,6 @@ export interface PipelineOperations { */ createPipelineRun( pipelineName: string, - options?: PipelineOperationsCreatePipelineRunOptionalParams - ): Promise; + options?: PipelineCreatePipelineRunOptionalParams + ): Promise; } diff --git a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/pipelineRunOperations.ts b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/pipelineRunOperations.ts index 37e91fa06b60..5941980539bb 100644 --- a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/pipelineRunOperations.ts +++ b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/pipelineRunOperations.ts @@ -8,13 +8,13 @@ import { RunFilterParameters, - PipelineRunOperationsQueryPipelineRunsByWorkspaceOptionalParams, - PipelineRunOperationsQueryPipelineRunsByWorkspaceResponse, - PipelineRunOperationsGetPipelineRunOptionalParams, - PipelineRunOperationsGetPipelineRunResponse, - PipelineRunOperationsQueryActivityRunsOptionalParams, - PipelineRunOperationsQueryActivityRunsResponse, - PipelineRunOperationsCancelPipelineRunOptionalParams + PipelineRunQueryPipelineRunsByWorkspaceOptionalParams, + PipelineRunQueryPipelineRunsByWorkspaceResponse, + PipelineRunGetPipelineRunOptionalParams, + PipelineRunGetPipelineRunResponse, + PipelineRunQueryActivityRunsOptionalParams, + PipelineRunQueryActivityRunsResponse, + PipelineRunCancelPipelineRunOptionalParams } from "../models"; /** Interface representing a PipelineRunOperations. */ @@ -26,8 +26,8 @@ export interface PipelineRunOperations { */ queryPipelineRunsByWorkspace( filterParameters: RunFilterParameters, - options?: PipelineRunOperationsQueryPipelineRunsByWorkspaceOptionalParams - ): Promise; + options?: PipelineRunQueryPipelineRunsByWorkspaceOptionalParams + ): Promise; /** * Get a pipeline run by its run ID. * @param runId The pipeline run identifier. @@ -35,8 +35,8 @@ export interface PipelineRunOperations { */ getPipelineRun( runId: string, - options?: PipelineRunOperationsGetPipelineRunOptionalParams - ): Promise; + options?: PipelineRunGetPipelineRunOptionalParams + ): Promise; /** * Query activity runs based on input filter conditions. * @param pipelineName The pipeline name. @@ -48,8 +48,8 @@ export interface PipelineRunOperations { pipelineName: string, runId: string, filterParameters: RunFilterParameters, - options?: PipelineRunOperationsQueryActivityRunsOptionalParams - ): Promise; + options?: PipelineRunQueryActivityRunsOptionalParams + ): Promise; /** * Cancel a pipeline run by its run ID. * @param runId The pipeline run identifier. 
@@ -57,6 +57,6 @@ */ cancelPipelineRun( runId: string, - options?: PipelineRunOperationsCancelPipelineRunOptionalParams + options?: PipelineRunCancelPipelineRunOptionalParams ): Promise<void>; } diff --git a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/sparkConfigurationOperations.ts b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/sparkConfigurationOperations.ts new file mode 100644 index 000000000000..e31a76ea5575 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/sparkConfigurationOperations.ts @@ -0,0 +1,111 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ + +import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import { PollerLike, PollOperationState } from "@azure/core-lro"; +import { + SparkConfigurationResource, + SparkConfigurationGetSparkConfigurationsByWorkspaceOptionalParams, + SparkConfigurationCreateOrUpdateSparkConfigurationOptionalParams, + SparkConfigurationCreateOrUpdateSparkConfigurationResponse, + SparkConfigurationGetSparkConfigurationOptionalParams, + SparkConfigurationGetSparkConfigurationResponse, + SparkConfigurationDeleteSparkConfigurationOptionalParams, + ArtifactRenameRequest, + SparkConfigurationRenameSparkConfigurationOptionalParams +} from "../models"; + +/// <reference lib="esnext.asynciterable" /> +/** Interface representing a SparkConfigurationOperations. */ +export interface SparkConfigurationOperations { + /** + * Lists sparkconfigurations. + * @param options The options parameters. + */ + listSparkConfigurationsByWorkspace( + options?: SparkConfigurationGetSparkConfigurationsByWorkspaceOptionalParams + ): PagedAsyncIterableIterator<SparkConfigurationResource>; + /** + * Creates or updates a sparkconfiguration. + * @param sparkConfigurationName The spark Configuration name. + * @param sparkConfiguration SparkConfiguration resource definition. + * @param options The options parameters. + */ + beginCreateOrUpdateSparkConfiguration( + sparkConfigurationName: string, + sparkConfiguration: SparkConfigurationResource, + options?: SparkConfigurationCreateOrUpdateSparkConfigurationOptionalParams + ): Promise< + PollerLike< + PollOperationState< + SparkConfigurationCreateOrUpdateSparkConfigurationResponse + >, + SparkConfigurationCreateOrUpdateSparkConfigurationResponse + > + >; + /** + * Creates or updates a sparkconfiguration. + * @param sparkConfigurationName The spark Configuration name. + * @param sparkConfiguration SparkConfiguration resource definition. + * @param options The options parameters. + */ + beginCreateOrUpdateSparkConfigurationAndWait( + sparkConfigurationName: string, + sparkConfiguration: SparkConfigurationResource, + options?: SparkConfigurationCreateOrUpdateSparkConfigurationOptionalParams + ): Promise<SparkConfigurationCreateOrUpdateSparkConfigurationResponse>; + /** + * Gets a sparkConfiguration. + * @param sparkConfigurationName The spark Configuration name. + * @param options The options parameters. + */ + getSparkConfiguration( + sparkConfigurationName: string, + options?: SparkConfigurationGetSparkConfigurationOptionalParams + ): Promise<SparkConfigurationGetSparkConfigurationResponse>; + /** + * Deletes a sparkConfiguration. + * @param sparkConfigurationName The spark Configuration name. + * @param options The options parameters. + */ + beginDeleteSparkConfiguration( + sparkConfigurationName: string, + options?: SparkConfigurationDeleteSparkConfigurationOptionalParams + ): Promise<PollerLike<PollOperationState<void>, void>>; + /** + * Deletes a sparkConfiguration.
+ * @param sparkConfigurationName The spark Configuration name. + * @param options The options parameters. + */ + beginDeleteSparkConfigurationAndWait( + sparkConfigurationName: string, + options?: SparkConfigurationDeleteSparkConfigurationOptionalParams + ): Promise<void>; + /** + * Renames a sparkConfiguration. + * @param sparkConfigurationName The spark Configuration name. + * @param request proposed new name. + * @param options The options parameters. + */ + beginRenameSparkConfiguration( + sparkConfigurationName: string, + request: ArtifactRenameRequest, + options?: SparkConfigurationRenameSparkConfigurationOptionalParams + ): Promise<PollerLike<PollOperationState<void>, void>>; + /** + * Renames a sparkConfiguration. + * @param sparkConfigurationName The spark Configuration name. + * @param request proposed new name. + * @param options The options parameters. + */ + beginRenameSparkConfigurationAndWait( + sparkConfigurationName: string, + request: ArtifactRenameRequest, + options?: SparkConfigurationRenameSparkConfigurationOptionalParams + ): Promise<void>; +} diff --git a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/sparkJobDefinitionOperations.ts b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/sparkJobDefinitionOperations.ts index d80f14f39824..94cf2e1a97e9 --- a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/sparkJobDefinitionOperations.ts +++ b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/sparkJobDefinitionOperations.ts @@ -6,23 +6,22 @@ * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ -import "@azure/core-paging"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; import { PollerLike, PollOperationState } from "@azure/core-lro"; import { SparkJobDefinitionResource, - SparkJobDefinitionOperationsGetSparkJobDefinitionsByWorkspaceOptionalParams, - SparkJobDefinitionOperationsCreateOrUpdateSparkJobDefinitionOptionalParams, - SparkJobDefinitionOperationsCreateOrUpdateSparkJobDefinitionResponse, - SparkJobDefinitionOperationsGetSparkJobDefinitionOptionalParams, - SparkJobDefinitionOperationsGetSparkJobDefinitionResponse, - SparkJobDefinitionOperationsDeleteSparkJobDefinitionOptionalParams, - SparkJobDefinitionOperationsExecuteSparkJobDefinitionOptionalParams, - SparkJobDefinitionOperationsExecuteSparkJobDefinitionResponse, + SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceOptionalParams, + SparkJobDefinitionCreateOrUpdateSparkJobDefinitionOptionalParams, + SparkJobDefinitionCreateOrUpdateSparkJobDefinitionResponse, + SparkJobDefinitionGetSparkJobDefinitionOptionalParams, + SparkJobDefinitionGetSparkJobDefinitionResponse, + SparkJobDefinitionDeleteSparkJobDefinitionOptionalParams, + SparkJobDefinitionExecuteSparkJobDefinitionOptionalParams, + SparkJobDefinitionExecuteSparkJobDefinitionResponse, ArtifactRenameRequest, - SparkJobDefinitionOperationsRenameSparkJobDefinitionOptionalParams, - SparkJobDefinitionOperationsDebugSparkJobDefinitionOptionalParams, - SparkJobDefinitionOperationsDebugSparkJobDefinitionResponse + SparkJobDefinitionRenameSparkJobDefinitionOptionalParams, + SparkJobDefinitionDebugSparkJobDefinitionOptionalParams, + SparkJobDefinitionDebugSparkJobDefinitionResponse } from "../models"; /// <reference lib="esnext.asynciterable" /> @@ -33,7 +32,7 @@ export interface SparkJobDefinitionOperations { * @param options The options parameters.
*/ listSparkJobDefinitionsByWorkspace( - options?: SparkJobDefinitionOperationsGetSparkJobDefinitionsByWorkspaceOptionalParams + options?: SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceOptionalParams ): PagedAsyncIterableIterator; /** * Creates or updates a Spark Job Definition. @@ -44,13 +43,13 @@ export interface SparkJobDefinitionOperations { beginCreateOrUpdateSparkJobDefinition( sparkJobDefinitionName: string, sparkJobDefinition: SparkJobDefinitionResource, - options?: SparkJobDefinitionOperationsCreateOrUpdateSparkJobDefinitionOptionalParams + options?: SparkJobDefinitionCreateOrUpdateSparkJobDefinitionOptionalParams ): Promise< PollerLike< PollOperationState< - SparkJobDefinitionOperationsCreateOrUpdateSparkJobDefinitionResponse + SparkJobDefinitionCreateOrUpdateSparkJobDefinitionResponse >, - SparkJobDefinitionOperationsCreateOrUpdateSparkJobDefinitionResponse + SparkJobDefinitionCreateOrUpdateSparkJobDefinitionResponse > >; /** @@ -62,10 +61,8 @@ export interface SparkJobDefinitionOperations { beginCreateOrUpdateSparkJobDefinitionAndWait( sparkJobDefinitionName: string, sparkJobDefinition: SparkJobDefinitionResource, - options?: SparkJobDefinitionOperationsCreateOrUpdateSparkJobDefinitionOptionalParams - ): Promise< - SparkJobDefinitionOperationsCreateOrUpdateSparkJobDefinitionResponse - >; + options?: SparkJobDefinitionCreateOrUpdateSparkJobDefinitionOptionalParams + ): Promise; /** * Gets a Spark Job Definition. * @param sparkJobDefinitionName The spark job definition name. @@ -73,8 +70,8 @@ export interface SparkJobDefinitionOperations { */ getSparkJobDefinition( sparkJobDefinitionName: string, - options?: SparkJobDefinitionOperationsGetSparkJobDefinitionOptionalParams - ): Promise; + options?: SparkJobDefinitionGetSparkJobDefinitionOptionalParams + ): Promise; /** * Deletes a Spark Job Definition. * @param sparkJobDefinitionName The spark job definition name. @@ -82,7 +79,7 @@ export interface SparkJobDefinitionOperations { */ beginDeleteSparkJobDefinition( sparkJobDefinitionName: string, - options?: SparkJobDefinitionOperationsDeleteSparkJobDefinitionOptionalParams + options?: SparkJobDefinitionDeleteSparkJobDefinitionOptionalParams ): Promise, void>>; /** * Deletes a Spark Job Definition. @@ -91,7 +88,7 @@ export interface SparkJobDefinitionOperations { */ beginDeleteSparkJobDefinitionAndWait( sparkJobDefinitionName: string, - options?: SparkJobDefinitionOperationsDeleteSparkJobDefinitionOptionalParams + options?: SparkJobDefinitionDeleteSparkJobDefinitionOptionalParams ): Promise; /** * Executes the spark job definition. @@ -100,13 +97,11 @@ export interface SparkJobDefinitionOperations { */ beginExecuteSparkJobDefinition( sparkJobDefinitionName: string, - options?: SparkJobDefinitionOperationsExecuteSparkJobDefinitionOptionalParams + options?: SparkJobDefinitionExecuteSparkJobDefinitionOptionalParams ): Promise< PollerLike< - PollOperationState< - SparkJobDefinitionOperationsExecuteSparkJobDefinitionResponse - >, - SparkJobDefinitionOperationsExecuteSparkJobDefinitionResponse + PollOperationState, + SparkJobDefinitionExecuteSparkJobDefinitionResponse > >; /** @@ -116,8 +111,8 @@ export interface SparkJobDefinitionOperations { */ beginExecuteSparkJobDefinitionAndWait( sparkJobDefinitionName: string, - options?: SparkJobDefinitionOperationsExecuteSparkJobDefinitionOptionalParams - ): Promise; + options?: SparkJobDefinitionExecuteSparkJobDefinitionOptionalParams + ): Promise; /** * Renames a sparkJobDefinition. 
* @param sparkJobDefinitionName The spark job definition name. @@ -127,7 +122,7 @@ export interface SparkJobDefinitionOperations { beginRenameSparkJobDefinition( sparkJobDefinitionName: string, request: ArtifactRenameRequest, - options?: SparkJobDefinitionOperationsRenameSparkJobDefinitionOptionalParams + options?: SparkJobDefinitionRenameSparkJobDefinitionOptionalParams ): Promise, void>>; /** * Renames a sparkJobDefinition. @@ -138,7 +133,7 @@ export interface SparkJobDefinitionOperations { beginRenameSparkJobDefinitionAndWait( sparkJobDefinitionName: string, request: ArtifactRenameRequest, - options?: SparkJobDefinitionOperationsRenameSparkJobDefinitionOptionalParams + options?: SparkJobDefinitionRenameSparkJobDefinitionOptionalParams ): Promise; /** * Debug the spark job definition. @@ -147,13 +142,11 @@ export interface SparkJobDefinitionOperations { */ beginDebugSparkJobDefinition( sparkJobDefinitionAzureResource: SparkJobDefinitionResource, - options?: SparkJobDefinitionOperationsDebugSparkJobDefinitionOptionalParams + options?: SparkJobDefinitionDebugSparkJobDefinitionOptionalParams ): Promise< PollerLike< - PollOperationState< - SparkJobDefinitionOperationsDebugSparkJobDefinitionResponse - >, - SparkJobDefinitionOperationsDebugSparkJobDefinitionResponse + PollOperationState, + SparkJobDefinitionDebugSparkJobDefinitionResponse > >; /** @@ -163,6 +156,6 @@ export interface SparkJobDefinitionOperations { */ beginDebugSparkJobDefinitionAndWait( sparkJobDefinitionAzureResource: SparkJobDefinitionResource, - options?: SparkJobDefinitionOperationsDebugSparkJobDefinitionOptionalParams - ): Promise; + options?: SparkJobDefinitionDebugSparkJobDefinitionOptionalParams + ): Promise; } diff --git a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/sqlScriptOperations.ts b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/sqlScriptOperations.ts index 29eebb5430a6..383bd64aabac 100644 --- a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/sqlScriptOperations.ts +++ b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/sqlScriptOperations.ts @@ -6,19 +6,18 @@ * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ -import "@azure/core-paging"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; import { PollerLike, PollOperationState } from "@azure/core-lro"; import { SqlScriptResource, - SqlScriptOperationsGetSqlScriptsByWorkspaceOptionalParams, - SqlScriptOperationsCreateOrUpdateSqlScriptOptionalParams, - SqlScriptOperationsCreateOrUpdateSqlScriptResponse, - SqlScriptOperationsGetSqlScriptOptionalParams, - SqlScriptOperationsGetSqlScriptResponse, - SqlScriptOperationsDeleteSqlScriptOptionalParams, + SqlScriptGetSqlScriptsByWorkspaceOptionalParams, + SqlScriptCreateOrUpdateSqlScriptOptionalParams, + SqlScriptCreateOrUpdateSqlScriptResponse, + SqlScriptGetSqlScriptOptionalParams, + SqlScriptGetSqlScriptResponse, + SqlScriptDeleteSqlScriptOptionalParams, ArtifactRenameRequest, - SqlScriptOperationsRenameSqlScriptOptionalParams + SqlScriptRenameSqlScriptOptionalParams } from "../models"; /// @@ -29,7 +28,7 @@ export interface SqlScriptOperations { * @param options The options parameters. */ listSqlScriptsByWorkspace( - options?: SqlScriptOperationsGetSqlScriptsByWorkspaceOptionalParams + options?: SqlScriptGetSqlScriptsByWorkspaceOptionalParams ): PagedAsyncIterableIterator; /** * Creates or updates a Sql Script. 
@@ -40,11 +39,11 @@ export interface SqlScriptOperations { beginCreateOrUpdateSqlScript( sqlScriptName: string, sqlScript: SqlScriptResource, - options?: SqlScriptOperationsCreateOrUpdateSqlScriptOptionalParams + options?: SqlScriptCreateOrUpdateSqlScriptOptionalParams ): Promise< PollerLike< - PollOperationState, - SqlScriptOperationsCreateOrUpdateSqlScriptResponse + PollOperationState, + SqlScriptCreateOrUpdateSqlScriptResponse > >; /** @@ -56,8 +55,8 @@ export interface SqlScriptOperations { beginCreateOrUpdateSqlScriptAndWait( sqlScriptName: string, sqlScript: SqlScriptResource, - options?: SqlScriptOperationsCreateOrUpdateSqlScriptOptionalParams - ): Promise; + options?: SqlScriptCreateOrUpdateSqlScriptOptionalParams + ): Promise; /** * Gets a sql script. * @param sqlScriptName The sql script name. @@ -65,8 +64,8 @@ export interface SqlScriptOperations { */ getSqlScript( sqlScriptName: string, - options?: SqlScriptOperationsGetSqlScriptOptionalParams - ): Promise; + options?: SqlScriptGetSqlScriptOptionalParams + ): Promise; /** * Deletes a Sql Script. * @param sqlScriptName The sql script name. @@ -74,7 +73,7 @@ export interface SqlScriptOperations { */ beginDeleteSqlScript( sqlScriptName: string, - options?: SqlScriptOperationsDeleteSqlScriptOptionalParams + options?: SqlScriptDeleteSqlScriptOptionalParams ): Promise, void>>; /** * Deletes a Sql Script. @@ -83,7 +82,7 @@ export interface SqlScriptOperations { */ beginDeleteSqlScriptAndWait( sqlScriptName: string, - options?: SqlScriptOperationsDeleteSqlScriptOptionalParams + options?: SqlScriptDeleteSqlScriptOptionalParams ): Promise; /** * Renames a sqlScript. @@ -94,7 +93,7 @@ export interface SqlScriptOperations { beginRenameSqlScript( sqlScriptName: string, request: ArtifactRenameRequest, - options?: SqlScriptOperationsRenameSqlScriptOptionalParams + options?: SqlScriptRenameSqlScriptOptionalParams ): Promise, void>>; /** * Renames a sqlScript. @@ -105,6 +104,6 @@ export interface SqlScriptOperations { beginRenameSqlScriptAndWait( sqlScriptName: string, request: ArtifactRenameRequest, - options?: SqlScriptOperationsRenameSqlScriptOptionalParams + options?: SqlScriptRenameSqlScriptOptionalParams ): Promise; } diff --git a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/triggerOperations.ts b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/triggerOperations.ts index 8c3124d0e435..7d6e0e6fc04f 100644 --- a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/triggerOperations.ts +++ b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/triggerOperations.ts @@ -6,25 +6,24 @@ * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
*/ -import "@azure/core-paging"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; import { PollerLike, PollOperationState } from "@azure/core-lro"; import { TriggerResource, - TriggerOperationsGetTriggersByWorkspaceOptionalParams, - TriggerOperationsCreateOrUpdateTriggerOptionalParams, - TriggerOperationsCreateOrUpdateTriggerResponse, - TriggerOperationsGetTriggerOptionalParams, - TriggerOperationsGetTriggerResponse, - TriggerOperationsDeleteTriggerOptionalParams, - TriggerOperationsSubscribeTriggerToEventsOptionalParams, - TriggerOperationsSubscribeTriggerToEventsResponse, - TriggerOperationsGetEventSubscriptionStatusOptionalParams, - TriggerOperationsGetEventSubscriptionStatusResponse, - TriggerOperationsUnsubscribeTriggerFromEventsOptionalParams, - TriggerOperationsUnsubscribeTriggerFromEventsResponse, - TriggerOperationsStartTriggerOptionalParams, - TriggerOperationsStopTriggerOptionalParams + TriggerGetTriggersByWorkspaceOptionalParams, + TriggerCreateOrUpdateTriggerOptionalParams, + TriggerCreateOrUpdateTriggerResponse, + TriggerGetTriggerOptionalParams, + TriggerGetTriggerResponse, + TriggerDeleteTriggerOptionalParams, + TriggerSubscribeTriggerToEventsOptionalParams, + TriggerSubscribeTriggerToEventsResponse, + TriggerGetEventSubscriptionStatusOptionalParams, + TriggerGetEventSubscriptionStatusResponse, + TriggerUnsubscribeTriggerFromEventsOptionalParams, + TriggerUnsubscribeTriggerFromEventsResponse, + TriggerStartTriggerOptionalParams, + TriggerStopTriggerOptionalParams } from "../models"; /// @@ -35,7 +34,7 @@ export interface TriggerOperations { * @param options The options parameters. */ listTriggersByWorkspace( - options?: TriggerOperationsGetTriggersByWorkspaceOptionalParams + options?: TriggerGetTriggersByWorkspaceOptionalParams ): PagedAsyncIterableIterator; /** * Creates or updates a trigger. @@ -46,11 +45,11 @@ export interface TriggerOperations { beginCreateOrUpdateTrigger( triggerName: string, trigger: TriggerResource, - options?: TriggerOperationsCreateOrUpdateTriggerOptionalParams + options?: TriggerCreateOrUpdateTriggerOptionalParams ): Promise< PollerLike< - PollOperationState, - TriggerOperationsCreateOrUpdateTriggerResponse + PollOperationState, + TriggerCreateOrUpdateTriggerResponse > >; /** @@ -62,8 +61,8 @@ export interface TriggerOperations { beginCreateOrUpdateTriggerAndWait( triggerName: string, trigger: TriggerResource, - options?: TriggerOperationsCreateOrUpdateTriggerOptionalParams - ): Promise; + options?: TriggerCreateOrUpdateTriggerOptionalParams + ): Promise; /** * Gets a trigger. * @param triggerName The trigger name. @@ -71,8 +70,8 @@ export interface TriggerOperations { */ getTrigger( triggerName: string, - options?: TriggerOperationsGetTriggerOptionalParams - ): Promise; + options?: TriggerGetTriggerOptionalParams + ): Promise; /** * Deletes a trigger. * @param triggerName The trigger name. @@ -80,7 +79,7 @@ export interface TriggerOperations { */ beginDeleteTrigger( triggerName: string, - options?: TriggerOperationsDeleteTriggerOptionalParams + options?: TriggerDeleteTriggerOptionalParams ): Promise, void>>; /** * Deletes a trigger. @@ -89,7 +88,7 @@ export interface TriggerOperations { */ beginDeleteTriggerAndWait( triggerName: string, - options?: TriggerOperationsDeleteTriggerOptionalParams + options?: TriggerDeleteTriggerOptionalParams ): Promise; /** * Subscribe event trigger to events. 
@@ -98,11 +97,11 @@ export interface TriggerOperations { */ beginSubscribeTriggerToEvents( triggerName: string, - options?: TriggerOperationsSubscribeTriggerToEventsOptionalParams + options?: TriggerSubscribeTriggerToEventsOptionalParams ): Promise< PollerLike< - PollOperationState, - TriggerOperationsSubscribeTriggerToEventsResponse + PollOperationState, + TriggerSubscribeTriggerToEventsResponse > >; /** @@ -112,8 +111,8 @@ export interface TriggerOperations { */ beginSubscribeTriggerToEventsAndWait( triggerName: string, - options?: TriggerOperationsSubscribeTriggerToEventsOptionalParams - ): Promise; + options?: TriggerSubscribeTriggerToEventsOptionalParams + ): Promise; /** * Get a trigger's event subscription status. * @param triggerName The trigger name. @@ -121,8 +120,8 @@ export interface TriggerOperations { */ getEventSubscriptionStatus( triggerName: string, - options?: TriggerOperationsGetEventSubscriptionStatusOptionalParams - ): Promise; + options?: TriggerGetEventSubscriptionStatusOptionalParams + ): Promise; /** * Unsubscribe event trigger from events. * @param triggerName The trigger name. @@ -130,11 +129,11 @@ export interface TriggerOperations { */ beginUnsubscribeTriggerFromEvents( triggerName: string, - options?: TriggerOperationsUnsubscribeTriggerFromEventsOptionalParams + options?: TriggerUnsubscribeTriggerFromEventsOptionalParams ): Promise< PollerLike< - PollOperationState, - TriggerOperationsUnsubscribeTriggerFromEventsResponse + PollOperationState, + TriggerUnsubscribeTriggerFromEventsResponse > >; /** @@ -144,8 +143,8 @@ export interface TriggerOperations { */ beginUnsubscribeTriggerFromEventsAndWait( triggerName: string, - options?: TriggerOperationsUnsubscribeTriggerFromEventsOptionalParams - ): Promise; + options?: TriggerUnsubscribeTriggerFromEventsOptionalParams + ): Promise; /** * Starts a trigger. * @param triggerName The trigger name. @@ -153,7 +152,7 @@ export interface TriggerOperations { */ beginStartTrigger( triggerName: string, - options?: TriggerOperationsStartTriggerOptionalParams + options?: TriggerStartTriggerOptionalParams ): Promise, void>>; /** * Starts a trigger. @@ -162,7 +161,7 @@ export interface TriggerOperations { */ beginStartTriggerAndWait( triggerName: string, - options?: TriggerOperationsStartTriggerOptionalParams + options?: TriggerStartTriggerOptionalParams ): Promise; /** * Stops a trigger. @@ -171,7 +170,7 @@ export interface TriggerOperations { */ beginStopTrigger( triggerName: string, - options?: TriggerOperationsStopTriggerOptionalParams + options?: TriggerStopTriggerOptionalParams ): Promise, void>>; /** * Stops a trigger. 
diff --git a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/triggerRunOperations.ts b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/triggerRunOperations.ts
index 881acdd0efc4..0c628f2cba78 100644
--- a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/triggerRunOperations.ts
+++ b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/triggerRunOperations.ts
@@ -7,11 +7,11 @@
  */
 
 import {
-  TriggerRunOperationsRerunTriggerInstanceOptionalParams,
-  TriggerRunOperationsCancelTriggerInstanceOptionalParams,
+  TriggerRunRerunTriggerInstanceOptionalParams,
+  TriggerRunCancelTriggerInstanceOptionalParams,
   RunFilterParameters,
-  TriggerRunOperationsQueryTriggerRunsByWorkspaceOptionalParams,
-  TriggerRunOperationsQueryTriggerRunsByWorkspaceResponse
+  TriggerRunQueryTriggerRunsByWorkspaceOptionalParams,
+  TriggerRunQueryTriggerRunsByWorkspaceResponse
 } from "../models";
 
 /** Interface representing a TriggerRunOperations. */
@@ -25,7 +25,7 @@ export interface TriggerRunOperations {
   rerunTriggerInstance(
     triggerName: string,
     runId: string,
-    options?: TriggerRunOperationsRerunTriggerInstanceOptionalParams
+    options?: TriggerRunRerunTriggerInstanceOptionalParams
   ): Promise<void>;
   /**
    * Cancel single trigger instance by runId.
@@ -36,7 +36,7 @@
   cancelTriggerInstance(
     triggerName: string,
     runId: string,
-    options?: TriggerRunOperationsCancelTriggerInstanceOptionalParams
+    options?: TriggerRunCancelTriggerInstanceOptionalParams
   ): Promise<void>;
   /**
    * Query trigger runs.
@@ -45,6 +45,6 @@
    */
   queryTriggerRunsByWorkspace(
     filterParameters: RunFilterParameters,
-    options?: TriggerRunOperationsQueryTriggerRunsByWorkspaceOptionalParams
-  ): Promise<TriggerRunOperationsQueryTriggerRunsByWorkspaceResponse>;
+    options?: TriggerRunQueryTriggerRunsByWorkspaceOptionalParams
+  ): Promise<TriggerRunQueryTriggerRunsByWorkspaceResponse>;
 }
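The trigger-run group gets the same mechanical rename, `TriggerRunOperations*` to `TriggerRun*`. A sketch of querying the last 24 hours of trigger runs, assuming the same hypothetical endpoint as above and that `RunFilterParameters` keeps its required `lastUpdatedAfter`/`lastUpdatedBefore` window:

import { DefaultAzureCredential } from "@azure/identity";
import { ArtifactsClient, RunFilterParameters } from "@azure/synapse-artifacts";

const client = new ArtifactsClient(
  new DefaultAzureCredential(),
  "https://myworkspace.dev.azuresynapse.net" // hypothetical endpoint
);

async function listRecentTriggerRuns(): Promise<void> {
  // The query window is required; filters and orderBy are optional.
  const filters: RunFilterParameters = {
    lastUpdatedAfter: new Date(Date.now() - 24 * 60 * 60 * 1000),
    lastUpdatedBefore: new Date()
  };
  const page = await client.triggerRunOperations.queryTriggerRunsByWorkspace(filters);
  for (const run of page.value) {
    console.log(`${run.triggerName} (${run.runId}): ${run.status}`);
  }
}

A `continuationToken` on the response, passed back through the filter parameters, pages through larger result sets.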
diff --git a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/workspaceOperations.ts b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/workspaceOperations.ts
index 122d87dfafc0..4541ae1d6b16 100644
--- a/sdk/synapse/synapse-artifacts/src/operationsInterfaces/workspaceOperations.ts
+++ b/sdk/synapse/synapse-artifacts/src/operationsInterfaces/workspaceOperations.ts
@@ -6,10 +6,7 @@
  * Changes may cause incorrect behavior and will be lost if the code is regenerated.
  */
 
-import {
-  WorkspaceOperationsGetOptionalParams,
-  WorkspaceOperationsGetResponse
-} from "../models";
+import { WorkspaceGetOptionalParams, WorkspaceGetResponse } from "../models";
 
 /** Interface representing a WorkspaceOperations. */
 export interface WorkspaceOperations {
@@ -17,7 +14,5 @@
    * Get Workspace
    * @param options The options parameters.
    */
-  get(
-    options?: WorkspaceOperationsGetOptionalParams
-  ): Promise<WorkspaceOperationsGetResponse>;
+  get(options?: WorkspaceGetOptionalParams): Promise<WorkspaceGetResponse>;
 }
diff --git a/sdk/synapse/synapse-artifacts/swagger/README.md b/sdk/synapse/synapse-artifacts/swagger/README.md
index 34ccdc0130a3..ab3d156de058 100644
--- a/sdk/synapse/synapse-artifacts/swagger/README.md
+++ b/sdk/synapse/synapse-artifacts/swagger/README.md
@@ -15,9 +15,10 @@ clear-output-folder: false
 tracing-info:
   namespace: "Azure.Synapse.Artifacts"
   packagePrefix: "Microsoft.Synapse"
-require: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/3d6211cf28f83236cdf78e7cfc50efd3fb7cba72/specification/synapse/data-plane/readme.md
+require: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/bee724836ffdeb5458274037dc75f4d43576b5e3/specification/synapse/data-plane/readme.md
 use-extension:
   "@autorest/typescript": "latest"
+tag: package-artifacts-composite-v1
 
 typescript:
   generate-metadata: false
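Pinning `require` to spec commit bee72483 and selecting `tag: package-artifacts-composite-v1` ties the generated client to the October composite package, so a later AutoRest run against this README should reproduce the surface in this PR. On the client side the workspace group is now a single `get()` taking `WorkspaceGetOptionalParams`; a closing sketch, under the same hypothetical-endpoint assumption as the earlier examples:

import { DefaultAzureCredential } from "@azure/identity";
import { ArtifactsClient } from "@azure/synapse-artifacts";

const client = new ArtifactsClient(
  new DefaultAzureCredential(),
  "https://myworkspace.dev.azuresynapse.net" // hypothetical endpoint
);

// get() resolves to WorkspaceGetResponse, i.e. the Workspace model.
async function showWorkspace(): Promise<void> {
  const workspace = await client.workspaceOperations.get();
  console.log(workspace.name, workspace.defaultDataLakeStorage?.accountUrl);
}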