diff --git a/specification/synapse/data-plane/Microsoft.Synapse/stable/2020-12-01/artifacts.json b/specification/synapse/data-plane/Microsoft.Synapse/stable/2020-12-01/artifacts.json
index d18b774130f2..5cdf4d9eb984 100644
--- a/specification/synapse/data-plane/Microsoft.Synapse/stable/2020-12-01/artifacts.json
+++ b/specification/synapse/data-plane/Microsoft.Synapse/stable/2020-12-01/artifacts.json
@@ -776,8 +776,8 @@
           }
         },
         "referenceName": {
-          "type": "string",
-          "description": "Reference notebook name."
+          "type": "object",
+          "description": "Reference notebook name. Type: string (or Expression with resultType string)."
         }
       },
       "required": [
@@ -835,6 +835,31 @@
         "referenceName"
       ]
     },
+    "BigDataPoolParametrizationReference": {
+      "description": "Big data pool reference type.",
+      "type": "object",
+      "properties": {
+        "type": {
+          "type": "string",
+          "description": "Big data pool reference type.",
+          "enum": [
+            "BigDataPoolReference"
+          ],
+          "x-ms-enum": {
+            "name": "BigDataPoolReferenceType",
+            "modelAsString": true
+          }
+        },
+        "referenceName": {
+          "type": "object",
+          "description": "Reference big data pool name. Type: string (or Expression with resultType string)."
+        }
+      },
+      "required": [
+        "type",
+        "referenceName"
+      ]
+    },
     "ArtifactRenameRequest": {
       "description": "Request body structure for rename artifact.",
       "type": "object",
diff --git a/specification/synapse/data-plane/Microsoft.Synapse/stable/2020-12-01/entityTypes/Pipeline.json b/specification/synapse/data-plane/Microsoft.Synapse/stable/2020-12-01/entityTypes/Pipeline.json
index fbc4761ce067..6a89b1c8c5c8 100644
--- a/specification/synapse/data-plane/Microsoft.Synapse/stable/2020-12-01/entityTypes/Pipeline.json
+++ b/specification/synapse/data-plane/Microsoft.Synapse/stable/2020-12-01/entityTypes/Pipeline.json
@@ -7354,6 +7354,10 @@
           "description": "Synapse notebook reference.",
           "$ref": "../artifacts.json#/definitions/SynapseNotebookReference"
         },
+        "sparkPool": {
+          "description": "The name of the big data pool which will be used to execute the notebook.",
+          "$ref": "../artifacts.json#/definitions/BigDataPoolParametrizationReference"
+        },
         "parameters": {
           "description": "Notebook parameters.",
           "type": "object",
@@ -7401,6 +7405,41 @@
           "items": {
             "description": "Type: string (or Expression with resultType string)."
           }
+        },
+        "file": {
+          "type": "object",
+          "description": "The main file used for the job, which will override the 'file' of the spark job definition you provide. Type: string (or Expression with resultType string)."
+        },
+        "className": {
+          "type": "object",
+          "description": "The fully-qualified identifier or the main class that is in the main definition file, which will override the 'className' of the spark job definition you provide. Type: string (or Expression with resultType string)."
+        },
+        "files": {
+          "description": "Additional files used for reference in the main definition file, which will override the 'files' of the spark job definition you provide.",
+          "type": "array",
+          "items": {
+            "description": "Type: string (or Expression with resultType string)."
+          }
+        },
+        "targetBigDataPool": {
+          "$ref": "../artifacts.json#/definitions/BigDataPoolParametrizationReference",
+          "description": "The name of the big data pool which will be used to execute the spark batch job, which will override the 'targetBigDataPool' of the spark job definition you provide."
+        },
+        "executorSize": {
+          "type": "object",
+          "description": "Number of core and memory to be used for executors allocated in the specified Spark pool for the job, which will be used for overriding 'executorCores' and 'executorMemory' of the spark job definition you provide. Type: string (or Expression with resultType string)."
+        },
+        "conf": {
+          "type": "object",
+          "description": "Spark configuration properties, which will override the 'conf' of the spark job definition you provide."
+        },
+        "driverSize": {
+          "type": "object",
+          "description": "Number of core and memory to be used for driver allocated in the specified Spark pool for the job, which will be used for overriding 'driverCores' and 'driverMemory' of the spark job definition you provide. Type: string (or Expression with resultType string)."
+        },
+        "numExecutors": {
+          "description": "Number of executors to launch for this job, which will override the 'numExecutors' of the spark job definition you provide.",
+          "type": "integer"
+        }
         }
       },
       "required": [
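
Illustration (not part of the patch): a minimal sketch of how the parametrizable referenceName and the new sparkPool property could surface in a SynapseNotebook activity payload, assuming the usual Expression object shape for "string (or Expression with resultType string)". The activity, notebook, pool, and pipeline parameter names are hypothetical.

{
  "name": "RunNotebook",
  "type": "SynapseNotebook",
  "typeProperties": {
    "notebook": {
      "type": "NotebookReference",
      "referenceName": {
        "value": "@pipeline().parameters.notebookName",
        "type": "Expression"
      }
    },
    "sparkPool": {
      "type": "BigDataPoolReference",
      "referenceName": {
        "value": "@pipeline().parameters.poolName",
        "type": "Expression"
      }
    }
  }
}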
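
Similarly, a sketch of a Spark job activity that uses the override properties added in the second Pipeline.json hunk. The sparkJob reference shape, reference names, file path, class name, sizes, and conf keys are assumptions for illustration only, and any of the string-typed fields could instead be an Expression object as shown above.

{
  "name": "RunSparkJob",
  "type": "SparkJob",
  "typeProperties": {
    "sparkJob": {
      "type": "SparkJobDefinitionReference",
      "referenceName": "MySparkJobDefinition"
    },
    "targetBigDataPool": {
      "type": "BigDataPoolReference",
      "referenceName": "overridePool"
    },
    "file": "abfss://jobs@contosostorage.dfs.core.windows.net/wordcount.jar",
    "className": "com.contoso.WordCount",
    "conf": {
      "spark.dynamicAllocation.enabled": "false"
    },
    "driverSize": "Medium",
    "executorSize": "Medium",
    "numExecutors": 2
  }
}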