Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add scan folder and spark config support in Sparkjob activity #21768

Merged
merged 4 commits into from
Jan 6, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions custom-words.txt
Original file line number Diff line number Diff line change
Expand Up @@ -1380,6 +1380,7 @@ netbios
netbsd
netdev
Netdev
Netezza
netfilter
Netfilter
networkinterfaces
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7931,6 +7931,10 @@
"type": "object",
"description": "The main file used for the job, which will override the 'file' of the spark job definition you provide. Type: string (or Expression with resultType string)."
},
"scanFolder": {
"type": "object",
"description": "Scanning subfolders from the root folder of the main definition file, these files will be added as reference files. The folders named 'jars', 'pyFiles', 'files' or 'archives' will be scanned, and the folder names are case sensitive. Type: boolean (or Expression with resultType boolean)."
},
"className": {
"type": "object",
"description": "The fully-qualified identifier or the main class that is in the main definition file, which will override the 'className' of the spark job definition you provide. Type: string (or Expression with resultType string)."
Expand Down Expand Up @@ -7979,9 +7983,33 @@
"description": "Number of core and memory to be used for driver allocated in the specified Spark pool for the job, which will be used for overriding 'driverCores' and 'driverMemory' of the spark job definition you provide. Type: string (or Expression with resultType string)."
},
"numExecutors": {
"description": "Number of executors to launch for this job, which will override the 'numExecutors' of the spark job definition you provide.",
"type": "integer",
"format": "int32"
"description": "Number of executors to launch for this job, which will override the 'numExecutors' of the spark job definition you provide. Type: integer (or Expression with resultType integer).",
"type": "object"
},
"configurationType": {
"enum": [
"Default",
"Customized",
"Artifact"
],
"type": "string",
"description": "The type of the spark config.",
"x-ms-enum": {
"name": "ConfigurationType",
"modelAsString": true
}
},
"targetSparkConfiguration": {
"description": "The spark configuration of the spark job.",
"$ref": "#/definitions/SparkConfigurationParametrizationReference"
},
"sparkConfig": {
"description": "Spark configuration property.",
"type": "object",
"additionalProperties": {
"type": "object",
"description": "Type: string (or Expression with resultType string)."
}
}
},
"required": [
Expand Down Expand Up @@ -8012,6 +8040,31 @@
"type",
"referenceName"
]
},
"SparkConfigurationParametrizationReference": {
"description": "Spark configuration reference.",
"type": "object",
"properties": {
"type": {
"description": "Spark configuration reference type.",
"type": "string",
"enum": [
"SparkConfigurationReference"
],
"x-ms-enum": {
"name": "SparkConfigurationReferenceType",
"modelAsString": true
}
},
"referenceName": {
"description": "Reference spark configuration name. Type: string (or Expression with resultType string).",
"type": "object"
}
},
"required": [
"type",
"referenceName"
]
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -7497,6 +7497,10 @@
"type": "object",
"description": "The main file used for the job, which will override the 'file' of the spark job definition you provide. Type: string (or Expression with resultType string)."
},
"scanFolder": {
"type": "object",
"description": "Scanning subfolders from the root folder of the main definition file, these files will be added as reference files. The folders named 'jars', 'pyFiles', 'files' or 'archives' will be scanned, and the folder names are case sensitive. Type: boolean (or Expression with resultType boolean)."
},
"className": {
"type": "object",
"description": "The fully-qualified identifier or the main class that is in the main definition file, which will override the 'className' of the spark job definition you provide. Type: string (or Expression with resultType string)."
Expand Down Expand Up @@ -7545,15 +7549,64 @@
"description": "Number of core and memory to be used for driver allocated in the specified Spark pool for the job, which will be used for overriding 'driverCores' and 'driverMemory' of the spark job definition you provide. Type: string (or Expression with resultType string)."
},
"numExecutors": {
"description": "Number of executors to launch for this job, which will override the 'numExecutors' of the spark job definition you provide.",
"type": "integer",
"format": "int32"
"description": "Number of executors to launch for this job, which will override the 'numExecutors' of the spark job definition you provide. Type: integer (or Expression with resultType integer).",
"type": "object"
},
"configurationType": {
"enum": [
"Default",
"Customized",
"Artifact"
],
"type": "string",
"description": "The type of the spark config.",
"x-ms-enum": {
"name": "ConfigurationType",
"modelAsString": true
}
},
"targetSparkConfiguration": {
"description": "The spark configuration of the spark job.",
"$ref": "#/definitions/SparkConfigurationParametrizationReference"
},
"sparkConfig": {
"description": "Spark configuration property.",
"type": "object",
"additionalProperties": {
"type": "object",
"description": "Type: string (or Expression with resultType string)."
}
}
},
"required": [
"sparkJob"
]
},
"SparkConfigurationParametrizationReference": {
"description": "Spark configuration reference.",
"type": "object",
"properties": {
"type": {
"description": "Spark configuration reference type.",
"type": "string",
"enum": [
"SparkConfigurationReference"
],
"x-ms-enum": {
"name": "SparkConfigurationReferenceType",
"modelAsString": true
}
},
"referenceName": {
"description": "Reference spark configuration name. Type: string (or Expression with resultType string).",
"type": "object"
}
},
"required": [
"type",
"referenceName"
]
},
"SqlPoolStoredProcedureActivity": {
"description": "Execute SQL pool stored procedure activity.",
"type": "object",
Expand Down