Skip to content

Commit

Permalink
[GLUTEN-1658] [CORE] feat: Support SparkResourcesUtil.scala in Kubernetes
Browse files Browse the repository at this point in the history
Support getting the total number of cores of the Spark application in Kubernetes.

Signed-off-by: Binbin Zou <binbin.zou@kyligence.io>
  • Loading branch information
zbbkeepgoing authored May 19, 2023
1 parent 2967492 commit abb0950
Showing 1 changed file with 3 additions and 3 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -30,8 +30,8 @@ object SparkResourcesUtil extends Logging {
sqlConf.getConfString("spark.master") match {
case local if local.startsWith("local") =>
sqlConf.getConfString("spark.default.parallelism", "1").toInt
case yarn if yarn.startsWith("yarn") =>
val instances = getYarnExecutorNum(sqlConf)
case otherResourceManager if otherResourceManager.matches("(yarn|k8s:).*") =>
val instances = getExecutorNum(sqlConf)
val cores = sqlConf.getConfString("spark.executor.cores", "1").toInt
Math.max(instances * cores, sqlConf.getConfString("spark.default.parallelism", "1").toInt)
case standalone if standalone.startsWith("spark:") =>
Expand All @@ -43,7 +43,7 @@ object SparkResourcesUtil extends Logging {
/**
* Get the executor number for yarn or kubernetes (spark.master "yarn" or "k8s:*")
*/
def getYarnExecutorNum(sqlConf: SQLConf): Int = {
def getExecutorNum(sqlConf: SQLConf): Int = {
if (sqlConf.getConfString("spark.dynamicAllocation.enabled", "false").toBoolean) {
val maxExecutors =
sqlConf.getConfString("spark.dynamicAllocation.maxExecutors",
Expand Down

0 comments on commit abb0950

Please sign in to comment.