Temporarily disable cache test except for spark 3.1.1 (NVIDIA#3319)
Signed-off-by: Thomas Graves <tgraves@nvidia.com>
tgravescs authored Aug 27, 2021
1 parent 91b3a68 commit 97baf1f
Showing 2 changed files with 16 additions and 10 deletions.
18 changes: 10 additions & 8 deletions jenkins/databricks/test.sh
@@ -64,11 +64,12 @@ if [ -d "$LOCAL_JAR_PATH" ]; then
 ## Run tests with jars in the LOCAL_JAR_PATH dir downloading from the dependency repo
 LOCAL_JAR_PATH=$LOCAL_JAR_PATH bash $LOCAL_JAR_PATH/integration_tests/run_pyspark_from_build.sh --runtime_env="databricks" --test_type=$TEST_TYPE
 
+# Temporarily only run on Spark 3.1.1 (https://github.com/NVIDIA/spark-rapids/issues/3311)
 ## Run cache tests
-if [[ "$IS_SPARK_311_OR_LATER" -eq "1" ]]; then
-  PYSP_TEST_spark_sql_cache_serializer=${PCBS_CONF} \
-  LOCAL_JAR_PATH=$LOCAL_JAR_PATH bash $LOCAL_JAR_PATH/integration_tests/run_pyspark_from_build.sh --runtime_env="databricks" --test_type=$TEST_TYPE -k cache_test
-fi
+#if [[ "$IS_SPARK_311_OR_LATER" -eq "1" ]]; then
+#  PYSP_TEST_spark_sql_cache_serializer=${PCBS_CONF} \
+#  LOCAL_JAR_PATH=$LOCAL_JAR_PATH bash $LOCAL_JAR_PATH/integration_tests/run_pyspark_from_build.sh --runtime_env="databricks" --test_type=$TEST_TYPE -k cache_test
+#fi
 
 ## Run cudf-udf tests
 CUDF_UDF_TEST_ARGS="$CUDF_UDF_TEST_ARGS --conf spark.executorEnv.PYTHONPATH=`ls $LOCAL_JAR_PATH/rapids-4-spark_*.jar | grep -v 'tests.jar'`"
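A note on the disabled block above: the integration harness turns environment variables prefixed with PYSP_TEST_ into Spark confs, with underscores mapping to dots, so PYSP_TEST_spark_sql_cache_serializer corresponds to the spark.sql.cache.serializer conf. A minimal sketch of what the commented-out invocation amounts to, assuming PCBS_CONF holds the RAPIDS ParquetCachedBatchSerializer class name (that value is set elsewhere in the script and is an assumption here):

# Sketch only: the serializer class name below is assumed, not taken
# from this diff; PYSP_TEST_spark_sql_cache_serializer maps to the
# spark.sql.cache.serializer conf inside the test harness.
PCBS_CONF="com.nvidia.spark.ParquetCachedBatchSerializer"
PYSP_TEST_spark_sql_cache_serializer=${PCBS_CONF} \
  bash integration_tests/run_pyspark_from_build.sh --test_type=nightly -k cache_test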
@@ -79,11 +80,12 @@ else
 ## Run tests with jars built from the spark-rapids source code
 bash /home/ubuntu/spark-rapids/integration_tests/run_pyspark_from_build.sh --runtime_env="databricks" --test_type=$TEST_TYPE
 
+# Temporarily only run on Spark 3.1.1 (https://github.com/NVIDIA/spark-rapids/issues/3311)
 ## Run cache tests
-if [[ "$IS_SPARK_311_OR_LATER" -eq "1" ]]; then
-  PYSP_TEST_spark_sql_cache_serializer=${PCBS_CONF} \
-  bash /home/ubuntu/spark-rapids/integration_tests/run_pyspark_from_build.sh --runtime_env="databricks" --test_type=$TEST_TYPE -k cache_test
-fi
+#if [[ "$IS_SPARK_311_OR_LATER" -eq "1" ]]; then
+#  PYSP_TEST_spark_sql_cache_serializer=${PCBS_CONF} \
+#  bash /home/ubuntu/spark-rapids/integration_tests/run_pyspark_from_build.sh --runtime_env="databricks" --test_type=$TEST_TYPE -k cache_test
+#fi
 
 ## Run cudf-udf tests
 CUDF_UDF_TEST_ARGS="$CUDF_UDF_TEST_ARGS --conf spark.executorEnv.PYTHONPATH=`ls /home/ubuntu/spark-rapids/dist/target/rapids-4-spark_*.jar | grep -v 'tests.jar'`"
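Aside on the PYTHONPATH line above: the backtick expression lists every rapids-4-spark_*.jar in dist/target and filters out the tests jar, so exactly one plugin jar lands on the executors' PYTHONPATH. A minimal restatement of that idiom, with the path and variable names taken straight from the diff:

# Sketch: pick the one dist jar that is not the tests jar, then
# expose it to the Python workers on the executors.
PLUGIN_JAR=$(ls /home/ubuntu/spark-rapids/dist/target/rapids-4-spark_*.jar | grep -v 'tests.jar')
CUDF_UDF_TEST_ARGS="$CUDF_UDF_TEST_ARGS --conf spark.executorEnv.PYTHONPATH=$PLUGIN_JAR"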
8 changes: 6 additions & 2 deletions jenkins/spark-tests.sh
@@ -66,6 +66,9 @@ IS_SPARK_311_OR_LATER=0
 export SPARK_TASK_MAXFAILURES=1
 [[ "$IS_SPARK_311_OR_LATER" -eq "0" ]] && SPARK_TASK_MAXFAILURES=4
 
+IS_SPARK_311=0
+[[ "$SPARK_VER" == "3.1.1" ]] && IS_SPARK_311=1
+
 export PATH="$SPARK_HOME/bin:$SPARK_HOME/sbin:$PATH"
 
 #stop and restart SPARK ETL
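The new IS_SPARK_311 flag matches Spark 3.1.1 exactly, unlike the existing IS_SPARK_311_OR_LATER gate. A side-by-side sketch of the two checks; how the script actually computes IS_SPARK_311_OR_LATER is not shown in this hunk, so the sort -V comparison below is illustrative only:

SPARK_VER="3.1.1"   # assumed to be set earlier in the script

IS_SPARK_311=0
[[ "$SPARK_VER" == "3.1.1" ]] && IS_SPARK_311=1   # exact version only

# Illustrative reimplementation, not the script's actual code:
# 3.1.1 sorts first exactly when SPARK_VER >= 3.1.1.
IS_SPARK_311_OR_LATER=0
[[ "$(printf '%s\n' "3.1.1" "$SPARK_VER" | sort -V | head -n1)" == "3.1.1" ]] && IS_SPARK_311_OR_LATER=1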
@@ -171,8 +174,9 @@ else
 fi
 # cudf_udf_test
 run_test cudf_udf_test
-# only run cache tests with our serializer in nightly test for Spark version >= 3.1.1
-if [[ "$IS_SPARK_311_OR_LATER" -eq "1" ]]; then
+
+# Temporarily only run on Spark 3.1.1 (https://github.com/NVIDIA/spark-rapids/issues/3311)
+if [[ "$IS_SPARK_311" -eq "1" ]]; then
 run_test cache_serializer
 fi
 
