Revert "Temporarily disable cache test except for spark 3.1.1 (NVIDIA…
Browse files Browse the repository at this point in the history
…#3319)"

This reverts commit 97baf1f.

Signed-off-by: Raza Jafri <rjafri@nvidia.com>
razajafri committed Aug 31, 2021
1 parent aa98024 commit a380df0
Showing 2 changed files with 10 additions and 16 deletions.
18 changes: 8 additions & 10 deletions jenkins/databricks/test.sh
@@ -64,12 +64,11 @@ if [ -d "$LOCAL_JAR_PATH" ]; then
 ## Run tests with jars in the LOCAL_JAR_PATH dir downloading from the denpedency repo
 LOCAL_JAR_PATH=$LOCAL_JAR_PATH bash $LOCAL_JAR_PATH/integration_tests/run_pyspark_from_build.sh --runtime_env="databricks" --test_type=$TEST_TYPE

-# Temporarily only run on Spark 3.1.1 (https://github.com/NVIDIA/spark-rapids/issues/3311)
 ## Run cache tests
-#if [[ "$IS_SPARK_311_OR_LATER" -eq "1" ]]; then
-#  PYSP_TEST_spark_sql_cache_serializer=${PCBS_CONF} \
-#  LOCAL_JAR_PATH=$LOCAL_JAR_PATH bash $LOCAL_JAR_PATH/integration_tests/run_pyspark_from_build.sh --runtime_env="databricks" --test_type=$TEST_TYPE -k cache_test
-#fi
+if [[ "$IS_SPARK_311_OR_LATER" -eq "1" ]]; then
+  PYSP_TEST_spark_sql_cache_serializer=${PCBS_CONF} \
+  LOCAL_JAR_PATH=$LOCAL_JAR_PATH bash $LOCAL_JAR_PATH/integration_tests/run_pyspark_from_build.sh --runtime_env="databricks" --test_type=$TEST_TYPE -k cache_test
+fi

 ## Run cudf-udf tests
 CUDF_UDF_TEST_ARGS="$CUDF_UDF_TEST_ARGS --conf spark.executorEnv.PYTHONPATH=`ls $LOCAL_JAR_PATH/rapids-4-spark_*.jar | grep -v 'tests.jar'`"
@@ -80,12 +79,11 @@ else
 ## Run tests with jars building from the spark-rapids source code
 bash /home/ubuntu/spark-rapids/integration_tests/run_pyspark_from_build.sh --runtime_env="databricks" --test_type=$TEST_TYPE

-# Temporarily only run on Spark 3.1.1 (https://github.com/NVIDIA/spark-rapids/issues/3311)
 ## Run cache tests
-#if [[ "$IS_SPARK_311_OR_LATER" -eq "1" ]]; then
-#  PYSP_TEST_spark_sql_cache_serializer=${PCBS_CONF} \
-#  bash /home/ubuntu/spark-rapids/integration_tests/run_pyspark_from_build.sh --runtime_env="databricks" --test_type=$TEST_TYPE -k cache_test
-#fi
+if [[ "$IS_SPARK_311_OR_LATER" -eq "1" ]]; then
+  PYSP_TEST_spark_sql_cache_serializer=${PCBS_CONF} \
+  bash /home/ubuntu/spark-rapids/integration_tests/run_pyspark_from_build.sh --runtime_env="databricks" --test_type=$TEST_TYPE -k cache_test
+fi

 ## Run cudf-udf tests
 CUDF_UDF_TEST_ARGS="$CUDF_UDF_TEST_ARGS --conf spark.executorEnv.PYTHONPATH=`ls /home/ubuntu/spark-rapids/dist/target/rapids-4-spark_*.jar | grep -v 'tests.jar'`"
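The re-enabled block above works because run_pyspark_from_build.sh forwards PYSP_TEST_-prefixed environment variables into the Spark session under test, with underscores in the suffix read as dots in the conf key. A minimal sketch of the invocation, assuming PCBS_CONF names the plugin's ParquetCachedBatchSerializer class (its real assignment lives elsewhere in these scripts and is not shown in this diff):

    # Assumed value of PCBS_CONF for illustration only.
    PCBS_CONF="com.nvidia.spark.ParquetCachedBatchSerializer"

    # The harness is assumed to pick up PYSP_TEST_spark_sql_cache_serializer as
    # spark.sql.cache.serializer, so pytest's -k filter runs only the cache tests,
    # with the plugin's serializer in place of Spark's default.
    PYSP_TEST_spark_sql_cache_serializer=${PCBS_CONF} \
        bash integration_tests/run_pyspark_from_build.sh \
        --runtime_env="databricks" --test_type="$TEST_TYPE" -k cache_test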
8 changes: 2 additions & 6 deletions jenkins/spark-tests.sh
@@ -66,9 +66,6 @@ IS_SPARK_311_OR_LATER=0
 export SPARK_TASK_MAXFAILURES=1
 [[ "$IS_SPARK_311_OR_LATER" -eq "0" ]] && SPARK_TASK_MAXFAILURES=4

-IS_SPARK_311=0
-[[ "$SPARK_VER" == "3.1.1" ]] && IS_SPARK_311=1
-
 export PATH="$SPARK_HOME/bin:$SPARK_HOME/sbin:$PATH"

 #stop and restart SPARK ETL
@@ -174,9 +171,8 @@ else
 fi
 # cudf_udf_test
 run_test cudf_udf_test
-
-# Temporarily only run on Spark 3.1.1 (https://github.com/NVIDIA/spark-rapids/issues/3311)
-if [[ "$IS_SPARK_311" -eq "1" ]]; then
+# only run cache tests with our serializer in nightly test for Spark version >= 3.1.1
+if [[ "$IS_SPARK_311_OR_LATER" -eq "1" ]]; then
 run_test cache_serializer
 fi
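With the revert, the nightly script gates the cache_serializer suite on the pre-existing IS_SPARK_311_OR_LATER flag rather than an exact IS_SPARK_311 match, matching the new comment's "Spark version >= 3.1.1" intent. The flag's definition sits above the first hunk and is not shown here; one common shell idiom for such a gate, offered only as a hedged sketch (the script's actual comparison may differ), is a sort -V version check:

    # Hypothetical version gate: sets the flag for SPARK_VER >= 3.1.1.
    # sort -V orders version strings component-by-component, so if "3.1.1"
    # sorts first (or ties), SPARK_VER is at least 3.1.1.
    IS_SPARK_311_OR_LATER=0
    [[ "$(printf '%s\n' 3.1.1 "$SPARK_VER" | sort -V | head -n1)" == "3.1.1" ]] && \
        IS_SPARK_311_OR_LATER=1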
