diff --git a/jenkins/spark-tests.sh b/jenkins/spark-tests.sh
index 49d5e886c63..91e339dc508 100755
--- a/jenkins/spark-tests.sh
+++ b/jenkins/spark-tests.sh
@@ -107,6 +107,12 @@ export PATH="$SPARK_HOME/bin:$SPARK_HOME/sbin:$PATH"
 tar zxf $SPARK_HOME.tgz -C $ARTF_ROOT && \
 rm -f $SPARK_HOME.tgz
 export PYTHONPATH=$SPARK_HOME/python:$SPARK_HOME/python/pyspark/:$SPARK_HOME/python/lib/py4j-0.10.9-src.zip
+# Extract 'value' from conda config string 'key: value'
+CONDA_ROOT=`conda config --show root_prefix | cut -d ' ' -f2`
+PYTHON_VER=`conda config --show default_python | cut -d ' ' -f2`
+# Put conda package path ahead of the env 'PYTHONPATH',
+# to import the right pandas from conda instead of spark binary path.
+export PYTHONPATH="$CONDA_ROOT/lib/python$PYTHON_VER/site-packages:$PYTHONPATH"
 
 IS_SPARK_311_OR_LATER=0
 [[ "$(printf '%s\n' "3.1.1" "$SPARK_VER" | sort -V | head -n1)" = "3.1.1" ]] && IS_SPARK_311_OR_LATER=1
@@ -276,12 +282,6 @@ fi
 
 # cudf_udf_test
 if [[ "$TEST_MODE" == "ALL" || "$TEST_MODE" == "CUDF_UDF_ONLY" ]]; then
-    # Extract 'value' from conda config string 'key: value'
-    CONDA_ROOT=`conda config --show root_prefix | cut -d ' ' -f2`
-    PYTHON_VER=`conda config --show default_python | cut -d ' ' -f2`
-    # Put conda package path ahead of the env 'PYTHONPATH',
-    # to import the right pandas from conda instead of spark binary path.
-    export PYTHONPATH="$CONDA_ROOT/lib/python$PYTHON_VER/site-packages:$PYTHONPATH"
     run_test_not_parallel cudf_udf_test
 fi
 