diff --git a/integration_tests/README.md b/integration_tests/README.md
index d21498ff892..a2b8bf90f94 100644
--- a/integration_tests/README.md
+++ b/integration_tests/README.md
@@ -171,6 +171,19 @@ any GPU resources on the cluster. For standalone, Mesos, and Kubernetes you can
 of executors you want to use per application. The extra core is for the driver. Dynamic allocation can mess with these
 settings under YARN and even though it is off by default you probably want to be sure it is disabled
 (spark.dynamicAllocation.enabled=false).
+### Running with Alternate Paths
+
+If your test jars and resources were downloaded from the dependency repository to a local path instead of being built
+from source, set `LOCAL_JAR_PATH` to that path so that the shell script [run_pyspark_from_build.sh](run_pyspark_from_build.sh)
+can find the test jars and resources there, e.g. `LOCAL_JAR_PATH=local-path bash run_pyspark_from_build.sh`.
+
+When running [run_pyspark_from_build.sh](run_pyspark_from_build.sh) under YARN or Kubernetes, the `$SCRIPTPATH` used in
+the python options `--rootdir $SCRIPTPATH ...` and `--std_input_path $SCRIPTPATH ...` will not work, because
+`$SCRIPTPATH` is a local path; you need to override it with cloud paths. First upload the test resources onto a cloud
+path `resource-path`, then transfer them onto the working directory `root-dir` of each executor (e.g. via
+`spark-submit --files root-dir ...`). After that, set both `LOCAL_ROOTDIR=root-dir` and `INPUT_PATH=resource-path` when
+running the script, e.g. `LOCAL_ROOTDIR=root-dir INPUT_PATH=resource-path bash run_pyspark_from_build.sh`.
+
 ### Enabling cudf_udf Tests
 
 The cudf_udf tests in this framework are testing Pandas UDF(user-defined function) with cuDF. They are disabled by default not only because of the complicated environment setup, but also because GPU resources scheduling for Pandas UDF is an experimental feature now, the performance may not always be better.
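As an illustration of the `LOCAL_JAR_PATH` case documented above, a minimal sketch (the directory `/tmp/rapids-jars` is hypothetical; any directory works):

```shell
# Hypothetical download location; it just needs to contain the cudf-*,
# rapids-4-spark_*, rapids-4-spark-integration-tests*, and
# rapids-4-spark-udf-examples* jars the script globs for.
LOCAL_JAR_PATH=/tmp/rapids-jars bash run_pyspark_from_build.sh
```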
diff --git a/integration_tests/run_pyspark_from_build.sh b/integration_tests/run_pyspark_from_build.sh
index cb4b213a776..ad7d8c7b180 100755
--- a/integration_tests/run_pyspark_from_build.sh
+++ b/integration_tests/run_pyspark_from_build.sh
@@ -28,12 +28,12 @@ else
     # support alternate local jars NOT building from the source code
     if [ -d "$LOCAL_JAR_PATH" ]; then
         CUDF_JARS=$(echo "$LOCAL_JAR_PATH"/cudf-*.jar)
-        PLUGIN_JARS=$(echo "$LOCAL_JAR_PATH"/rapids-4-spark*.jar)
+        PLUGIN_JARS=$(echo "$LOCAL_JAR_PATH"/rapids-4-spark_*.jar)
         TEST_JARS=$(echo "$LOCAL_JAR_PATH"/rapids-4-spark-integration-tests*.jar)
         UDF_EXAMPLE_JARS=$(echo "$LOCAL_JAR_PATH"/rapids-4-spark-udf-examples*.jar)
     else
         CUDF_JARS=$(echo "$SCRIPTPATH"/target/dependency/cudf-*.jar)
-        PLUGIN_JARS=$(echo "$SCRIPTPATH"/../dist/target/rapids-4-spark*.jar)
+        PLUGIN_JARS=$(echo "$SCRIPTPATH"/../dist/target/rapids-4-spark_*.jar)
         TEST_JARS=$(echo "$SCRIPTPATH"/target/rapids-4-spark-integration-tests*.jar)
         UDF_EXAMPLE_JARS=$(echo "$SCRIPTPATH"/../udf-examples/target/rapids-4-spark-udf-examples*.jar)
     fi
@@ -93,6 +93,12 @@ else
     RUN_DIR="$SCRIPTPATH"/target/run_dir
     mkdir -p "$RUN_DIR"
     cd "$RUN_DIR"
+
+    ## In a cloud environment, override the '--rootdir' param to point to the working directory of each executor
+    LOCAL_ROOTDIR=${LOCAL_ROOTDIR:-"$SCRIPTPATH"}
+    ## In a cloud environment, override the '--std_input_path' param to point to the distributed file path
+    INPUT_PATH=${INPUT_PATH:-"$SCRIPTPATH"}
+
     if [[ "${TEST_PARALLEL_OPTS}" != "" ]];
     then
         export PYSP_TEST_spark_driver_extraClassPath="${ALL_JARS// /:}"
@@ -105,10 +111,10 @@ else
         export PYSP_TEST_spark_rapids_memory_gpu_maxAllocFraction=$MEMORY_FRACTION
         python \
-          "$SCRIPTPATH"/runtests.py --rootdir "$SCRIPTPATH" "$SCRIPTPATH"/src/main/python \
+          "$SCRIPTPATH"/runtests.py --rootdir "$LOCAL_ROOTDIR" "$LOCAL_ROOTDIR"/src/main/python \
           $TEST_PARALLEL_OPTS \
           -v -rfExXs "$TEST_TAGS" \
-          --std_input_path="$SCRIPTPATH"/src/test/resources/ \
+          --std_input_path="$INPUT_PATH"/src/test/resources/ \
           --color=yes \
           $TEST_TYPE_PARAM \
           "$TEST_ARGS" \
@@ -121,9 +127,9 @@ else
             --conf 'spark.sql.session.timeZone=UTC' \
             --conf 'spark.sql.shuffle.partitions=12' \
             $SPARK_SUBMIT_FLAGS \
-            "$SCRIPTPATH"/runtests.py --rootdir "$SCRIPTPATH" "$SCRIPTPATH"/src/main/python \
+            "$SCRIPTPATH"/runtests.py --rootdir "$LOCAL_ROOTDIR" "$LOCAL_ROOTDIR"/src/main/python \
             -v -rfExXs "$TEST_TAGS" \
-            --std_input_path="$SCRIPTPATH"/src/test/resources/ \
+            --std_input_path="$INPUT_PATH"/src/test/resources/ \
             --color=yes \
             $TEST_TYPE_PARAM \
             "$TEST_ARGS" \
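And a sketch of the YARN/Kubernetes flow the new `LOCAL_ROOTDIR`/`INPUT_PATH` defaults enable, following the README's own `spark-submit --files` example. The HDFS path and directory name are hypothetical, and `SPARK_SUBMIT_FLAGS` is the script's existing hook for extra `spark-submit` options:

```shell
# Hypothetical staging locations; adjust to your cluster.
resource_path=hdfs:///tmp/rapids-test-resources   # cloud copy of the test resources
root_dir=rapids-test-resources                    # directory shipped to each executor

# 1. Upload the test resources onto the cloud path.
hadoop fs -mkdir -p "$resource_path"
hadoop fs -put -f integration_tests/src/test/resources "$resource_path"

# 2. Run with the pytest paths overridden; --files (per the README example)
#    transfers the resources to each executor's working directory.
LOCAL_ROOTDIR="$root_dir" INPUT_PATH="$resource_path" \
SPARK_SUBMIT_FLAGS="--files $root_dir" \
bash integration_tests/run_pyspark_from_build.sh
```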