Set spark.executor.cores for parallel tests as well.
mythrocks committed Sep 5, 2023
1 parent 97427b7 commit 09c2b33
Showing 1 changed file with 8 additions and 1 deletion.
9 changes: 8 additions & 1 deletion integration_tests/run_pyspark_from_build.sh
@@ -104,6 +104,11 @@ else
 then
 TEST_TAGS="-m $TEST_TAGS"
 fi
+
+# Set per-executor cores, if unspecified.
+# This prevents per-thread allocations (like Parquet read buffers) from overwhelming the heap.
+export PYSP_TEST_spark_executor_cores=${PYSP_TEST_spark_executor_cores:-'10'}
+
 if [[ "${TEST_PARALLEL}" == "" ]];
 then
 # For integration tests we want to have at least
@@ -334,6 +339,7 @@ EOF
 
 driverJavaOpts="$PYSP_TEST_spark_driver_extraJavaOptions"
 gpuAllocSize="$PYSP_TEST_spark_rapids_memory_gpu_allocSize"
+executorCores="$PYSP_TEST_spark_executor_cores"
 
 # avoid double processing of variables passed to spark in
 # spark_conf_init
@@ -343,12 +349,13 @@ EOF
 unset PYSP_TEST_spark_jars_packages
 unset PYSP_TEST_spark_jars_repositories
 unset PYSP_TEST_spark_rapids_memory_gpu_allocSize
+unset PYSP_TEST_spark_rapids_executor_cores
 
 exec "$SPARK_HOME"/bin/spark-submit "${jarOpts[@]}" \
 --driver-java-options "$driverJavaOpts" \
 $SPARK_SUBMIT_FLAGS \
 --conf 'spark.rapids.memory.gpu.allocSize='"$gpuAllocSize" \
---conf 'spark.executor.cores='"10" \
+--conf 'spark.executor.cores='"$executorCores" \
 "${RUN_TESTS_COMMAND[@]}" "${TEST_COMMON_OPTS[@]}"
 fi
 fi
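
For context, the first half of the change gives the per-executor core count a default via bash's ${VAR:-default} expansion: a value exported by the caller is kept, and 10 is used only when nothing is set. A minimal sketch of that behavior (the echo line and the override invocation are illustrative assumptions, not part of the commit):

    # Keep any caller-supplied value; otherwise fall back to 10 cores per executor.
    export PYSP_TEST_spark_executor_cores=${PYSP_TEST_spark_executor_cores:-'10'}
    echo "Parallel tests will request ${PYSP_TEST_spark_executor_cores} executor cores."

    # Hypothetical override when launching the integration tests:
    # PYSP_TEST_spark_executor_cores=4 ./integration_tests/run_pyspark_from_build.sh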
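
The second half forwards that value to the spark-submit invocation. A sketch of the pattern, assuming the surrounding script behaves as the diff context suggests: capture the PYSP_TEST_* value into a shell variable, unset it so spark_conf_init does not process it a second time, then pass it explicitly as a --conf (unrelated flags elided here):

    # Capture the value before the PYSP_TEST_* variables are unset.
    executorCores="$PYSP_TEST_spark_executor_cores"

    # Avoid double processing of the variable in spark_conf_init.
    unset PYSP_TEST_spark_executor_cores

    # Pass the value to spark-submit explicitly.
    exec "$SPARK_HOME"/bin/spark-submit \
        --conf 'spark.executor.cores='"$executorCores" \
        "${RUN_TESTS_COMMAND[@]}" "${TEST_COMMON_OPTS[@]}"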
