From f059caabeb69ed3383adf9c28be8faa6fb2e6835 Mon Sep 17 00:00:00 2001
From: Jason Lowe
Date: Tue, 8 Dec 2020 18:09:31 -0600
Subject: [PATCH] Use UTC instead of GMT

Signed-off-by: Jason Lowe
---
 integration_tests/README.md                 | 4 ++--
 integration_tests/run_pyspark_from_build.sh | 8 ++++----
 jenkins/spark-tests.sh                      | 4 ++--
 3 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/integration_tests/README.md b/integration_tests/README.md
index 82444459540..56126e9edb2 100644
--- a/integration_tests/README.md
+++ b/integration_tests/README.md
@@ -83,8 +83,8 @@ To make sure that the tests work properly you need to configure your cluster or
 The python framework cannot always do this for you because it risks overwriting other java options in the config.
 Please be sure that the following configs are set when running the tests.
 
- * `spark.driver.extraJavaOptions` should include `-Duser.timezone=GMT`
- * `spark.executor.extraJavaOptions` should include `-Duser.timezone=GMT`
+ * `spark.driver.extraJavaOptions` should include `-Duser.timezone=UTC`
+ * `spark.executor.extraJavaOptions` should include `-Duser.timezone=UTC`
  * `spark.sql.session.timeZone`=`UTC`
 
 ### Running in parallel
diff --git a/integration_tests/run_pyspark_from_build.sh b/integration_tests/run_pyspark_from_build.sh
index 22da7be6b74..c580776899a 100755
--- a/integration_tests/run_pyspark_from_build.sh
+++ b/integration_tests/run_pyspark_from_build.sh
@@ -81,8 +81,8 @@ else
     if [[ "${TEST_PARALLEL_OPTS}" != "" ]];
     then
         export PYSP_TEST_spark_driver_extraClassPath="${ALL_JARS// /:}"
-        export PYSP_TEST_spark_driver_extraJavaOptions="-ea -Duser.timezone=GMT $COVERAGE_SUBMIT_FLAGS"
-        export PYSP_TEST_spark_executor_extraJavaOptions='-ea -Duser.timezone=GMT'
+        export PYSP_TEST_spark_driver_extraJavaOptions="-ea -Duser.timezone=UTC $COVERAGE_SUBMIT_FLAGS"
+        export PYSP_TEST_spark_executor_extraJavaOptions='-ea -Duser.timezone=UTC'
         export PYSP_TEST_spark_ui_showConsoleProgress='false'
         export PYSP_TEST_spark_sql_session_timeZone='UTC'
         export PYSP_TEST_spark_sql_shuffle_partitions='12'
@@ -100,8 +100,8 @@ else
             "$@"
     else
         "$SPARK_HOME"/bin/spark-submit --jars "${ALL_JARS// /,}" \
-            --conf "spark.driver.extraJavaOptions=-ea -Duser.timezone=GMT $COVERAGE_SUBMIT_FLAGS" \
-            --conf 'spark.executor.extraJavaOptions=-ea -Duser.timezone=GMT' \
+            --conf "spark.driver.extraJavaOptions=-ea -Duser.timezone=UTC $COVERAGE_SUBMIT_FLAGS" \
+            --conf 'spark.executor.extraJavaOptions=-ea -Duser.timezone=UTC' \
             --conf 'spark.sql.session.timeZone=UTC' \
             --conf 'spark.sql.shuffle.partitions=12' \
             $SPARK_SUBMIT_FLAGS \
diff --git a/jenkins/spark-tests.sh b/jenkins/spark-tests.sh
index 68bfba2fdf3..8dca6a1e9bd 100755
--- a/jenkins/spark-tests.sh
+++ b/jenkins/spark-tests.sh
@@ -71,8 +71,8 @@ BASE_SPARK_SUBMIT_ARGS="--master spark://$HOSTNAME:7077 \
     --conf spark.sql.shuffle.partitions=12 \
    --conf spark.driver.extraClassPath=${CUDF_JAR}:${RAPIDS_PLUGIN_JAR} \
    --conf spark.executor.extraClassPath=${CUDF_JAR}:${RAPIDS_PLUGIN_JAR} \
-    --conf spark.driver.extraJavaOptions=-Duser.timezone=GMT \
-    --conf spark.executor.extraJavaOptions=-Duser.timezone=GMT \
+    --conf spark.driver.extraJavaOptions=-Duser.timezone=UTC \
+    --conf spark.executor.extraJavaOptions=-Duser.timezone=UTC \
     --conf spark.sql.session.timeZone=UTC"
 MORTGAGE_SPARK_SUBMIT_ARGS=" --conf spark.plugins=com.nvidia.spark.SQLPlugin \
    --class com.nvidia.spark.rapids.tests.mortgage.Main \
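
Note (not part of the patch): a minimal sketch of how to confirm the UTC settings the patch applies, assuming a local Spark installation with SPARK_HOME set. It launches a PySpark shell with the same configs and checks that both the SQL session time zone and the default JVM time zone report UTC.

    # Start a local PySpark shell with the UTC options used above
    "$SPARK_HOME"/bin/pyspark \
      --conf 'spark.driver.extraJavaOptions=-Duser.timezone=UTC' \
      --conf 'spark.executor.extraJavaOptions=-Duser.timezone=UTC' \
      --conf 'spark.sql.session.timeZone=UTC'

    # Inside the shell (Python):
    #   spark.conf.get("spark.sql.session.timeZone")                      # expect 'UTC'
    #   spark.sparkContext._jvm.java.util.TimeZone.getDefault().getID()   # expect 'UTC'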