Update outdated GCP getting started guide [skip ci] (#5266)
* plugin version is out of date, update the versions and remove some configs

Signed-off-by: liyuan <yuali@nvidia.com>

* test markdown check issue

Signed-off-by: liyuan <yuali@nvidia.com>

* revert markdown links checker changes

Signed-off-by: liyuan <yuali@nvidia.com>

* update copyright

Signed-off-by: liyuan <yuali@nvidia.com>
nvliyuan authored Apr 24, 2022
1 parent 54a7c3e commit f772c40
Showing 1 changed file with 5 additions and 19 deletions.
24 changes: 5 additions & 19 deletions docs/get-started/gpu_dataproc_packages_ubuntu_sample.sh
@@ -1,5 +1,5 @@
#
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -126,12 +126,11 @@ function configure_gpu_isolation() {
chmod a+rwx -R /sys/fs/cgroup/devices
}

readonly RAPIDS_VERSION="0.19"
readonly DEFAULT_SPARK_RAPIDS_VERSION="0.5.0"
readonly DEFAULT_SPARK_RAPIDS_VERSION="22.02.0"
readonly DEFAULT_CUDA_VERSION="11.0"
readonly DEFAULT_CUDF_VERSION="0.19.2"
readonly DEFAULT_XGBOOST_VERSION="1.3.0"
readonly DEFAULT_XGBOOST_GPU_SUB_VERSION="0.1.0"
readonly DEFAULT_CUDF_VERSION="22.02.0"
readonly DEFAULT_XGBOOST_VERSION="1.4.2"
readonly DEFAULT_XGBOOST_GPU_SUB_VERSION="0.2.0"
readonly SPARK_VERSION="3.0"

readonly CUDF_VERSION=${DEFAULT_CUDF_VERSION}
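
These version variables feed the artifact download steps later in the script, which sit outside this diff. A minimal sketch of how they might be consumed, assuming the usual Maven Central URL pattern and /usr/lib/spark/jars/ as the target directory (both assumptions, not taken from this commit):

# Rough sketch (not from this commit): pull the RAPIDS Accelerator and cuDF jars
# that match the versions declared above into Spark's jars directory.
SPARK_RAPIDS_VERSION="${DEFAULT_SPARK_RAPIDS_VERSION}"
rapids_repo="https://repo1.maven.org/maven2"
wget -nv --timeout=30 --tries=3 \
  "${rapids_repo}/com/nvidia/rapids-4-spark_2.12/${SPARK_RAPIDS_VERSION}/rapids-4-spark_2.12-${SPARK_RAPIDS_VERSION}.jar" \
  -P /usr/lib/spark/jars/
wget -nv --timeout=30 --tries=3 \
  "${rapids_repo}/ai/rapids/cudf/${CUDF_VERSION}/cudf-${CUDF_VERSION}-cuda11.jar" \
  -P /usr/lib/spark/jars/

The exact paths and retry options may differ on a real Dataproc image; the point is only that the readonly versions above flow into the artifact URLs.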
@@ -168,23 +167,10 @@ function configure_spark() {
# The query explain output won't show GPU operators; if users have doubts,
# they can uncomment the line below to see the GPU plan explain output, but keeping AQE on gives the best performance.
# spark.sql.adaptive.enabled=false
spark.rapids.sql.concurrentGpuTasks=2
spark.executor.resource.gpu.amount=1
spark.executor.cores=4
spark.executor.memory=8G
spark.task.cpus=1
spark.task.resource.gpu.amount=0.25
spark.rapids.memory.pinnedPool.size=2G
spark.executor.memoryOverhead=2G
spark.plugins=com.nvidia.spark.SQLPlugin
spark.executor.extraJavaOptions='-Dai.rapids.cudf.prefer-pinned=true'
spark.locality.wait=0s
spark.executor.resource.gpu.discoveryScript=/usr/lib/spark/scripts/gpu/getGpusResources.sh
spark.sql.shuffle.partitions=48
spark.sql.files.maxPartitionBytes=512m
spark.submit.pyFiles=/usr/lib/spark/jars/xgboost4j-spark_${SPARK_VERSION}-${XGBOOST_VERSION}-${XGBOOST_GPU_SUB_VERSION}.jar
spark.dynamicAllocation.enabled=false
spark.shuffle.service.enabled=false
###### END : RAPIDS properties for Spark ${SPARK_VERSION} ######
EOF
}
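
configure_spark writes this property block into the cluster's Spark defaults (the heredoc target is outside this diff, presumably spark-defaults.conf), so tuning options trimmed from the defaults can still be supplied per job at submission time. A minimal sketch, assuming a PySpark application named your_app.py; the property values are illustrative only:

# Illustrative per-job overrides using properties from the block above;
# your_app.py is a placeholder application name, not part of this commit.
spark-submit \
  --conf spark.plugins=com.nvidia.spark.SQLPlugin \
  --conf spark.rapids.sql.concurrentGpuTasks=2 \
  --conf spark.task.resource.gpu.amount=0.25 \
  --conf spark.rapids.memory.pinnedPool.size=2G \
  your_app.py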
