From 4c73cb2befbec917983a13b5ffa49037282ad22a Mon Sep 17 00:00:00 2001
From: NvTimLiu <50287591+NvTimLiu@users.noreply.github.com>
Date: Fri, 18 Sep 2020 09:02:45 +0800
Subject: [PATCH] Update Jenkins scripts for release (#793)

* Update Jenkins scripts for release

1. Deploy the integration_tests jar onto the internal Maven repo

2. Purge the Databricks shims jar from the local Maven repo to make sure
   the latest version gets downloaded

3. Get version info via 'mvn help:evaluate', since 'mvn exec:exec' doesn't
   work for the shaded project here

Signed-off-by: Tim Liu

* Remove -P options from both of the mvn deploy cmds

Co-authored-by: Tim Liu
---
 jenkins/Jenkinsfile.databricksrelease | 110 ++++++++++++++++++++++++++
 jenkins/Jenkinsfile.release           |   4 +-
 jenkins/databricks/build.sh           |   1 +
 jenkins/deploy.sh                     |  19 +++--
 4 files changed, 127 insertions(+), 7 deletions(-)
 create mode 100644 jenkins/Jenkinsfile.databricksrelease

diff --git a/jenkins/Jenkinsfile.databricksrelease b/jenkins/Jenkinsfile.databricksrelease
new file mode 100644
index 00000000000..332dde39a84
--- /dev/null
+++ b/jenkins/Jenkinsfile.databricksrelease
@@ -0,0 +1,110 @@
+#!/usr/bin/env groovy
+/*
+ * Copyright (c) 2020, NVIDIA CORPORATION.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+*
+* Jenkinsfile for building rapids-plugin on Databricks
+*
+*/
+@Library(['shared-libs', 'spark-jenkins-shared-lib']) _
+
+def urmUrl="https://${ArtifactoryConstants.ARTIFACTORY_NAME}/artifactory/sw-spark-maven"
+
+pipeline {
+    agent {
+        docker {
+            label 'docker-gpu'
+            image "${ArtifactoryConstants.ARTIFACTORY_NAME}/sw-spark-docker/plugin:dev-ubuntu16-cuda10.1"
+            args '--runtime=nvidia -v ${HOME}/.m2:${HOME}/.m2:rw \
+                -v ${HOME}/.zinc:${HOME}/.zinc:rw'
+        }
+    }
+
+    options {
+        ansiColor('xterm')
+        timeout(time: 180, unit: 'MINUTES')
+        buildDiscarder(logRotator(numToKeepStr: '10'))
+    }
+
+    parameters {
+        choice(name: 'DEPLOY_TO', choices: ['Local', 'Urm'],
+            description: 'Where to deploy artifacts to')
+        string(name: 'DATABRICKS_VERSION',
+            defaultValue: '0.2.0', description: 'Version to set')
+        string(name: 'CUDF_VERSION',
+            defaultValue: '0.15', description: 'cuDF version to use')
+        string(name: 'CUDA_VERSION',
+            defaultValue: 'cuda10-1', description: 'CUDA version to use')
+        string(name: 'CLUSTER_ID',
+            defaultValue: '0828-071715-knack867', description: 'Databricks cluster ID')
+        string(name: 'REF', defaultValue: 'main', description: 'Commit to build')
+    }
+
+    environment {
+        JENKINS_ROOT = 'jenkins'
+        MVN_URM_MIRROR = '-s jenkins/settings.xml -P mirror-apache-to-urm'
+        LIBCUDF_KERNEL_CACHE_PATH = '/tmp'
+        URM_CREDS = credentials("svcngcc_artifactory")
+        DATABRICKS_TOKEN = credentials("TIM_DATABRICKS_TOKEN")
+        DATABRICKS_SECRET = credentials("TIM_DATABRICKS_SECRET")
+        SCALA_VERSION = '2.12'
+        SPARK_VERSION = '3.0.0-databricks'
+        CI_RAPIDS_JAR = 'rapids-4-spark_2.12-0.1-SNAPSHOT-ci.jar'
+        CI_CUDF_JAR = 'cudf-0.14-cuda10-1.jar'
+        URM_URL = "${urmUrl}"
+    }
+
+    stages {
+        stage('Ubuntu16 CUDA10.1') {
+            steps {
+                script {
+                    sshagent(credentials : ['svcngcc_pubpriv']) {
+                        sh "rm -rf spark-rapids-ci.tgz"
+                        sh "tar -zvcf spark-rapids-ci.tgz *"
+                        sh "python3.6 ./jenkins/databricks/run-tests.py -c $CLUSTER_ID -z ./spark-rapids-ci.tgz -t $DATABRICKS_TOKEN -p $DATABRICKS_SECRET -l ./jenkins/databricks/build.sh -j $CI_RAPIDS_JAR -b $DATABRICKS_VERSION -k $SPARK_VERSION -a $SCALA_VERSION -f $CUDF_VERSION -u $CUDA_VERSION -m $CI_CUDF_JAR"
+                        sh "./jenkins/databricks/deploy.sh"
+                    }
+                }
+            }
+        }
+    } // end of stages
+
+    post {
+        always {
+            script {
+                sh "python3.6 ./jenkins/databricks/shutdown.py -c $CLUSTER_ID -t $DATABRICKS_TOKEN || true"
+                if (currentBuild.currentResult == "SUCCESS") {
+                    slack("#swrapids-spark-cicd", "Success", color: "#33CC33")
+                } else {
+                    slack("#swrapids-spark-cicd", "Failed", color: "#FF0000")
+                }
+            }
+        }
+    }
+} // end of pipeline
+
+void slack(Map params = [:], String channel, String message) {
+    Map defaultParams = [
+        color: "#000000",
+        baseUrl: "${SparkConstants.SLACK_API_ENDPOINT}",
+        tokenCredentialId: "slack_token"
+    ]
+
+    params["channel"] = channel
+    params["message"] = "${BUILD_URL}\n" + message
+
+    slackSend(defaultParams << params)
+}
diff --git a/jenkins/Jenkinsfile.release b/jenkins/Jenkinsfile.release
index 69a55c82d80..8ff8c49a6a7 100644
--- a/jenkins/Jenkinsfile.release
+++ b/jenkins/Jenkinsfile.release
@@ -46,6 +46,7 @@ pipeline {
         MVN_MIRROR='-s jenkins/settings.xml -P mirror-apache-to-urm'
         DIST_PL='dist'
         SQL_PL='sql-plugin'
+        TESTS_PL='integration_tests'
         URM_URL = "${urmUrl}"
     }
 
@@ -57,7 +58,8 @@ pipeline {
                     sh "mkdir -p ${HOME}/.zinc"
                     docker.image("$IMAGE_NAME").inside("--runtime=nvidia -v ${HOME}/.m2:${HOME}/.m2:rw \
                         -v ${HOME}/.zinc:${HOME}/.zinc:rw") {
-                        sh "mvn -U -B clean install $MVN_MIRROR -P 'source-javadoc,!snapshot-shims'"
+                        sh "mvn dependency:purge-local-repository -DmanualInclude='com.nvidia:rapids-4-spark-shims-spark300-databricks_2.12' \
+                            -U -B clean install $MVN_MIRROR -P 'include-databricks,source-javadoc,!snapshot-shims'"
                     }
                 }
             }
diff --git a/jenkins/databricks/build.sh b/jenkins/databricks/build.sh
index 5b9bb401c40..ac0b53eea5f 100755
--- a/jenkins/databricks/build.sh
+++ b/jenkins/databricks/build.sh
@@ -41,6 +41,7 @@ rm -rf spark-rapids
 mkdir spark-rapids
 tar -zxvf $SPARKTGZ -C spark-rapids
 cd spark-rapids
+export WORKSPACE=`pwd`
 mvn -B '-Pdatabricks,!snapshot-shims' clean package -DskipTests || true
 M2DIR=/home/ubuntu/.m2/repository
 CUDF_JAR=${M2DIR}/ai/rapids/cudf/${CUDF_VERSION}/cudf-${CUDF_VERSION}-${CUDA_VERSION}.jar
diff --git a/jenkins/deploy.sh b/jenkins/deploy.sh
index 4ac0777e0fe..1572c7ac5e6 100755
--- a/jenkins/deploy.sh
+++ b/jenkins/deploy.sh
@@ -44,8 +44,8 @@ if [ "$DATABRICKS" == true ]; then
     cd spark-rapids
 fi
 
-ART_ID=`mvn exec:exec -q -pl $DIST_PL -Dexec.executable=echo -Dexec.args='${project.artifactId}'`
-ART_VER=`mvn exec:exec -q -pl $DIST_PL -Dexec.executable=echo -Dexec.args='${project.version}'`
+ART_ID=`mvn help:evaluate -q -pl $DIST_PL -Dexpression=project.artifactId -DforceStdout`
+ART_VER=`mvn help:evaluate -q -pl $DIST_PL -Dexpression=project.version -DforceStdout`
 
 FPATH="$DIST_PL/target/$ART_ID-$ART_VER"
 
@@ -56,13 +56,13 @@ echo "Plan to deploy ${FPATH}.jar to $SERVER_URL (ID:$SERVER_ID)"
 
 if [ "$SIGN_FILE" == true ]; then
     # No javadoc and sources jar is generated for shade artifact only. Use 'sql-plugin' instead
-    SQL_ART_ID=`mvn exec:exec -q -pl $SQL_PL -Dexec.executable=echo -Dexec.args='${project.artifactId}'`
-    SQL_ART_VER=`mvn exec:exec -q -pl $SQL_PL -Dexec.executable=echo -Dexec.args='${project.version}'`
+    SQL_ART_ID=`mvn help:evaluate -q -pl $SQL_PL -Dexpression=project.artifactId -DforceStdout`
+    SQL_ART_VER=`mvn help:evaluate -q -pl $SQL_PL -Dexpression=project.version -DforceStdout`
     JS_FPATH="${SQL_PL}/target/${SQL_ART_ID}-${SQL_ART_VER}"
     SRC_DOC_JARS="-Dsources=${JS_FPATH}-sources.jar -Djavadoc=${JS_FPATH}-javadoc.jar"
-    DEPLOY_CMD="mvn -B '-Pinclude-databricks,!snapshot-shims' gpg:sign-and-deploy-file -s jenkins/settings.xml -Dgpg.passphrase=$GPG_PASSPHRASE"
+    DEPLOY_CMD="mvn -B gpg:sign-and-deploy-file -s jenkins/settings.xml -Dgpg.passphrase=$GPG_PASSPHRASE"
 else
-    DEPLOY_CMD="mvn -B '-Pinclude-databricks,!snapshot-shims' deploy:deploy-file -s jenkins/settings.xml"
+    DEPLOY_CMD="mvn -B deploy:deploy-file -s jenkins/settings.xml"
 fi
 
 echo "Deploy CMD: $DEPLOY_CMD"
 
@@ -79,3 +79,10 @@ $DEPLOY_CMD -Durl=$SERVER_URL -DrepositoryId=$SERVER_ID \
 $DEPLOY_CMD -Durl=$SERVER_URL -DrepositoryId=$SERVER_ID \
             $SRC_DOC_JARS \
             -Dfile=$FPATH.jar -DpomFile=${DIST_PL}/dependency-reduced-pom.xml
+
+###### Deploy integration tests jar(s) ######
+TESTS_ART_ID=`mvn help:evaluate -q -pl $TESTS_PL -Dexpression=project.artifactId -DforceStdout`
+TESTS_ART_VER=`mvn help:evaluate -q -pl $TESTS_PL -Dexpression=project.version -DforceStdout`
+TESTS_FPATH="$TESTS_PL/target/$TESTS_ART_ID-$TESTS_ART_VER"
+$DEPLOY_CMD -Durl=$SERVER_URL -DrepositoryId=$SERVER_ID \
+            -Dfile=$TESTS_FPATH.jar -DpomFile=${TESTS_PL}/pom.xml
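
A minimal sketch of the version-lookup switch described in point 3 of the
commit message (illustrative only, not part of the applied diff; the module
name 'dist' is taken from DIST_PL above, and reading project.version is an
assumed example):

    #!/bin/bash
    # Old approach: have Maven spawn 'echo' so the property gets expanded.
    # This is the pattern that broke for the shaded project.
    ART_VER=`mvn exec:exec -q -pl dist -Dexec.executable=echo -Dexec.args='${project.version}'`

    # New approach: evaluate the POM expression directly. With -q plus
    # -DforceStdout (maven-help-plugin 3.1.0+), only the value reaches stdout.
    ART_VER=`mvn help:evaluate -q -pl dist -Dexpression=project.version -DforceStdout`
    echo "dist artifact version: $ART_VER"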