diff --git a/jenkins/Jenkinsfile.databricksrelease b/jenkins/Jenkinsfile.databricksrelease new file mode 100644 index 00000000000..b111200b2dc --- /dev/null +++ b/jenkins/Jenkinsfile.databricksrelease @@ -0,0 +1,109 @@ +#!/usr/bin/env groovy +/* + * Copyright (c) 2020, NVIDIA CORPORATION. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** +* +* Jenkinsfile for building and deploying rapids-plugin for Databricks to public repo +* +*/ + +def SERVERS_MAP = [ + Local: ' ', + Sonatype: 'https://oss.sonatype.org/service/local/staging/deploy/maven2' +] + +def SEC_IDS = [ + Local: ['local-gpg-passphrase', 'local-gpg-private-key', 'local-username-password'], + Sonatype: ['rapids-gpg-passphrase', 'rapids-gpg-private-key', 'sonatype-username-password'] +] + +pipeline { + agent { + dockerfile { + label 'vanilla||docker-deploy||docker-gpu' + filename 'Dockerfile.ubuntu16' + dir "jenkins" + args '--runtime=nvidia -v ${HOME}/.m2:${HOME}/.m2:rw \ + -v ${HOME}/.zinc:${HOME}/.zinc:rw \ + -v /etc/passwd:/etc/passwd -v /etc/group:/etc/group' + } + } + + options { + ansiColor('xterm') + timeout(time: 120, unit: 'MINUTES') + buildDiscarder(logRotator(numToKeepStr: '10')) + } + + parameters { + choice(name: 'DEPLOY_TO', choices: ['Sonatype'], + description: 'Where to deploy artifacts to') + string(name: 'REF', defaultValue: 'branch-0.1', description: 'Commit to build') + } + + environment { + JENKINS_ROOT='jenkins' + 
IMAGE_NAME="urm.nvidia.com/sw-spark-docker/plugin:dev-ubuntu16-cuda10.1" + LIBCUDF_KERNEL_CACHE_PATH='/tmp/.cudf' + MVN_MIRROR='-s jenkins/settings.xml -P mirror-apache-to-urm' + URM_CREDS = credentials("svcngcc_artifactory") + DATABRICKS_TOKEN = credentials("SPARK_DATABRICKS_TOKEN") + DIST_PL='dist' + SQL_PL='sql-plugin' + } + + stages { + stage('Build') { + steps { + script { + sshagent(credentials : ['svcngcc_pubpriv']) { + sh "mvn versions:set -DnewVersion=0.1.0-databricks && git clean -d -f" + sh "patch -p1 < ./jenkins/databricks/dbimports.patch" + sh "tar -zcvf spark-rapids-ci.tgz * || true" + sh "python3.6 ./jenkins/databricks/run-tests.py -z ./spark-rapids-ci.tgz -t $DATABRICKS_TOKEN -p /home/svcngcc/.ssh/id_rsa -l ./jenkins/databricks/build.sh" + } + } + } + } + stage("Deploy") { + environment { + SERVER_ID='ossrh' + SERVER_URL="${SERVERS_MAP["$DEPLOY_TO"]}" + GPG_PASSPHRASE=credentials("${SEC_IDS["$DEPLOY_TO"][0]}") + GPG_FILE=credentials("${SEC_IDS["$DEPLOY_TO"][1]}") + SONATYPE=credentials("${SEC_IDS["$DEPLOY_TO"][2]}") + GNUPGHOME="${WORKSPACE}/.gnupg" + } + steps { + script { + sh 'rm -rf $GNUPGHOME' + sh 'gpg --import $GPG_FILE' + retry (3) { + sh "bash $JENKINS_ROOT/deploy.sh true true" + } + } + } + } + stage('Cleanup') { + steps { + script { + sh "python3.6 ./jenkins/databricks/shutdown.py -t $DATABRICKS_TOKEN" + } + } + } + } // End of stages +} // end of pipeline diff --git a/jenkins/Jenkinsfile.release b/jenkins/Jenkinsfile.release index b94ce69c17f..26e7883c29e 100644 --- a/jenkins/Jenkinsfile.release +++ b/jenkins/Jenkinsfile.release @@ -91,7 +91,7 @@ pipeline { sh 'rm -rf $GNUPGHOME' sh 'gpg --import $GPG_FILE' retry (3) { - sh "bash $JENKINS_ROOT/deploy.sh true" + sh "bash $JENKINS_ROOT/deploy.sh true false" } } } diff --git a/jenkins/deploy.sh b/jenkins/deploy.sh index eb9cfebd168..45dda0dad38 100755 --- a/jenkins/deploy.sh +++ b/jenkins/deploy.sh @@ -17,10 +17,21 @@ set -e SIGN_FILE=$1 +DATABRICKS=$2 ###### Build the path of jar(s) to be 
deployed ###### cd $WORKSPACE + +###### Databricks built tgz file so we need to untar and deploy from that +if [ "$DATABRICKS" == true ]; then + rm -rf deploy + mkdir -p deploy + cd deploy + tar -zxvf ../spark-rapids-built.tgz + cd spark-rapids +fi + ART_ID=`mvn exec:exec -q -pl $DIST_PL -Dexec.executable=echo -Dexec.args='${project.artifactId}'` ART_VER=`mvn exec:exec -q -pl $DIST_PL -Dexec.executable=echo -Dexec.args='${project.version}'`