Commit

Merge pull request NVIDIA#801 from NVIDIA/branch-0.2
[auto-merge] branch-0.2 to branch-0.3 [skip ci] [bot]
nvauto authored Sep 18, 2020
2 parents ed754ba + 4c73cb2 commit d145efe
Showing 4 changed files with 127 additions and 7 deletions.
110 changes: 110 additions & 0 deletions jenkins/Jenkinsfile.databricksrelease
@@ -0,0 +1,110 @@
#!/usr/bin/env groovy
/*
* Copyright (c) 2020, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

/**
*
* Jenkinsfile for building rapids-plugin on Databricks
*
*/
@Library(['shared-libs', 'spark-jenkins-shared-lib']) _

def urmUrl="https://${ArtifactoryConstants.ARTIFACTORY_NAME}/artifactory/sw-spark-maven"

pipeline {
agent {
docker {
label 'docker-gpu'
image "${ArtifactoryConstants.ARTIFACTORY_NAME}/sw-spark-docker/plugin:dev-ubuntu16-cuda10.1"
args '--runtime=nvidia -v ${HOME}/.m2:${HOME}/.m2:rw \
-v ${HOME}/.zinc:${HOME}/.zinc:rw'
}
}

options {
ansiColor('xterm')
timeout(time: 180, unit: 'MINUTES')
buildDiscarder(logRotator(numToKeepStr: '10'))
}

parameters {
choice(name: 'DEPLOY_TO', choices: ['Local', 'Urm'],
description: 'Where to deploy artifacts to')
string(name: 'DATABRICKS_VERSION',
defaultValue: '0.2.0', description: 'Version to set')
string(name: 'CUDF_VERSION',
defaultValue: '0.15', description: 'Cudf version to use')
string(name: 'CUDA_VERSION',
defaultValue: 'cuda10-1', description: 'cuda version to use')
string(name: 'CLUSTER_ID',
defaultValue: '0828-071715-knack867', description: 'databricks cluster id')
string(name: 'REF', defaultValue: 'main', description: 'Commit to build')
}

environment {
JENKINS_ROOT = 'jenkins'
MVN_URM_MIRROR='-s jenkins/settings.xml -P mirror-apache-to-urm'
LIBCUDF_KERNEL_CACHE_PATH='/tmp'
URM_CREDS = credentials("svcngcc_artifactory")
DATABRICKS_TOKEN = credentials("TIM_DATABRICKS_TOKEN")
DATABRICKS_SECRET = credentials("TIM_DATABRICKS_SECRET")
SCALA_VERSION = '2.12'
SPARK_VERSION = '3.0.0-databricks'
CI_RAPIDS_JAR = 'rapids-4-spark_2.12-0.1-SNAPSHOT-ci.jar'
CI_CUDF_JAR = 'cudf-0.14-cuda10-1.jar'
URM_URL = "${urmUrl}"
}

stages {
stage('Ubuntu16 CUDA10.1') {
steps {
script {
sshagent(credentials : ['svcngcc_pubpriv']) {
sh "rm -rf spark-rapids-ci.tgz"
sh "tar -zvcf spark-rapids-ci.tgz *"
sh "python3.6 ./jenkins/databricks/run-tests.py -c $CLUSTER_ID -z ./spark-rapids-ci.tgz -t $DATABRICKS_TOKEN -p $DATABRICKS_SECRET -l ./jenkins/databricks/build.sh -j $CI_RAPIDS_JAR -b $DATABRICKS_VERSION -k $SPARK_VERSION -a $SCALA_VERSION -f $CUDF_VERSION -u $CUDA_VERSION -m $CI_CUDF_JAR"
sh "./jenkins/databricks/deploy.sh"
}
}
}
}
} // end of stages
post {
always {
script {
sh "python3.6 ./jenkins/databricks/shutdown.py -c $CLUSTER_ID -t $DATABRICKS_TOKEN || true"
if (currentBuild.currentResult == "SUCCESS") {
slack("#swrapids-spark-cicd", "Success", color: "#33CC33")
} else {
slack("#swrapids-spark-cicd", "Failed", color: "#FF0000")
}
}
}
}
} // end of pipeline

void slack(Map params = [:], String channel, String message) {
Map defaultParams = [
color: "#000000",
baseUrl: "${SparkConstants.SLACK_API_ENDPOINT}",
tokenCredentialId: "slack_token"
]

params["channel"] = channel
params["message"] = "${BUILD_URL}\n" + message

slackSend(defaultParams << params)
}
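
Note: outside Jenkins, the stage above reduces to roughly the following shell sequence. This is a sketch, not the pipeline itself; the $-variables stand in for the job parameters and credentials bound in the environment block, not working values:

    # Sketch of the 'Ubuntu16 CUDA10.1' stage (assumes the Jenkins parameters
    # and credentials above are exported as plain environment variables).
    rm -rf spark-rapids-ci.tgz
    tar -zvcf spark-rapids-ci.tgz *        # pack the checked-out sources for upload
    python3.6 ./jenkins/databricks/run-tests.py -c "$CLUSTER_ID" -z ./spark-rapids-ci.tgz \
        -t "$DATABRICKS_TOKEN" -p "$DATABRICKS_SECRET" -l ./jenkins/databricks/build.sh \
        -j "$CI_RAPIDS_JAR" -b "$DATABRICKS_VERSION" -k "$SPARK_VERSION" -a "$SCALA_VERSION" \
        -f "$CUDF_VERSION" -u "$CUDA_VERSION" -m "$CI_CUDF_JAR"
    ./jenkins/databricks/deploy.sh
    # The post/always block then shuts the cluster down; '|| true' keeps a failed
    # shutdown from flipping the build result.
    python3.6 ./jenkins/databricks/shutdown.py -c "$CLUSTER_ID" -t "$DATABRICKS_TOKEN" || true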
4 changes: 3 additions & 1 deletion jenkins/Jenkinsfile.release
@@ -46,6 +46,7 @@ pipeline {
 MVN_MIRROR='-s jenkins/settings.xml -P mirror-apache-to-urm'
 DIST_PL='dist'
 SQL_PL='sql-plugin'
+TESTS_PL='integration_tests'
 URM_URL = "${urmUrl}"
 }

@@ -57,7 +58,8 @@ pipeline {
 sh "mkdir -p ${HOME}/.zinc"
 docker.image("$IMAGE_NAME").inside("--runtime=nvidia -v ${HOME}/.m2:${HOME}/.m2:rw \
 -v ${HOME}/.zinc:${HOME}/.zinc:rw") {
-sh "mvn -U -B clean install $MVN_MIRROR -P 'source-javadoc,!snapshot-shims'"
+sh "mvn dependency:purge-local-repository -DmanualInclude='com.nvidia:rapids-4-spark-shims-spark300-databricks_2.12' \
+-U -B clean install $MVN_MIRROR -P 'include-databricks,source-javadoc,!snapshot-shims'"
 }
 }
 }
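
The new purge step drops the cached Databricks shim from the local repository before the rebuild, presumably so `clean install` re-resolves the jar produced by the Databricks job instead of silently reusing a stale copy. As a standalone command it looks like the sketch below; the coordinate is the one from the diff, while `-DreResolve=false` is an optional extra (an assumption, not part of this commit) that skips the immediate re-download and lets the following build fetch the artifact instead:

    # Drop one artifact tree from ~/.m2/repository so the next build re-resolves it.
    mvn dependency:purge-local-repository \
        -DmanualInclude='com.nvidia:rapids-4-spark-shims-spark300-databricks_2.12' \
        -DreResolve=false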
1 change: 1 addition & 0 deletions jenkins/databricks/build.sh
@@ -41,6 +41,7 @@ rm -rf spark-rapids
 mkdir spark-rapids
 tar -zxvf $SPARKTGZ -C spark-rapids
 cd spark-rapids
+export WORKSPACE=`pwd`
 mvn -B '-Pdatabricks,!snapshot-shims' clean package -DskipTests || true
 M2DIR=/home/ubuntu/.m2/repository
 CUDF_JAR=${M2DIR}/ai/rapids/cudf/${CUDF_VERSION}/cudf-${CUDF_VERSION}-${CUDA_VERSION}.jar
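
The exported WORKSPACE presumably stands in for the Jenkins-provided variable that the script expects but which is absent on the Databricks host. For illustration, with the pipeline's default parameters (CUDF_VERSION=0.15, CUDA_VERSION=cuda10-1, both assumptions carried over from the Jenkinsfile above), the last two lines resolve the cudf jar to:

    # Path assembled from M2DIR plus the artifact coordinates, under default parameters.
    CUDF_VERSION=0.15; CUDA_VERSION=cuda10-1; M2DIR=/home/ubuntu/.m2/repository
    echo "${M2DIR}/ai/rapids/cudf/${CUDF_VERSION}/cudf-${CUDF_VERSION}-${CUDA_VERSION}.jar"
    # -> /home/ubuntu/.m2/repository/ai/rapids/cudf/0.15/cudf-0.15-cuda10-1.jar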
19 changes: 13 additions & 6 deletions jenkins/deploy.sh
@@ -44,8 +44,8 @@ if [ "$DATABRICKS" == true ]; then
 cd spark-rapids
 fi

-ART_ID=`mvn exec:exec -q -pl $DIST_PL -Dexec.executable=echo -Dexec.args='${project.artifactId}'`
-ART_VER=`mvn exec:exec -q -pl $DIST_PL -Dexec.executable=echo -Dexec.args='${project.version}'`
+ART_ID=`mvn help:evaluate -q -pl $DIST_PL -Dexpression=project.artifactId -DforceStdout`
+ART_VER=`mvn help:evaluate -q -pl $DIST_PL -Dexpression=project.version -DforceStdout`

 FPATH="$DIST_PL/target/$ART_ID-$ART_VER"

@@ -56,13 +56,13 @@ echo "Plan to deploy ${FPATH}.jar to $SERVER_URL (ID:$SERVER_ID)"

 if [ "$SIGN_FILE" == true ]; then
 # No javadoc or sources jars are generated for the shaded artifact; use the 'sql-plugin' ones instead
-SQL_ART_ID=`mvn exec:exec -q -pl $SQL_PL -Dexec.executable=echo -Dexec.args='${project.artifactId}'`
-SQL_ART_VER=`mvn exec:exec -q -pl $SQL_PL -Dexec.executable=echo -Dexec.args='${project.version}'`
+SQL_ART_ID=`mvn help:evaluate -q -pl $SQL_PL -Dexpression=project.artifactId -DforceStdout`
+SQL_ART_VER=`mvn help:evaluate -q -pl $SQL_PL -Dexpression=project.version -DforceStdout`
 JS_FPATH="${SQL_PL}/target/${SQL_ART_ID}-${SQL_ART_VER}"
 SRC_DOC_JARS="-Dsources=${JS_FPATH}-sources.jar -Djavadoc=${JS_FPATH}-javadoc.jar"
-DEPLOY_CMD="mvn -B '-Pinclude-databricks,!snapshot-shims' gpg:sign-and-deploy-file -s jenkins/settings.xml -Dgpg.passphrase=$GPG_PASSPHRASE"
+DEPLOY_CMD="mvn -B gpg:sign-and-deploy-file -s jenkins/settings.xml -Dgpg.passphrase=$GPG_PASSPHRASE"
 else
-DEPLOY_CMD="mvn -B '-Pinclude-databricks,!snapshot-shims' deploy:deploy-file -s jenkins/settings.xml"
+DEPLOY_CMD="mvn -B deploy:deploy-file -s jenkins/settings.xml"
 fi

 echo "Deploy CMD: $DEPLOY_CMD"
@@ -79,3 +79,10 @@ $DEPLOY_CMD -Durl=$SERVER_URL -DrepositoryId=$SERVER_ID \
 $DEPLOY_CMD -Durl=$SERVER_URL -DrepositoryId=$SERVER_ID \
 $SRC_DOC_JARS \
 -Dfile=$FPATH.jar -DpomFile=${DIST_PL}/dependency-reduced-pom.xml
+
+###### Deploy integration tests jar(s) ######
+TESTS_ART_ID=`mvn help:evaluate -q -pl $TESTS_PL -Dexpression=project.artifactId -DforceStdout`
+TESTS_ART_VER=`mvn help:evaluate -q -pl $TESTS_PL -Dexpression=project.version -DforceStdout`
+TESTS_FPATH="$TESTS_PL/target/$TESTS_ART_ID-$TESTS_ART_VER"
+$DEPLOY_CMD -Durl=$SERVER_URL -DrepositoryId=$SERVER_ID \
+-Dfile=$TESTS_FPATH.jar -DpomFile=${TESTS_PL}/pom.xml
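
Replacing the `exec:exec` echo trick with `help:evaluate` is the more idiomatic way to read a single POM property: with `-q -DforceStdout` the goal writes the bare value and nothing else to stdout, so it is safe to capture in backticks. A quick sanity check is sketched below; the 0.2.0 output is illustrative only, and `-DforceStdout` requires a reasonably recent maven-help-plugin (3.1.0 or newer):

    # Prints only the property value, with no surrounding Maven log output.
    mvn help:evaluate -q -pl dist -Dexpression=project.version -DforceStdout
    # e.g. -> 0.2.0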
