Integration tests on spark 3.0.1-SNAPSHOT & 3.1.0-SNAPSHOT (#335)
* Integration tests on spark 3.0.1-SNAPSHOT & 3.1.0-SNAPSHOT

Add Jenkinsfiles for integration tests on spark 3.0.1-SNAPSHOT & 3.1.0-SNAPSHOT

* Fix copyright and spark version annotations

Co-authored-by: Tim Liu <timl@nvidia.com>
NvTimLiu authored Jul 13, 2020
1 parent 46758ea commit 743924c
Showing 3 changed files with 199 additions and 1 deletion.
99 changes: 99 additions & 0 deletions jenkins/Jenkinsfile.301.integration
@@ -0,0 +1,99 @@
#!/usr/bin/env groovy
/*
* Copyright (c) 2020, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

/**
*
* Jenkinsfile for running Spark 3.0.1 integration tests
*
*/

@Library('shared-libs') _

def urmUrl="https://${ArtifactoryConstants.ARTIFACTORY_NAME}/artifactory/sw-spark-maven"

pipeline {
agent none

options {
ansiColor('xterm')
timestamps()
timeout(time: 240, unit: 'MINUTES')
buildDiscarder(logRotator(numToKeepStr: '10'))
}

parameters {
string(name: 'OVERWRITE_PARAMS', defaultValue: '',
description: 'parameters format XXX_VER=xxx;YYY_VER=yyy;')
string(name: 'REF', defaultValue: 'branch-0.2', description: 'Commit to build')
}

environment {
JENKINS_ROOT = 'jenkins'
TEST_SCRIPT = '$JENKINS_ROOT/spark-tests.sh'
LIBCUDF_KERNEL_CACHE_PATH='/tmp/.cudf'
ARTIFACTORY_NAME = "${ArtifactoryConstants.ARTIFACTORY_NAME}"
URM_URL = "${urmUrl}"
MVN_URM_MIRROR='-s jenkins/settings.xml -P mirror-apache-to-urm'
}

stages {
stage('IT on 3.0.1-SNAPSHOT') {
agent { label 'docker-gpu' }
environment {SPARK_VER='3.0.1-SNAPSHOT'}
steps {
script {
// the dashes in $CUDA_CLASSIFIER (set by version-def.sh) become dots, e.g. cuda10-1 -> cuda10.1
def CUDA_NAME=sh(returnStdout: true,
script: '. jenkins/version-def.sh>&2 && echo -n $CUDA_CLASSIFIER | sed "s/-/./g"')
def IMAGE_NAME="$ARTIFACTORY_NAME/sw-spark-docker/plugin:it-centos7-$CUDA_NAME"
// Groovy String subtraction removes the "cuda" prefix, e.g. cuda10.1 -> 10.1
def CUDA_VER="$CUDA_NAME" - "cuda"
sh "docker pull $IMAGE_NAME"
docker.image(IMAGE_NAME).inside("--runtime=nvidia -v ${HOME}/.zinc:${HOME}/.zinc:rw") {
sh "bash $TEST_SCRIPT"
}
}
}
}
} // end of stages
post {
always {
script {
def status = "failed"
if (currentBuild.currentResult == "SUCCESS") {
status = "success"
slack("#rapidsai-spark-cicd", "Success", color: "#33CC33")
}
else {
slack("#rapidsai-spark-cicd", "Failed", color: "#FF0000")
}
}
echo 'Pipeline finished!'
}
}
} // end of pipeline

void slack(Map params = [:], String channel, String message) {
Map defaultParams = [
color: "#000000",
baseUrl: "https://nvidia.slack.com/services/hooks/jenkins-ci/",
tokenCredentialId: "slack_token"
]

params["channel"] = channel
params["message"] = "${BUILD_URL}\n" + message

slackSend(defaultParams << params)
}
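For reference, a small plain-Groovy sketch (the notify name and sample values below are illustrative, not part of this commit) of the two Groovy features the slack() helper above relies on: named arguments in a call are collected into a Map and passed as the first parameter, and Map << Map copies the right-hand entries into the left map, so caller-supplied values override the defaults.

// standalone Groovy sketch, runnable outside Jenkins
def notify(Map params = [:], String channel, String message) {
    Map defaults = [color: "#000000", tokenCredentialId: "slack_token"]
    // '<<' merges params into defaults; keys supplied by the caller win
    Map merged = defaults << params
    merged["channel"] = channel
    merged["message"] = message
    return merged
}

// color: ... is gathered into the Map parameter, just like the slack(...) calls in the pipeline above
assert notify("#rapidsai-spark-cicd", "Success", color: "#33CC33").color == "#33CC33"
assert notify("#rapidsai-spark-cicd", "Failed").color == "#000000"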
99 changes: 99 additions & 0 deletions jenkins/Jenkinsfile.310.integration
@@ -0,0 +1,99 @@
#!/usr/bin/env groovy
/*
* Copyright (c) 2020, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

/**
*
* Jenkinsfile for running Spark 3.1.0 integration tests
*
*/

@Library('shared-libs') _

def urmUrl="https://${ArtifactoryConstants.ARTIFACTORY_NAME}/artifactory/sw-spark-maven"

pipeline {
agent none

options {
ansiColor('xterm')
timestamps()
timeout(time: 240, unit: 'MINUTES')
buildDiscarder(logRotator(numToKeepStr: '10'))
}

parameters {
string(name: 'OVERWRITE_PARAMS', defaultValue: '',
description: 'parameters format XXX_VER=xxx;YYY_VER=yyy;')
string(name: 'REF', defaultValue: 'branch-0.2', description: 'Commit to build')
}

environment {
JENKINS_ROOT = 'jenkins'
TEST_SCRIPT = '$JENKINS_ROOT/spark-tests.sh'
LIBCUDF_KERNEL_CACHE_PATH='/tmp/.cudf'
ARTIFACTORY_NAME = "${ArtifactoryConstants.ARTIFACTORY_NAME}"
URM_URL = "${urmUrl}"
MVN_URM_MIRROR='-s jenkins/settings.xml -P mirror-apache-to-urm'
}

stages {
stage('IT on 3.1.0-SNAPSHOT') {
agent { label 'docker-gpu' }
environment {SPARK_VER='3.1.0-SNAPSHOT'}
steps {
script {
// the dashes in $CUDA_CLASSIFIER (set by version-def.sh) become dots, e.g. cuda10-1 -> cuda10.1
def CUDA_NAME=sh(returnStdout: true,
script: '. jenkins/version-def.sh>&2 && echo -n $CUDA_CLASSIFIER | sed "s/-/./g"')
def IMAGE_NAME="$ARTIFACTORY_NAME/sw-spark-docker/plugin:it-centos7-$CUDA_NAME"
// Groovy String subtraction removes the "cuda" prefix, e.g. cuda10.1 -> 10.1
def CUDA_VER="$CUDA_NAME" - "cuda"
sh "docker pull $IMAGE_NAME"
docker.image(IMAGE_NAME).inside("--runtime=nvidia -v ${HOME}/.zinc:${HOME}/.zinc:rw") {
sh "bash $TEST_SCRIPT"
}
}
}
}
} // end of stages
post {
always {
script {
def status = "failed"
if (currentBuild.currentResult == "SUCCESS") {
status = "success"
slack("#rapidsai-spark-cicd", "Success", color: "#33CC33")
}
else {
slack("#rapidsai-spark-cicd", "Failed", color: "#FF0000")
}
}
echo 'Pipeline finished!'
}
}
} // end of pipeline

void slack(Map params = [:], String channel, String message) {
Map defaultParams = [
color: "#000000",
baseUrl: "https://nvidia.slack.com/services/hooks/jenkins-ci/",
tokenCredentialId: "slack_token"
]

params["channel"] = channel
params["message"] = "${BUILD_URL}\n" + message

slackSend(defaultParams << params)
}
2 changes: 1 addition & 1 deletion jenkins/version-def.sh
@@ -22,7 +22,7 @@ set -e
PRE_IFS=$IFS
IFS=";"
for VAR in $OVERWRITE_PARAMS;do
-echo $VAR && declare $VAR
+echo $VAR && export $VAR
done
IFS=$PRE_IFS
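For context, a hedged bash sketch (the sample OVERWRITE_PARAMS value is illustrative) of how the loop above consumes the OVERWRITE_PARAMS job parameter; switching from declare to export means the overridden variables also land in the environment of any child process the scripts start later, such as spark-tests.sh.

#!/bin/bash
# sketch only, not part of the commit; mirrors the loop in version-def.sh
OVERWRITE_PARAMS="SPARK_VER=3.0.1-SNAPSHOT;CUDA_CLASSIFIER=cuda10-1;"

PRE_IFS=$IFS
IFS=";"
for VAR in $OVERWRITE_PARAMS; do
    # 'export' (unlike 'declare') marks the variable for the environment,
    # so child processes can see the override
    [ -n "$VAR" ] && echo "$VAR" && export "$VAR"
done
IFS=$PRE_IFS

bash -c 'echo "child process sees SPARK_VER=$SPARK_VER"'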

