Hide private info in Jenkins scripts (#330)
Hide urm maven/docker repo address in Jenkins scripts

To build on a local host against the urm repo, set the `URM_URL` environment variable and pass `-s jenkins/settings.xml`, e.g.

        export URM_URL=https://urm.nvidia.com:443/artifactory/sw-spark-maven
        mvn package -s jenkins/settings.xml
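
To build the CI Docker images locally, `URM_URL` now also has to be passed as a build arg, since the Dockerfiles declare `ARG URM_URL` with no default. A minimal sketch for the integration image (the CUDA_VER value and image tag here are illustrative):

        export URM_URL=https://urm.nvidia.com:443/artifactory/sw-spark-maven
        # URM_URL is only used inside the image to download apache-maven
        docker build -f jenkins/Dockerfile.integration.centos7 \
            --build-arg URM_URL=$URM_URL --build-arg CUDA_VER=10.1 \
            -t plugin:it-centos7-cuda10.1 .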

Co-authored-by: Tim Liu <timl@nvidia.com>
NvTimLiu authored Jul 8, 2020
1 parent 1e8c4fd commit 7376c47
Showing 13 changed files with 70 additions and 50 deletions.
3 changes: 2 additions & 1 deletion jenkins/Dockerfile.integration.centos7
@@ -33,7 +33,8 @@ RUN yum update -y && \
# The default mvn version is 3.0.5 on centos7 docker container.
# The plugin: net.alchim31.maven requires a higher mvn version.
ENV MAVEN_HOME "/usr/local/apache-maven-3.6.3"
RUN wget https://urm.nvidia.com/artifactory/sw-spark-maven/org/apache/maven/apache-maven/3.6.3/apache-maven-3.6.3-bin.tar.gz -P /usr/local && \
ARG URM_URL
RUN wget ${URM_URL}/org/apache/maven/apache-maven/3.6.3/apache-maven-3.6.3-bin.tar.gz -P /usr/local && \
tar xzvf $MAVEN_HOME-bin.tar.gz -C /usr/local && \
rm -f $MAVEN_HOME-bin.tar.gz

6 changes: 5 additions & 1 deletion jenkins/Jenkinsfile.databricksnightly
@@ -20,12 +20,15 @@
* Jenkinsfile for building rapids-plugin on Databricks
*
*/
@Library('shared-libs') _

def urmUrl="https://${ArtifactoryConstants.ARTIFACTORY_NAME}/artifactory/sw-spark-maven"

pipeline {
agent {
docker {
label 'docker-gpu'
image 'urm.nvidia.com/sw-spark-docker/plugin:dev-ubuntu16-cuda10.1'
image "${ArtifactoryConstants.ARTIFACTORY_NAME}/sw-spark-docker/plugin:dev-ubuntu16-cuda10.1"
args '--runtime=nvidia -v ${HOME}/.m2:${HOME}/.m2:rw \
-v ${HOME}/.zinc:${HOME}/.zinc:rw'
}
@@ -61,6 +64,7 @@ pipeline {
SPARK_VERSION = '3.0.0'
CI_RAPIDS_JAR = 'rapids-4-spark_2.12-0.1-SNAPSHOT-ci.jar'
CI_CUDF_JAR = 'cudf-0.14-cuda10-1.jar'
URM_URL = "${urmUrl}"
}

triggers {
5 changes: 4 additions & 1 deletion jenkins/Jenkinsfile.databricksrelease
@@ -20,6 +20,9 @@
* Jenkinsfile for building and deploy rapids-plugin for Databricks to public repo
*
*/
@Library('shared-libs') _

def urmUrl="https://${ArtifactoryConstants.ARTIFACTORY_NAME}/artifactory/sw-spark-maven"

def SERVERS_MAP = [
Local: ' ',
@@ -62,7 +65,6 @@ pipeline {

environment {
JENKINS_ROOT='jenkins'
IMAGE_NAME="urm.nvidia.com/sw-spark-docker/plugin:dev-ubuntu16-cuda10.1"
LIBCUDF_KERNEL_CACHE_PATH='/tmp/.cudf'
MVN_MIRROR='-s jenkins/settings.xml -P mirror-apache-to-urm'
URM_CREDS = credentials("svcngcc_artifactory")
@@ -73,6 +75,7 @@
SPARK_VERSION = '3.0.0'
CI_RAPIDS_JAR = 'rapids-4-spark_2.12-0.1-SNAPSHOT-ci.jar'
CI_CUDF_JAR = 'cudf-0.14-cuda10-1.jar'
LOCAL_URL = "${localUrl}"
}

stages {
26 changes: 16 additions & 10 deletions jenkins/Jenkinsfile.integration
@@ -21,6 +21,10 @@
*
*/

@Library('shared-libs') _

def urmUrl="https://${ArtifactoryConstants.ARTIFACTORY_NAME}/artifactory/sw-spark-maven"

pipeline {
agent none

@@ -42,8 +46,11 @@ pipeline {
environment {
JENKINS_ROOT = 'jenkins'
TEST_SCRIPT = '$JENKINS_ROOT/spark-tests.sh'
LIBCUDF_KERNEL_CACHE_PATH='/tmp'
LIBCUDF_KERNEL_CACHE_PATH='/tmp/.cudf'
URM_CREDS = credentials("svcngcc_artifactory")
ARTIFACTORY_NAME = "${ArtifactoryConstants.ARTIFACTORY_NAME}"
URM_URL = "${urmUrl}"
MVN_URM_MIRROR='-s jenkins/settings.xml -P mirror-apache-to-urm'
}

stages {
@@ -53,16 +60,15 @@
script {
def CUDA_NAME=sh(returnStdout: true,
script: '. jenkins/version-def.sh>&2 && echo -n $CUDA_CLASSIFIER | sed "s/-/./g"')
def IMAGE_NAME="urm.nvidia.com/sw-spark-docker/plugin:it-centos7-$CUDA_NAME"
def IMAGE_NAME="$ARTIFACTORY_NAME/sw-spark-docker/plugin:it-centos7-$CUDA_NAME"
def CUDA_VER="$CUDA_NAME" - "cuda"
def urmImage=docker.image(IMAGE_NAME)
urmImage.pull()
// docker inspect returns "sha256:urmImageID"
def urmImageID=sh(returnStdout: true, script: "docker inspect -f {{'.Id'}} $IMAGE_NAME") - "sha256:"
sh "docker pull $IMAGE_NAME"
def urmImageID=sh(returnStdout: true, script: "docker inspect -f {{'.Id'}} $IMAGE_NAME")
// Speed up Docker building via '--cache-from $IMAGE_NAME'
def buildImage=docker.build(IMAGE_NAME,
"-f jenkins/Dockerfile.integration.centos7 --build-arg CUDA_VER=$CUDA_VER --cache-from $IMAGE_NAME -t $IMAGE_NAME .")
def buildImageID=sh(returnStdout: true, script: "docker inspect -f {{'.Id'}} $IMAGE_NAME") - "sha256:"
"-f jenkins/Dockerfile.integration.centos7 --build-arg CUDA_VER=$CUDA_VER \
--build-arg URM_URL=$URM_URL --cache-from $IMAGE_NAME -t $IMAGE_NAME .")
def buildImageID=sh(returnStdout: true, script: "docker inspect -f {{'.Id'}} $IMAGE_NAME")
if (! buildImageID.equals(urmImageID)) {
echo "Dockerfile updated, upload docker image to URM"
uploadDocker(IMAGE_NAME)
@@ -96,9 +102,9 @@
void uploadDocker(String IMAGE_NAME) {
def DOCKER_CMD="docker --config $WORKSPACE/.docker"
sh """
echo $URM_CREDS_PSW | $DOCKER_CMD login https://urm.nvidia.com -u $URM_CREDS_USR --password-stdin
echo $URM_CREDS_PSW | $DOCKER_CMD login $ARTIFACTORY_NAME -u $URM_CREDS_USR --password-stdin
$DOCKER_CMD push $IMAGE_NAME
$DOCKER_CMD logout https://urm.nvidia.com
$DOCKER_CMD logout $ARTIFACTORY_NAME
"""
}

23 changes: 12 additions & 11 deletions jenkins/Jenkinsfile.nightly
@@ -20,6 +20,9 @@
* Jenkinsfile for building rapids-plugin
*
*/
@Library('shared-libs') _

def urmUrl="https://${ArtifactoryConstants.ARTIFACTORY_NAME}/artifactory/sw-spark-maven"

pipeline {
agent { label 'vanilla' }
@@ -31,16 +34,16 @@ pipeline {
}

parameters {
choice(name: 'DEPLOY_TO', choices: ['Urm', 'Local'],
description: 'Where to deploy artifacts to')
string(name: 'REF', defaultValue: 'branch-0.2', description: 'Commit to build')
}

environment {
JENKINS_ROOT = 'jenkins'
MVN_URM_MIRROR='-s jenkins/settings.xml -P mirror-apache-to-urm'
LIBCUDF_KERNEL_CACHE_PATH='/tmp'
LIBCUDF_KERNEL_CACHE_PATH='/tmp/.cudf'
URM_CREDS = credentials("svcngcc_artifactory")
ARTIFACTORY_NAME = "${ArtifactoryConstants.ARTIFACTORY_NAME}"
URM_URL = "${urmUrl}"
}

triggers {
@@ -54,16 +57,14 @@
script {
def CUDA_NAME=sh(returnStdout: true,
script: '. jenkins/version-def.sh>&2 && echo -n $CUDA_CLASSIFIER | sed "s/-/./g"')
def IMAGE_NAME="urm.nvidia.com/sw-spark-docker/plugin:dev-ubuntu16-$CUDA_NAME"
def IMAGE_NAME="$ARTIFACTORY_NAME/sw-spark-docker/plugin:dev-ubuntu16-$CUDA_NAME"
def CUDA_VER="$CUDA_NAME" - "cuda"
def urmImage=docker.image(IMAGE_NAME)
urmImage.pull()
// docker inspect returns "sha256:urmImageID"
def urmImageID=sh(returnStdout: true, script: "docker inspect -f {{'.Id'}} $IMAGE_NAME") - "sha256:"
sh "docker pull $IMAGE_NAME"
def urmImageID=sh(returnStdout: true, script: "docker inspect -f {{'.Id'}} $IMAGE_NAME")
// Speed up Docker building via '--cache-from $IMAGE_NAME'
def buildImage=docker.build(IMAGE_NAME,
"-f jenkins/Dockerfile.ubuntu16 --build-arg CUDA_VER=$CUDA_VER --cache-from $IMAGE_NAME -t $IMAGE_NAME .")
def buildImageID=sh(returnStdout: true, script: "docker inspect -f {{'.Id'}} $IMAGE_NAME") - "sha256:"
def buildImageID=sh(returnStdout: true, script: "docker inspect -f {{'.Id'}} $IMAGE_NAME")
if (! buildImageID.equals(urmImageID)) {
echo "Dockerfile updated, upload docker image to URM"
uploadDocker(IMAGE_NAME)
@@ -98,9 +99,9 @@
void uploadDocker(String IMAGE_NAME) {
def DOCKER_CMD="docker --config $WORKSPACE/.docker"
sh """
echo $URM_CREDS_PSW | $DOCKER_CMD login https://urm.nvidia.com -u $URM_CREDS_USR --password-stdin
echo $URM_CREDS_PSW | $DOCKER_CMD login $ARTIFACTORY_NAME -u $URM_CREDS_USR --password-stdin
$DOCKER_CMD push $IMAGE_NAME
$DOCKER_CMD logout https://urm.nvidia.com
$DOCKER_CMD logout $ARTIFACTORY_NAME
"""
}

11 changes: 8 additions & 3 deletions jenkins/Jenkinsfile.premerge
@@ -20,6 +20,9 @@
* Jenkinsfile for building rapids-plugin
*
*/
@Library('shared-libs') _

def urmUrl="https://${ArtifactoryConstants.ARTIFACTORY_NAME}/artifactory/sw-spark-maven"

pipeline {
agent { label 'vanilla' }
@@ -38,7 +41,9 @@
JENKINS_ROOT = 'jenkins'
BUILD_SCRIPT = '$JENKINS_ROOT/spark-premerge-build.sh'
MVN_URM_MIRROR='-s jenkins/settings.xml -P mirror-apache-to-urm'
LIBCUDF_KERNEL_CACHE_PATH='/tmp'
LIBCUDF_KERNEL_CACHE_PATH='/tmp/.cudf'
ARTIFACTORY_NAME = "${ArtifactoryConstants.ARTIFACTORY_NAME}"
URM_URL = "${urmUrl}"
}

stages {
@@ -48,13 +53,13 @@
script {
def CUDA_NAME=sh(returnStdout: true,
script: '. jenkins/version-def.sh>&2 && echo -n $CUDA_CLASSIFIER | sed "s/-/./g"')
def IMAGE_NAME="urm.nvidia.com/sw-spark-docker/plugin:dev-ubuntu16-$CUDA_NAME"
def IMAGE_NAME="$ARTIFACTORY_NAME/sw-spark-docker/plugin:dev-ubuntu16-$CUDA_NAME"
sh "docker pull $IMAGE_NAME"
def CUDA_VER="$CUDA_NAME" - "cuda"
def buildImage=docker.build(IMAGE_NAME,
"-f jenkins/Dockerfile.ubuntu16 --build-arg CUDA_VER=$CUDA_VER --cache-from $IMAGE_NAME -t $IMAGE_NAME .")
buildImage.inside("--runtime=nvidia -v ${HOME}/.m2:${HOME}/.m2:rw -v ${HOME}/.zinc:${HOME}/.zinc:rw") {
sh "bash $BUILD_SCRIPT $MVN_URM_MIRROR"
sh "bash $BUILD_SCRIPT"
step([$class: 'JacocoPublisher',
execPattern: '**/target/jacoco.exec',
classPattern: 'target/jacoco_classes/',
6 changes: 5 additions & 1 deletion jenkins/Jenkinsfile.release
@@ -20,6 +20,9 @@
* Jenkinsfile for building and deploy rapids-plugin to public repo
*
*/
@Library('shared-libs') _

def urmUrl="https://${ArtifactoryConstants.ARTIFACTORY_NAME}/artifactory/sw-spark-maven"

pipeline {
agent { label 'docker-deploy||docker-gpu' }
@@ -38,11 +41,12 @@

environment {
JENKINS_ROOT='jenkins'
IMAGE_NAME="urm.nvidia.com/sw-spark-docker/plugin:dev-ubuntu16-cuda10.1"
IMAGE_NAME="${ArtifactoryConstants.ARTIFACTORY_NAME}/sw-spark-docker/plugin:dev-ubuntu16-cuda10.1"
LIBCUDF_KERNEL_CACHE_PATH='/tmp/.cudf'
MVN_MIRROR='-s jenkins/settings.xml -P mirror-apache-to-urm'
DIST_PL='dist'
SQL_PL='sql-plugin'
URM_URL = "${urmUrl}"
}

stages {
2 changes: 1 addition & 1 deletion jenkins/databricks/deploy.sh
@@ -23,7 +23,7 @@ tar -zxvf ../spark-rapids-built.tgz
cd spark-rapids
echo "Maven mirror is $MVN_URM_MIRROR"
SERVER_ID='snapshots'
SERVER_URL='https://urm.nvidia.com:443/artifactory/sw-spark-maven-local'
SERVER_URL="$URM_URL-local"
FPATH=./dist/target/rapids-4-spark_$SCALA_VERSION-$DATABRICKS_VERSION.jar
mvn -B deploy:deploy-file $MVN_URM_MIRROR -Durl=$SERVER_URL -DrepositoryId=$SERVER_ID \
-Dfile=$FPATH -DpomFile=dist/pom.xml
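
The deploy target is now derived from the environment instead of being hard-coded. A sketch of the expansion, assuming the pipeline exports `URM_URL` as in the other Jenkinsfiles:

        # illustration only
        export URM_URL=https://urm.nvidia.com:443/artifactory/sw-spark-maven
        SERVER_URL="$URM_URL-local"
        echo "$SERVER_URL"   # https://urm.nvidia.com:443/artifactory/sw-spark-maven-local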
18 changes: 13 additions & 5 deletions jenkins/settings.xml
@@ -42,13 +42,13 @@
</snapshots>
<id>central</id>
<name>sw-spark-maven</name>
<url>https://urm.nvidia.com:443/artifactory/sw-spark-maven</url>
<url>${env.URM_URL}</url>
</repository>
<repository>
<snapshots />
<id>snapshots</id>
<name>sw-spark-maven</name>
<url>https://urm.nvidia.com:443/artifactory/sw-spark-maven</url>
<url>${env.URM_URL}</url>
</repository>
</repositories>
<pluginRepositories>
@@ -58,13 +58,13 @@
</snapshots>
<id>central</id>
<name>sw-spark-maven</name>
<url>https://urm.nvidia.com:443/artifactory/sw-spark-maven</url>
<url>${env.URM_URL}</url>
</pluginRepository>
<pluginRepository>
<snapshots />
<id>snapshots</id>
<name>sw-spark-maven</name>
<url>https://urm.nvidia.com:443/artifactory/sw-spark-maven</url>
<url>${env.URM_URL}</url>
</pluginRepository>
</pluginRepositories>
<id>artifactory</id>
@@ -75,12 +75,20 @@
<repository>
<id>apache.snapshots</id>
<name>sw-spark-maven</name>
<url>https://urm.nvidia.com:443/artifactory/sw-spark-maven</url>
<url>${env.URM_URL}</url>
</repository>
</repositories>
</profile>
<profile>
<id>deploy-to-urm</id>
<properties>
<altDeploymentRepository>snapshots::default::${env.URM_URL}-local</altDeploymentRepository>
</properties>
</profile>
</profiles>
<activeProfiles>
<activeProfile>artifactory</activeProfile>
<activeProfile>mirror-apache-to-urm</activeProfile>
<activeProfile>deploy-to-urm</activeProfile>
</activeProfiles>
</settings>
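
The new `deploy-to-urm` profile (listed in `activeProfiles`) points `altDeploymentRepository` at `${env.URM_URL}-local`, so deployment can be redirected without touching the POM. A sketch, assuming `URM_URL` is exported and credentials for the `snapshots` server id are configured:

        export URM_URL=https://urm.nvidia.com:443/artifactory/sw-spark-maven
        mvn -B deploy -s jenkins/settings.xml   # artifacts go to $URM_URL-local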
4 changes: 2 additions & 2 deletions jenkins/spark-premerge-build.sh
@@ -23,7 +23,7 @@ nvidia-smi

ARTF_ROOT="$WORKSPACE/.download"
MVN_GET_CMD="mvn org.apache.maven.plugins:maven-dependency-plugin:2.8:get -B \
-DremoteRepositories=$SERVER_URL \
$MVN_URM_MIRROR -DremoteRepositories=$URM_URL \
-Ddest=$ARTF_ROOT"

rm -rf $ARTF_ROOT && mkdir -p $ARTF_ROOT
@@ -37,7 +37,7 @@ export PATH="$SPARK_HOME/bin:$SPARK_HOME/sbin:$PATH"
tar zxf $SPARK_HOME.tar.gz -C $ARTF_ROOT && \
rm -f $SPARK_HOME.tar.gz

mvn -U -B "$@" clean verify -Dpytest.TEST_TAGS=''
mvn -U -B $MVN_URM_MIRROR clean verify -Dpytest.TEST_TAGS=''

# The jacoco coverage should have been collected, but because of how the shade plugin
# works and jacoco we need to clean some things up so jacoco will only report for the
2 changes: 1 addition & 1 deletion jenkins/spark-tests.sh
@@ -22,7 +22,7 @@ set -ex
ARTF_ROOT="$WORKSPACE/jars"
MVN_GET_CMD="mvn org.apache.maven.plugins:maven-dependency-plugin:2.8:get -B \
-Dmaven.repo.local=$WORKSPACE/.m2 \
-DremoteRepositories=$SERVER_URL \
$MVN_URM_MIRROR -DremoteRepositories=$URM_URL \
-Ddest=$ARTF_ROOT"

rm -rf $ARTF_ROOT && mkdir -p $ARTF_ROOT
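
In both spark-premerge-build.sh and spark-tests.sh, `$MVN_URM_MIRROR` expands to `-s jenkins/settings.xml -P mirror-apache-to-urm`, so a fully resolved dependency fetch looks roughly like the following (the cudf coordinates are illustrative, matching the CI jar names above):

        mvn org.apache.maven.plugins:maven-dependency-plugin:2.8:get -B \
            -s jenkins/settings.xml -P mirror-apache-to-urm \
            -DremoteRepositories=$URM_URL \
            -Dartifact=ai.rapids:cudf:0.14:jar:cuda10-1 \
            -Ddest=$WORKSPACE/jars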
3 changes: 1 addition & 2 deletions jenkins/version-def.sh
@@ -31,8 +31,7 @@ CUDA_CLASSIFIER=${CUDA_CLASSIFIER:-"cuda10-1"}
PROJECT_VER=${PROJECT_VER:-"0.2.0-SNAPSHOT"}
SPARK_VER=${SPARK_VER:-"3.0.0"}
SCALA_BINARY_VER=${SCALA_BINARY_VER:-"2.12"}
SERVER_URL=${SERVER_URL:-"https://urm.nvidia.com:443/artifactory/sw-spark-maven"}
SERVER_ID=${SERVER_ID:-"snapshots"}

echo "CUDF_VER: $CUDF_VER, CUDA_CLASSIFIER: $CUDA_CLASSIFIER, PROJECT_VER: $PROJECT_VER \
SPARK_VER: $SPARK_VER, SCALA_BINARY_VER: $SCALA_BINARY_VER, SERVER_URL: $SERVER_URL"
SPARK_VER: $SPARK_VER, SCALA_BINARY_VER: $SCALA_BINARY_VER"
11 changes: 0 additions & 11 deletions pom.xml
@@ -543,16 +543,5 @@
<enabled>true</enabled>
</snapshots>
</repository>
<repository>
<id>snapshots</id>
<name>sw-spark-maven</name>
<url>https://urm.nvidia.com:443/artifactory/sw-spark-maven</url>
</repository>
</repositories>
<distributionManagement>
<repository>
<id>snapshots</id>
<url>https://urm.nvidia.com:443/artifactory/sw-spark-maven-local</url>
</repository>
</distributionManagement>
</project>
