diff --git a/jenkins/Jenkinsfile.301.integration b/jenkins/Jenkinsfile.301.integration
index 93188fde0c1..e2aad44a630 100644
--- a/jenkins/Jenkinsfile.301.integration
+++ b/jenkins/Jenkinsfile.301.integration
@@ -38,7 +38,7 @@ pipeline {
     parameters {
         string(name: 'OVERWRITE_PARAMS', defaultValue: '',
             description: 'parameters format XXX_VER=xxx;YYY_VER=yyy;')
-        string(name: 'REF', defaultValue: 'branch-0.2', description: 'Commit to build')
+        string(name: 'REF', defaultValue: 'branch-0.3', description: 'Commit to build')
     }
 
     environment {
diff --git a/jenkins/Jenkinsfile.302.integration b/jenkins/Jenkinsfile.302.integration
index d5258751369..9b7ff1fa48e 100644
--- a/jenkins/Jenkinsfile.302.integration
+++ b/jenkins/Jenkinsfile.302.integration
@@ -38,7 +38,7 @@ pipeline {
     parameters {
         string(name: 'OVERWRITE_PARAMS', defaultValue: '',
             description: 'parameters format XXX_VER=xxx;YYY_VER=yyy;')
-        string(name: 'REF', defaultValue: 'branch-0.2', description: 'Commit to build')
+        string(name: 'REF', defaultValue: 'branch-0.3', description: 'Commit to build')
     }
 
     environment {
diff --git a/jenkins/Jenkinsfile.310.integration b/jenkins/Jenkinsfile.310.integration
index 6a7299b44c9..f7c3d830944 100644
--- a/jenkins/Jenkinsfile.310.integration
+++ b/jenkins/Jenkinsfile.310.integration
@@ -38,7 +38,7 @@ pipeline {
     parameters {
         string(name: 'OVERWRITE_PARAMS', defaultValue: '',
             description: 'parameters format XXX_VER=xxx;YYY_VER=yyy;')
-        string(name: 'REF', defaultValue: 'branch-0.2', description: 'Commit to build')
+        string(name: 'REF', defaultValue: 'branch-0.3', description: 'Commit to build')
     }
 
     environment {
diff --git a/jenkins/Jenkinsfile.databricksnightly b/jenkins/Jenkinsfile.databricksnightly
index 4d56709b695..f8ead3def20 100644
--- a/jenkins/Jenkinsfile.databricksnightly
+++ b/jenkins/Jenkinsfile.databricksnightly
@@ -44,14 +44,14 @@ pipeline {
         choice(name: 'DEPLOY_TO', choices: ['Urm', 'Local'],
             description: 'Where to deploy artifacts to')
         string(name: 'DATABRICKS_VERSION',
-            defaultValue: '0.2.0-SNAPSHOT', description: 'Version to set')
+            defaultValue: '0.3.0-SNAPSHOT', description: 'Version to set')
         string(name: 'CUDF_VERSION',
             defaultValue: '0.15', description: 'Cudf version to use')
         string(name: 'CUDA_VERSION',
             defaultValue: 'cuda10-1', description: 'cuda version to use')
         string(name: 'CLUSTER_ID',
-            defaultValue: '0617-140138-umiak14', description: 'databricks cluster id')
-        string(name: 'REF', defaultValue: 'branch-0.2', description: 'Commit to build')
+            defaultValue: '0909-141326-pawl52', description: 'databricks cluster id')
+        string(name: 'REF', defaultValue: 'branch-0.3', description: 'Commit to build')
     }
 
     environment {
diff --git a/jenkins/Jenkinsfile.integration b/jenkins/Jenkinsfile.integration
index 82515534480..8890a30ed28 100644
--- a/jenkins/Jenkinsfile.integration
+++ b/jenkins/Jenkinsfile.integration
@@ -40,7 +40,7 @@ pipeline {
             description: 'parameters format XXX_VER=xxx;YYY_VER=yyy;')
         string(name: 'CUDA_CLASSIFIER', defaultValue: 'cuda10-1',
             description: '-Dclassifier=\n\n cuda10-1, cuda10-2, EMPTY as cuda10-1')
-        string(name: 'REF', defaultValue: 'branch-0.2', description: 'Commit to build')
+        string(name: 'REF', defaultValue: 'branch-0.3', description: 'Commit to build')
     }
 
     environment {
diff --git a/jenkins/Jenkinsfile.nightly b/jenkins/Jenkinsfile.nightly
index f4e471e9aac..aed8eee7355 100644
--- a/jenkins/Jenkinsfile.nightly
+++ b/jenkins/Jenkinsfile.nightly
@@ -34,7 +34,7 @@ pipeline {
     }
 
     parameters {
-        string(name: 'REF', defaultValue: 'branch-0.2', description: 'Commit to build')
+        string(name: 'REF', defaultValue: 'branch-0.3', description: 'Commit to build')
     }
 
     environment {
@@ -83,9 +83,9 @@ pipeline {
         always {
             script {
                 if (currentBuild.currentResult == "SUCCESS") {
-                    build(job: 'spark/rapids_integration-0.2-github',
+                    build(job: 'spark/rapids_integration-0.3-github',
                         propagate: false,
-                        parameters: [string(name: 'REF', value: 'branch-0.2')])
+                        parameters: [string(name: 'REF', value: 'branch-0.3')])
 
                     slack("#rapidsai-spark-cicd", "Success", color: "#33CC33")
                 } else {
diff --git a/jenkins/spark-premerge-build.sh b/jenkins/spark-premerge-build.sh
index f26b82cb9a6..5b2945e33ee 100755
--- a/jenkins/spark-premerge-build.sh
+++ b/jenkins/spark-premerge-build.sh
@@ -37,7 +37,7 @@ export PATH="$SPARK_HOME/bin:$SPARK_HOME/sbin:$PATH"
 tar zxf $SPARK_HOME.tgz -C $ARTF_ROOT && \
     rm -f $SPARK_HOME.tgz
 
-mvn -U -B $MVN_URM_MIRROR '-Pinclude-databricks,!snapshot-shims' clean verify -Dpytest.TEST_TAGS=''
+mvn -U -B $MVN_URM_MIRROR '-P!snapshot-shims' clean verify -Dpytest.TEST_TAGS=''
 # Run the unit tests for other Spark versions but dont run full python integration tests
 env -u SPARK_HOME mvn -U -B $MVN_URM_MIRROR -Pspark301tests,snapshot-shims test -Dpytest.TEST_TAGS=''
 env -u SPARK_HOME mvn -U -B $MVN_URM_MIRROR -Pspark310tests,snapshot-shims test -Dpytest.TEST_TAGS=''
diff --git a/jenkins/version-def.sh b/jenkins/version-def.sh
index 3e4ee417bcb..d2160d0d317 100755
--- a/jenkins/version-def.sh
+++ b/jenkins/version-def.sh
@@ -28,7 +28,7 @@ IFS=$PRE_IFS
 
 CUDF_VER=${CUDF_VER:-"0.15"}
 CUDA_CLASSIFIER=${CUDA_CLASSIFIER:-"cuda10-1"}
-PROJECT_VER=${PROJECT_VER:-"0.2.0-SNAPSHOT"}
+PROJECT_VER=${PROJECT_VER:-"0.3.0-SNAPSHOT"}
 SPARK_VER=${SPARK_VER:-"3.0.0"}
 SCALA_BINARY_VER=${SCALA_BINARY_VER:-"2.12"}
 SERVER_ID=${SERVER_ID:-"snapshots"}