Remove 30X premerge support for version 22.04 and above [skip ci] #4990

Merged 2 commits on Mar 21, 2022
jenkins/Dockerfile-blossom.ubuntu (3 changes: 2 additions & 1 deletion)
@@ -44,7 +44,8 @@ RUN python3.8 -m easy_install pip
# Set default jdk as 1.8.0
RUN update-java-alternatives --set /usr/lib/jvm/java-1.8.0-openjdk-amd64

RUN ln -s /usr/bin/python3.8 /usr/bin/python
RUN ln -sfn /usr/bin/python3.8 /usr/bin/python
RUN ln -sfn /usr/bin/python3.8 /usr/bin/python3
RUN python -m pip install pytest sre_yield requests pandas pyarrow findspark pytest-xdist pre-commit pytest-order

# libnuma1 and libgomp1 are required by ucx packaging
jenkins/Jenkinsfile-blossom.premerge (38 changes: 28 additions & 10 deletions)
@@ -1,6 +1,6 @@
#!/usr/local/env groovy
/*
* Copyright (c) 2020-2021, NVIDIA CORPORATION.
* Copyright (c) 2020-2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -44,6 +44,23 @@ def PREMERGE_CI_2_ARGUMENT // argument for 'spark-premerge-build.sh' running fro
def TestResult1 // keep ci_1 pytest result files that's in JUnit-XML style
def TestResult2 // keep ci_2 pytest result files that's in JUnit-XML style

def sourcePattern30X = 'shims/spark301db/src/main/scala/,' +
'shims/spark301/src/main/scala/,' +
'shims/spark302/src/main/scala/,' +
'shims/spark303/src/main/scala/,' +
'shims/spark304/src/main/scala/'
def sourcePattern31X = 'shims/spark311/src/main/scala/,' +
'shims/spark312/src/main/scala/,' +
'shims/spark313/src/main/scala/,' +
'shims/spark314/src/main/scala/'
def sourcePattern32X = 'shims/spark320/src/main/scala/,' +
'shims/spark321/src/main/scala/,' +
'shims/spark322/src/main/scala/'
def sourcePattern33X = 'shims/spark330/src/main/scala/'
def sourcePattern31XPlus = sourcePattern31X + ',' + sourcePattern32X + ',' + sourcePattern33X
def sourcePattern = 'shuffle-plugin/src/main/scala/,udf-compiler/src/main/scala/,' +
'sql-plugin/src/main/java/,sql-plugin/src/main/scala/'

// constant parameters for aws and azure databricks cluster
// CSP params
ID_HOST = 0
@@ -239,14 +256,20 @@ pipeline {
major_ver = versions[0].toInteger()
minor_ver = versions[1].toInteger()

sourcePattern = sourcePattern + ',' + sourcePattern31XPlus
// TODO: remove major version 21.XX pre-merge support
if (major_ver == 21) {
if (minor_ver == 8) {
PREMERGE_CI_2_ARGUMENT = "unit_test" // for '21.08' version
} else if (minor_ver >= 10) {
PREMERGE_CI_2_ARGUMENT = "ci_2" // for '21.10' or later version
}
sourcePattern = sourcePattern + ',' + sourcePattern30X
} else if (major_ver >= 22) {
PREMERGE_CI_2_ARGUMENT = "ci_2"
if (major_ver == 22 && minor_ver < 4) { // 22.02-
sourcePattern = sourcePattern + ',' + sourcePattern30X
}
} else {
error("Unsupported major version: $major_ver")
}
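
Note (illustrative, not part of the diff): a minimal Groovy sketch, assuming a standalone script with abbreviated pattern strings and a hypothetical resolvePattern helper, of how the branching above is expected to resolve the Jacoco source pattern: 30X shim sources are still appended for 21.XX and for 22.x releases before 22.04, but dropped for 22.04 and later.

// Hypothetical, self-contained sketch of the version gate above; the pattern
// strings are abbreviated stand-ins for the full lists in the Jenkinsfile.
def sourcePattern30X     = 'shims/spark301/src/main/scala/'   // abbreviated 30X list
def sourcePattern31XPlus = 'shims/spark311/src/main/scala/'   // abbreviated 31X+ list
def basePattern          = 'sql-plugin/src/main/scala/'       // abbreviated base pattern

def resolvePattern = { String version ->
    def (major, minor) = version.tokenize('.').collect { it.toInteger() }
    def pattern = basePattern + ',' + sourcePattern31XPlus
    // 30X shims only remain in coverage for 21.XX and for 22.x releases before 22.04
    if (major == 21 || (major == 22 && minor < 4)) {
        pattern += ',' + sourcePattern30X
    }
    pattern
}

assert resolvePattern('22.02').contains('spark301')    // 30X still covered
assert !resolvePattern('22.04').contains('spark301')   // 30X dropped for 22.04+
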
@@ -322,13 +345,7 @@ pipeline {
execPattern : '**/target/jacoco.exec',
classPattern : 'target/jacoco_classes/',
sourceInclusionPattern: '**/*.java,**/*.scala',
sourcePattern : 'shuffle-plugin/src/main/scala/,' +
'udf-compiler/src/main/scala/,sql-plugin/src/main/java/,' +
'sql-plugin/src/main/scala/,shims/spark311/src/main/scala/,' +
'shims/spark301db/src/main/scala/,shims/spark301/src/main/scala/,' +
'shims/spark302/src/main/scala/,shims/spark303/src/main/scala/,' +
'shims/spark304/src/main/scala/,shims/spark312/src/main/scala/,' +
'shims/spark313/src/main/scala/'
sourcePattern : sourcePattern
])
} finally {
// Save pytest result and publish to Jenkins at last
@@ -384,8 +401,9 @@ pipeline {
stage('DB runtime 7.3') {
when {
beforeAgent true
expression {
db_build && major_ver >= 21
anyOf {
expression { db_build && major_ver == 21 }
expression { db_build && major_ver == 22 && minor_ver <= 2 }
}
}

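
Note (illustrative, not part of the diff): the new anyOf gate on the 'DB runtime 7.3' stage is roughly equivalent to the single boolean check below, written as a hedged Groovy sketch with a hypothetical helper, assuming db_build, major_ver and minor_ver carry the values computed earlier in the pipeline.

// Hypothetical sketch of the stage gate; not part of the Jenkinsfile itself.
def shouldRunDbRuntime73 = { boolean dbBuild, int majorVer, int minorVer ->
    dbBuild && (majorVer == 21 || (majorVer == 22 && minorVer <= 2))
}

assert shouldRunDbRuntime73(true, 22, 2)     // 22.02 still runs DB runtime 7.3
assert !shouldRunDbRuntime73(true, 22, 4)    // 22.04 and later skip the stage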