// CI: Revise pipelines, use "integration" pipeline by default
// ...and "build" pipeline for feature branches.
/*
* Jenkins Pipeline definitions for Smalltalk/X jv-branch
*
* Following pipelines are defined here:
*
* * `build()` - compiles and tests all supported configurations
* No artifacts are archived
*
* * `integration()` - like `build()` pipeline; when all tests pass on all
* configurations, archive artifacts and push staged changes to upstream.
*/
/*
* Supported configurations
*/
// Build matrix axes. Every combination of NODE (Jenkins agent label)
// and ARCH is built and tested by the pipelines below.
configurations = [
'NODE': ['windows' , 'linux' ],
'ARCH': ['i386', 'x86_64' ]
]
/*
* "Default" pipeline:
*/
def build() {
// Phase 1: checkout and compile every configuration, in parallel.
stage ( "Build" ) {
matrix ( configurations ) {
stage ( "Checkout - {${env.NODE}-${env.ARCH}} " ) {
// Load the job's SCM ssh key so `hg` can reach the origin repository.
sshagent([ scm.getCredentialsId() ]) {
// Clone on first run, pull afterwards - the presence of Jenkinsfile.rb
// marks an already-initialized workspace - then update to the branch
// under build.
sh """
if [ -f Jenkinsfile.rb ]; then
hg pull --ssh ssh ${scm.getSource()}
else
hg clone --ssh ssh ${scm.getSource()} .
fi
hg up ${branch()}
"""
// "update" step of the Ruby build driver; presumably refreshes the
// build tree for the given arch - TODO confirm against Jenkinsfile.rb.
sh "ruby Jenkinsfile.rb --project \"stx:jv-branch\" --arch ${env.ARCH} update"
}
}
stage ( "Compile - {${env.NODE}-${env.ARCH}}") {
sh "ruby Jenkinsfile.rb --project \"stx:jv-branch\" compile"
}
}
}
// Phase 2: run the test suites on every configuration, in parallel.
stage ( "Test" ) {
matrix ( configurations ) {
stage ( "Test - {${env.NODE}-${env.ARCH}}") {
sh "ruby Jenkinsfile.rb --project \"stx:jv-branch\" test"
// Record JUnit-format results; a test failure marks the build UNSTABLE
// (checked by the integration() pipeline).
junit allowEmptyResults: true, testResults: "reports/*build${env.BUILD_NUMBER}*.xml"
}
}
}
}
/*
* "Integration" pipeline
*/
/*
 * Full build & test; if (and only if) everything passed, archive
 * artifacts and push staged changes to upstream.
 */
def integration() {
    build()
    /*
     * Archive and push only when no test failed - there is no point
     * in publishing a broken build.
     */
    if (currentBuild.result != 'UNSTABLE') {
        artifacts()
        push_to_upstream()
    }
}
/*
 * Compute the cartesian product of all configuration axes.
 *
 * `configurations` maps an axis name (e.g. 'NODE') to the list of its
 * possible values. Returns a list of maps, one per combination, each
 * binding every axis name to one value. The trailing parameters carry
 * the recursion state and are not meant to be passed by callers.
 */
def combine(configurations, axes = null, axis = 0, partial = new HashMap(), combinations = []) {
    def axisNames = (axes != null) ? axes : configurations.keySet().toArray()
    if (axis >= axisNames.length) {
        // Every axis has been bound - record the finished combination.
        combinations.add(partial)
    } else {
        def name = axisNames[axis]
        for (choice in configurations[name]) {
            // Extend a copy of the partial binding and recurse into the
            // next axis.
            def extended = partial.clone()
            extended[name] = choice
            combine(configurations, axisNames, axis + 1, extended, combinations)
        }
    }
    return combinations
}
/*
 * Run `block` once for every combination of `configurations` axes,
 * in parallel - one branch per combination, failing fast.
 *
 * Each branch allocates a node matching conf.NODE, exports the axis
 * values (NODE, ARCH, ...) as environment variables and executes the
 * block inside an arch-specific workspace.
 */
def matrix(configurations, block) {
    def combinations = combine(configurations).toArray()
    def branches = [failFast: true]
    // Declare the loop variable with `def` so it stays local to this
    // function; an undeclared `i` would land in the shared script
    // binding. (The original also declared an unused `index` local,
    // removed here.)
    for (def i = 0; i < combinations.length; i++) {
        // Capture the current combination in a per-iteration local so
        // the closure below sees its own value - closures capture
        // variables, not values.
        def conf = combinations[i]
        branches["${conf.NODE}-${conf.ARCH}"] = {
            node ( conf.NODE ) {
                def newEnv = []
                for (k in conf.keySet()) {
                    newEnv.add("${k}=${conf[k]}")
                }
                withEnv ( newEnv ) {
                    ws ("workspace/${env.JOB_NAME}/${env.ARCH}") {
                        block()
                    }
                }
            }
        }
    }
    parallel branches
}
/*
 * Run `block` once, on a single configuration - the first-listed value
 * of every axis. Useful for steps that must run on some node but need
 * not run on every configuration (e.g. pushing to upstream).
 */
def any(configurations, block) {
    // Pick the first value of each axis.
    def conf = [:]
    configurations.each { axis, values -> conf[axis] = values[0] }
    node ( conf.NODE ) {
        // Export the chosen axis values as environment variables.
        def envList = conf.collect { k, v -> "${k}=${v}" }
        withEnv ( envList ) {
            ws ("workspace/${env.JOB_NAME}/${env.ARCH}") {
                block()
            }
        }
    }
}
// Build and archive distributable artifacts for every configuration.
// Run by the integration() pipeline only after all tests passed.
def artifacts() {
matrix ( configurations ) {
stage ( "Artifacts - {${env.NODE}-${env.ARCH}}") {
sh "ruby Jenkinsfile.rb --project \"stx:jv-branch\" artifacts"
// Fingerprint so downstream jobs can trace which build produced each
// archive. `onlyIfSuccessful` left commented out - presumably on
// purpose, since integration() already guards on build status.
archiveArtifacts artifacts: "artifacts/*build${env.BUILD_NUMBER}*.zip, artifacts/*build${env.BUILD_NUMBER}*.bz2, artifacts/*build${env.BUILD_NUMBER}*.sha256", fingerprint: true//, onlyIfSuccessful: true
}
}
}
/*
 * Push staged changes to the upstream repository, authenticating with
 * the stored username/password credential. Runs on a single node
 * (see any()).
 */
def push_to_upstream() {
    any ( configurations ) {
        withCredentials([[$class: 'UsernamePasswordMultiBinding', credentialsId: '1c40eb8f-c7ff-4ef7-8fc8-96aa6310907c', passwordVariable: 'pass', usernameVariable: 'user']]) {
            // Single-quoted Groovy string: let the *shell* expand
            // $user / $pass from the environment. Groovy interpolation
            // ("${pass}") would bake the secret into the command line
            // and leak it into the build log / process table - Jenkins
            // explicitly warns against interpolating credentials.
            sh 'rake "workflow:push-upstream[$user, $pass]"'
        }
    }
}
/*
* A helper function to return a branch to build.
*
* For normal jobs this is the value of 'Branch' field from SCM configuration.
* For multibranch jobs, this is the value of BRANCH_NAME environment variable.
*
* There's a copy of this function in `Jenkinsfile`. These two should be kept
* in sync!
*/
def branch() {
    // Multibranch jobs define BRANCH_NAME; ordinary jobs fall back to
    // the branch configured in the SCM settings. Check against null
    // explicitly - an empty string would (intentionally) be returned
    // as-is.
    return env.BRANCH_NAME != null ? env.BRANCH_NAME : scm.getBranch()
}
// Expose this script's methods to the caller of the `load` step.
return this;