CI: Changed the credentials ID for pushing changes to `workflow-push-upstream`
from `workflow:push-upstream`, because a double colon is not allowed in credentials IDs.
/*
 * Jenkins Pipeline definitions for Smalltalk/X jv-branch
 *
 * The following pipelines are defined here:
 *
 * * `build()` - compiles and tests all supported configurations.
 * No artifacts are archived.
 *
 * * `integration()` - like the `build()` pipeline; then, when all tests pass
 * on all configurations, archives artifacts and pushes staged changes to upstream.
 */
import com.cloudbees.plugins.credentials.CredentialsProvider;
import com.cloudbees.plugins.credentials.common.StandardUsernameCredentials;
import com.cloudbees.jenkins.plugins.sshcredentials.SSHUserPrivateKey;
/**
 * Supported configurations, as a map from axis name to the list of values
 * on that axis (the build matrix is the cross-product of all axes).
 * To overwrite, do:
 *
 * ...
 * pipeline = load "pipeline.groovy"
 * ...
 * pipeline.configurations = [ 'NODE': 'qnx', 'ARCH' : 'mips' ]
 * ...
 *
 * Default value: see below.
 */
configurations = [
'NODE': ['windows' , 'linux' ],
'ARCH': ['i386', 'x86_64' ]
]
/** Branch to build. To overwrite do:
 *
 * ...
 * pipeline = load "pipeline.groovy"
 * ...
 * pipeline.branch = "issue-123"
 * ...
 *
 * Default value:
 * For multibranch jobs, this is the value of the BRANCH_NAME environment
 * variable. For normal jobs this is `null` (BRANCH_NAME is unset), and the
 * 'Branch' field from the SCM configuration should be used instead.
 *
 * Due to Jenkins internals, the fallback to the SCM configuration must be done
 * by the 'user' of this variable (i.e., use `branch != null ? branch : scm.getBranch()`)
 */
branch = env.BRANCH_NAME
/**
 * Workspace (directory) to use. To overwrite do:
 *
 * ...
 * pipeline = load "pipeline.groovy"
 * ...
 * pipeline.workspace = "some-other-job"
 * ...
 *
 * Default value:
 * Name of the current job.
 */
workspace = env.JOB_NAME
/*
 * "Default" pipeline: checks out, compiles and tests every supported
 * configuration (the cross-product of the `configurations` axes).
 * No artifacts are archived.
 */
def build() {
    stage ( "Build" ) {
        matrix ( configurations ) {
            // Note: trailing space removed from the stage label below so it
            // matches the "Compile"/"Test" stage label format.
            stage ( "Checkout - {${env.NODE}-${env.ARCH}}" ) {
                sshagent([ scm.getCredentialsId() ]) {
                    // Clone on the first run, pull afterwards; the presence of
                    // Jenkinsfile.rb marks an already-populated workspace.
                    sh """
if [ -f Jenkinsfile.rb ]; then
hg pull --ssh ssh ${scm.getSource()}
else
hg clone --ssh ssh ${scm.getSource()} .
fi
hg up ${branch != null ? branch : scm.getBranch()}
"""
                    sh "ruby Jenkinsfile.rb --project \"stx:jv-branch\" --arch ${env.ARCH} update"
                }
            }
            stage ( "Compile - {${env.NODE}-${env.ARCH}}" ) {
                sh "ruby Jenkinsfile.rb --project \"stx:jv-branch\" compile"
            }
        }
    }
    stage ( "Test" ) {
        matrix ( configurations ) {
            stage ( "Test - {${env.NODE}-${env.ARCH}}" ) {
                sh "ruby Jenkinsfile.rb --project \"stx:jv-branch\" test"
                // Tolerate missing reports (a configuration may produce none).
                junit allowEmptyResults: true, testResults: "reports/*build${env.BUILD_NUMBER}*.xml"
            }
        }
    }
}
/*
 * "Integration" pipeline: run the full `build()` pipeline and then — provided
 * that no test failed — archive the artifacts and push staged changes upstream.
 */
def integration() {
    build()
    println "Smalltalk/X built, job status is: ${currentBuild.result}"
    /*
     * If even a single test failed, stop right here. There's no point
     * archiving a broken build.
     */
    if (currentBuild.result == 'UNSTABLE') {
        return;
    }
    artifacts()
    push()
}
/**
 * Expands a configuration matrix into the list of all concrete combinations.
 *
 * Recursively walks the axes of `configurations` (a map from axis name to a
 * list of values) and produces one map per point of the cross-product,
 * e.g. [A: [1,2], B: ['x']] yields [[A:1, B:'x'], [A:2, B:'x']].
 *
 * The trailing parameters are recursion state and are normally left at their
 * defaults by callers.
 */
def combine(configurations, axes = null, axis = 0, partial = new HashMap(), combinations = []) {
    def keys = axes ?: configurations.keySet().toArray()
    if (axis >= keys.length) {
        // All axes assigned — `partial` is one complete combination.
        combinations.add(partial)
    } else {
        def key = keys[axis]
        for (value in configurations[key]) {
            // Clone so sibling branches of the recursion don't share state.
            def extended = partial.clone()
            extended[key] = value
            combine(configurations, keys, axis + 1, extended, combinations)
        }
    }
    return combinations
}
/**
 * Runs `block` once for every combination of configuration values, all
 * combinations in parallel (one fail-fast parallel branch per combination).
 * Each branch runs on a node labeled by the combination's NODE value, with
 * every axis of the combination exported into the environment.
 *
 * Fixes over the previous version: the loop counter `i` was used without
 * `def` (leaking into the script binding) and `def index = i` was dead code.
 */
def matrix(configurations, block) {
    def branches = [failFast: true]
    for (combination in combine(configurations)) {
        // Bind a fresh local for the closure below — a Groovy `for` loop
        // variable is shared across iterations, so capturing it directly
        // would make every branch see the last combination.
        def conf = combination
        branches["${conf.NODE}-${conf.ARCH}"] = {
            node ( conf.NODE ) {
                // Export each axis (e.g., NODE=linux, ARCH=i386) to the environment.
                def newEnv = []
                for (k in conf.keySet()) {
                    newEnv.add("${k}=${conf[k]}")
                }
                withEnv ( newEnv ) {
                    // Separate workspace per architecture so configurations
                    // sharing a node do not clobber each other's checkout.
                    ws ("workspace/${workspace}/${env.ARCH}") {
                        block()
                    }
                }
            }
        }
    }
    parallel branches
}
/**
 * Runs `block` exactly once, on a single configuration built from the first
 * value of every axis in `configurations` (contrast with #matrix, which runs
 * it on all combinations).
 */
def any(configurations, block) {
    // Pick the head of each axis' value list.
    def conf = [:]
    for (axisName in configurations.keySet().toArray()) {
        conf[axisName] = configurations[axisName][0]
    }
    node ( conf.NODE ) {
        // Export the chosen configuration into the environment.
        def environment = []
        for (axisName in conf.keySet()) {
            environment.add("${axisName}=${conf[axisName]}")
        }
        withEnv ( environment ) {
            ws ("workspace/${workspace}/${env.ARCH}") {
                block()
            }
        }
    }
}
/**
 * Builds and archives the artifacts (zip / bz2 bundles plus their SHA-256
 * checksums) for every configuration. To be called only after a successful
 * build — see #integration().
 */
def artifacts() {
matrix ( configurations ) {
stage ( "Artifacts - {${env.NODE}-${env.ARCH}}") {
// Let Jenkinsfile.rb assemble the artifact bundles first...
sh "ruby Jenkinsfile.rb --project \"stx:jv-branch\" artifacts"
// ...then archive (and fingerprint) everything tagged with this build number.
archiveArtifacts artifacts: "artifacts/*build${env.BUILD_NUMBER}*.zip, artifacts/*build${env.BUILD_NUMBER}*.bz2, artifacts/*build${env.BUILD_NUMBER}*.sha256", fingerprint: true//, onlyIfSuccessful: true
}
}
}
/**
 * Pushes staged changes to the upstream repository. To be called after a
 * successful build — see #integration().
 *
 * Kludge: this stage may push changes to public BitBucket repositories.
 * To access repos on BitBucket, I (JV) don't want to use the same key /
 * password as for checkouts from staging repositories. Therefore, first
 * look for credentials with ID `workflow-push-upstream`; if they exist, use
 * them to push to the upstream repository, otherwise fall back to the
 * standard (SCM checkout) credentials.
 */
def push() {
    any ( configurations ) {
        stage ( "Push to upstream" ) {
            def credentials = findCredentials("workflow-push-upstream")
            if (credentials == null) {
                credentials = findCredentials(scm.getCredentialsId())
            }
            if (credentials == null) {
                // Fail with a clear message rather than with the opaque
                // NullPointerException the println below would otherwise throw.
                error "No usable credentials found for pushing to upstream"
            }
            println "Using credentials ${credentials.getId()}: ${credentials.getDescription()}"
            if (credentials instanceof SSHUserPrivateKey) {
                // SSH key credentials: push via ssh-agent.
                sshagent([ credentials.getId() ]) {
                    sh "rake \"workflow:push-upstream\""
                }
            } else {
                // Username/password credentials: pass them to the rake task.
                withCredentials([[$class: 'UsernamePasswordMultiBinding', credentialsId: credentials.getId(), passwordVariable: 'pass', usernameVariable: 'user']]) {
                    sh "rake \"workflow:push-upstream[${user}, ${pass}]\""
                }
            }
        }
    }
}

/**
 * Returns the StandardUsernameCredentials with the given ID, or null when no
 * such credentials exist. (Extracted to avoid the previously duplicated
 * lookup loops in #push().)
 */
def findCredentials(id) {
    for (StandardUsernameCredentials c : CredentialsProvider.lookupCredentials(StandardUsernameCredentials.class)) {
        if (c.getId().equals(id)) {
            return c
        }
    }
    return null
}
// Return this script object so a Jenkinsfile doing `pipeline = load "..."` can
// call build() / integration() (and override configurations/branch/workspace).
return this;