Merged branch stx-8.0.0 into default.
author: Jan Vrany <jan.vrany@fit.cvut.cz>
date: Tue, 12 Jun 2018 21:27:24 +0000
changeset 236 5a4e789cdd40
parent 158 f56a032f2cd7 (current diff)
parent 235 52aceb57d976 (diff)
child 237 69ab924b97bf
Merged branch stx-8.0.0 into default.
Jenkinsfile
Jenkinsfile.rb
bin/unzip.exe
bin/zip.exe
build.rb
ci/steps.groovy
makelib/config-linux-gnu.make
makelib/config-linux-i386.make
makelib/config-linux-x86_64.make
makelib/config-linux.make
makelib/config.make
pipeline.groovy
rakelib/clean.rake
rakelib/compile.rake
rakelib/extensions.rb
rakelib/install.rake
rakelib/setup.rake
rakelib/test.rake
specs/baseline.rbspec
specs/repositories.rbspec
specs/stx-jv.rbspec
tests/dsl_tests.rb
--- a/Jenkinsfile	Fri May 11 12:10:26 2018 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,62 +0,0 @@
-/*
- * Run default pipeline. See `pipeline.groovy for details.
- */
-
-def pipeline;
-
-/*
- * A helper function to return a branch to build.
- *
- * For normal jobs this is the value of 'Branch' field from SCM configuration.
- * For multibranch jobs, this is the value of BRANCH_NAME environment variable.
- *
- * This is the same as build() function in `pipeline.groovy` and should be kept
- * in sync. We cannot use the one there as we yet have to load it.
- */
-def branch() {
-    if (env.BRANCH_NAME == null) {
-        return scm.getBranch();
-    } else {
-        return env.BRANCH_NAME;
-    }
-}
-
-stage ( "Load Pipeline") {
-    node {
-        /*
-         * Do not use default workspace here as checkout
-         * would erase all contents. Use a dedicated
-         * workspace instead
-         */
-        ws ("workspace/${env.JOB_NAME}@loadpipeline") {
-            sshagent([ scm.getCredentialsId() ]) {
-                sh  """
-                    if [ -f pipeline.groovy ]; then
-                        hg pull --ssh ssh ${scm.getSource()}
-                    else
-                        hg clone --ssh ssh ${scm.getSource()} .
-                    fi
-                    hg up ${branch()}
-                    """
-            }
-            pipeline = load "pipeline.groovy"
-        }
-    }
-}
-
-/*
- * Set the branch to build to make sure it's in sync
- */
-pipeline.branch = branch()
-
-/*
- * If we're building a 'default' branch, run "integration" pipeline. Otherwise,
- * run normal "build" pipeline, assuming this build is just a test build for
- * some feature-in-progress. If different logic is needed, then make a branch
- * and modify this file
- */
-if ( branch().equals("default") ) {
-    pipeline.integration()
-} else {
-    pipeline.build()
-}
--- a/Jenkinsfile.rb	Fri May 11 12:10:26 2018 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,84 +0,0 @@
-#!/usr/bin/ruby
-DOCUMENTATION = <<DOCEND
-A help script to build a Smalltalk/X jv-branch (mainly) on a Jenkins CI. 
-If no TARGET is given, invokes target `jenkins:job`.
-
-DOCEND
-
-require 'optparse'
-
-def run()
-  optparse = OptionParser.new do | opts |
-    opts.banner = "Usage: #{$0} [TARGET1 [TARGET1 [...]]\n"
-    opts.on('-p', '--project PROJECT', "PROJECT to build. Overrides project specified by the environment variable.") do | value |
-      ENV['PROJECT'] = value
-      self.class.const_set('PROJECT', value)  
-    end
-
-    opts.on('-a', '--arch ARCH', "Arcitecture to build for. Overrides architecture specified by the environment variable.") do | value |
-      ENV['ARCH'] = value
-      self.class.const_set('ARCH', value)  
-    end
-
-    opts.on('-r', '--repository-set REPOSET', "Repository set to use. Overrides repository set specified by the environment variable.") do | value |
-      ENV['REPOSITORYSET'] = value
-      self.class.const_set('REPOSITORYSET', value)  
-    end    
-
-    opts.on(nil, '--help', "Prints this message") do
-      puts DOCUMENTATION
-      puts optparse.help()
-      exit 0
-    end
-  end
-
-  optparse.parse!
-
-  # Make sure a repository set is defined. make it an error if not. 
-  if not ENV['REPOSITORYSET'] then
-    puts "ERROR: Repository set not specified. "
-    puts "       Use --repository-set option or set REPOSITORYSET environment"
-    exit 126
-  end
-
-  # If run outside a Jenkins build environment (such as from a command line), 
-  # define some variables to make it look more like a proper  Jenkins build 
-  # environment. 
-  ENV['WORKSPACE'] ||= '.'
-  ENV['BUILD_NUMBER'] ||= Time.now.strftime("%Y%m%d")
-  ENV['JOB_NAME'] ||= 'interactive'
-
-  # If no target is given, run target jenkins:job
-  if ARGV.size == 0 then
-    ARGV << 'jenkins:job'
-  end
-
-  # Wipe out `reports` directory. This is needed for two reasons: 
-  #
-  # 1) There's a bug in Cobertura plugin so it does not expand 
-  #    variables in .xml file pattern so the pattern cannot include
-  #    BUILD_NUMBER to tell reports for particular build. 
-  #    See https://issues.jenkins-ci.org/browse/JENKINS-30647
-  # 2) More importantly, when additional axis is used (such as JDK
-  #    or Mercurial version) then value of this axis is not reflected
-  #    in report filename. So again, all files, even those from previous
-  #    builds would be matches which is not what we want. 
-  # 
-  # A workaround is to wipe-out `reports` directory before each buld,
-  # so once this script finishes, all reports there are for this build.
-  # The downside is that we have to limit number of executors to 1,
-  reports_dir = File.join(ENV['WORKSPACE'], 'reports')
-  if File.exist? reports_dir then
-    require 'fileutils'    
-    FileUtils.rm_rf reports_dir
-  end
-  
-  # When run under Jenkins, we do want to see full backtrace if something
-  # fails. 
-  ARGV << '--trace'
-
-  require 'rake'
-  Rake.application.run
-end
-
-run if __FILE__ == $0
--- a/bin/stmkmf.rb	Fri May 11 12:10:26 2018 +0100
+++ b/bin/stmkmf.rb	Tue Jun 12 21:27:24 2018 +0000
@@ -31,18 +31,18 @@
     make_proto_contents = File.read(File.join(cwd, 'Make.proto')).encode("UTF-8", :invalid=>:replace, :replace=>"?").encode('UTF-8')
     match = /^TOP=(.*)$/.match(make_proto_contents)
     if match.nil? then
-      STDERR.puts "error: could not ectract TOP from Make.proto (missing TOP=.. definition?)"    
+      STDERR.puts "error: could not extract TOP from Make.proto (missing TOP=.. definition?)"    
       return 4
     end
-    top = match[1]
+    top = match[1].rstrip()
   end
   if not File.directory? top then
-    STDERR.puts "error: TOP not a cwd: #{top}"    
+    STDERR.puts "error: TOP does not exist or not a directory: #{top}"    
     return 5
   end
   makelib = File.join(cwd, top, '..', '..' , 'makelib')
   if not File.exist? makelib then
-    STDERR.puts "error: could not find out include cwd: #{makelib}"    
+    STDERR.puts "error: makefile include library does not exist or not a directory: #{makelib}"
     return 6
   end
   File.open(File.join(cwd, out), "w") do | f |
Binary file bin/unzip.exe has changed
Binary file bin/zip.exe has changed
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/build.rb	Tue Jun 12 21:27:24 2018 +0000
@@ -0,0 +1,84 @@
+#!/usr/bin/ruby
+DOCUMENTATION = <<DOCEND
+A helper script to build a Smalltalk/X jv-branch (mainly) on a Jenkins CI.
+If no TARGET is given, invokes target `jenkins:job`.
+
+DOCEND
+
+require 'optparse'
+
+def run()
+  optparse = OptionParser.new do | opts |
+    opts.banner = "Usage: #{$0} [TARGET1 [TARGET2 [...]]]\n"
+    opts.on('-p', '--project PROJECT', "PROJECT to build. Overrides project specified by the environment variable.") do | value |
+      ENV['PROJECT'] = value
+      self.class.const_set('PROJECT', value)  
+    end
+
+    opts.on('-t', '--build-target BUILD_TARGET', "Target to build for, in the form of a GNU target triplet (such as 'x86_64-pc-linux-gnu'). Overrides the build target specified by the environment variable.") do | value |
+      ENV['BUILD_TARGET'] = value
+      self.class.const_set('BUILD_TARGET', value)  
+    end
+
+    opts.on('-r', '--repository-set REPOSET', "Repository set to use. Overrides repository set specified by the environment variable.") do | value |
+      ENV['REPOSITORYSET'] = value
+      self.class.const_set('REPOSITORYSET', value)  
+    end    
+
+    opts.on(nil, '--help', "Prints this message") do
+      puts DOCUMENTATION
+      puts optparse.help()
+      exit 0
+    end
+  end
+
+  optparse.parse!
+
+  # Make sure a repository set is defined; make it an error if not.
+  if not ENV['REPOSITORYSET'] then
+    puts "ERROR: Repository set not specified."
+    puts "       Use the --repository-set option or set the REPOSITORYSET environment variable."
+    exit 126
+  end
+
+  # If run outside a Jenkins build environment (such as from a command line),
+  # define some variables to make it look more like a proper Jenkins build
+  # environment.
+  ENV['WORKSPACE'] ||= '.'
+  ENV['BUILD_NUMBER'] ||= Time.now.strftime("%Y%m%d")
+  ENV['JOB_NAME'] ||= 'interactive'
+
+  # If no target is given, run target jenkins:job
+  if ARGV.size == 0 then
+    ARGV << 'jenkins:job'
+  end
+
+  # Wipe out the `reports` directory. This is needed for two reasons:
+  #
+  # 1) There's a bug in the Cobertura plugin: it does not expand
+  #    variables in the .xml file pattern, so the pattern cannot include
+  #    BUILD_NUMBER to tell apart reports from a particular build.
+  #    See https://issues.jenkins-ci.org/browse/JENKINS-30647
+  # 2) More importantly, when an additional axis is used (such as JDK
+  #    or Mercurial version), the value of this axis is not reflected
+  #    in the report filename. So again, all files, even those from previous
+  #    builds, would be matched, which is not what we want.
+  #
+  # A workaround is to wipe out the `reports` directory before each build,
+  # so once this script finishes, all reports there are for this build.
+  # The downside is that we have to limit the number of executors to 1.
+  reports_dir = File.join(ENV['WORKSPACE'], 'reports')
+  if File.exist? reports_dir then
+    require 'fileutils'    
+    FileUtils.rm_rf reports_dir
+  end
+  
+  # When run under Jenkins, we do want to see the full backtrace if something
+  # fails.
+  ARGV << '--trace'
+
+  require 'rake'
+  Rake.application.run
+end
+
+run if __FILE__ == $0
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/ci/build.groovy	Tue Jun 12 21:27:24 2018 +0000
@@ -0,0 +1,62 @@
+/*
+ * Run default pipeline. See `steps.groovy` for details.
+ */
+
+def steps;
+
+/*
+ * A helper function to return the branch to build.
+ *
+ * For normal jobs this is the value of the 'Branch' field from the SCM configuration.
+ * For multibranch jobs, this is the value of the BRANCH_NAME environment variable.
+ *
+ * This is the same logic as in `steps.groovy` and should be kept
+ * in sync. We cannot use the one there as we have yet to load it.
+ */
+def branch() {
+    if (env.BRANCH_NAME == null) {
+        return scm.getBranch();
+    } else {
+        return env.BRANCH_NAME;
+    }
+}
+
+stage ( "Load Pipeline") {
+    node {
+        /*
+         * Do not use default workspace here as checkout
+         * would erase all contents. Use a dedicated
+         * workspace instead
+         */
+        ws ("workspace/${env.JOB_NAME}@loadpipeline") {
+            sshagent([ scm.getCredentialsId() ]) {
+                sh  """
+                    if [ -f ci/steps.groovy ]; then
+                        hg pull --ssh ssh ${scm.getSource()}
+                    else
+                        hg clone --ssh ssh ${scm.getSource()} .
+                    fi
+                    hg up ${branch()}
+                    """
+            }
+            steps = load "ci/steps.groovy"
+        }
+    }
+}
+
+/*
+ * Set the branch to build to make sure it's in sync
+ */
+steps.branch = branch()
+
+/*
+ * If we're building the 'default' branch, run the "integration" pipeline. Otherwise,
+ * run the normal "build" pipeline, assuming this build is just a test build for
+ * some feature-in-progress. If different logic is needed, then make a branch
+ * and modify this file.
+ */
+if ( branch().equals("default") ) {
+    steps.integration()
+} else {
+    steps.build()
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/ci/publish.groovy	Tue Jun 12 21:27:24 2018 +0000
@@ -0,0 +1,47 @@
+/*
+ * This is a publish pipeline
+ */
+
+
+pipeline {
+    agent { label 'linux' }
+
+    parameters {
+        string(name: 'job', description: 'Job whose artifacts to upload',  defaultValue: 'stx_jv')
+        string(name: 'srv', description: 'Server (and username) where to upload artifacts',  defaultValue: "${env.STX_PUBLISH_SERVER}")
+        string(name: 'dir', description: 'Directory where to upload artifacts  ',  defaultValue: "${env.STX_PUBLISH_DIRECTORY}/6.2.6")
+    }
+
+    stages {
+        stage('Copy artifacts') {
+            steps {
+                copyArtifacts projectName: "${params.job}", selector: lastSuccessful(), excludes: '**/*prebuilt*'
+                sh "ls -al artifacts"
+            }
+        }
+
+        stage ( "Upload artifacts" ) {
+            steps {
+                sshagent([ "8be01219-a51e-410a-9850-bf9c134283fb" ]) {
+                    sh script:
+                        """
+                        sftp -b - ${params.srv} <<END
+                        cd ${params.dir}
+                        ls
+                        rm *
+                        put artifacts/*
+                        ls
+                        END
+                        """.stripIndent()
+                }
+            }
+        }
+        stage ( "Cleanup" ) {
+            steps {
+                cleanWs()
+            }
+        }
+    }
+}
+
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/ci/steps.groovy	Tue Jun 12 21:27:24 2018 +0000
@@ -0,0 +1,339 @@
+/*
+ * Jenkins Pipeline definitions for Smalltalk/X jv-branch
+ *
+ * The following steps are defined here:
+ *
+ *  * `build()` - compiles and tests all supported configurations.
+ *    No artifacts are archived.
+ *
+ *  * `integration()` - like `build()`; then, if all tests pass on all
+ *    configurations, archives artifacts and pushes staged changes to upstream
+ *    (if approved by the user).
+ */
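+
+/*
+ * For an example of how these steps are loaded and invoked from a job, see
+ * `ci/build.groovy` in this changeset.
+ */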
+
+import com.cloudbees.plugins.credentials.CredentialsProvider;
+import com.cloudbees.plugins.credentials.common.StandardUsernameCredentials;
+import com.cloudbees.jenkins.plugins.sshcredentials.SSHUserPrivateKey;
+
+/**
+ * Supported configurations. To override, do:
+ *
+ *     ...
+ *     steps = load "steps.groovy"
+ *     ...
+ *     steps.configurations = [ 'BUILD_TARGET': 'mips-sgi-irix' ]
+ *     ...
+ *
+ * Default value: see below.
+ */
+configurations = [
+    'BUILD_TARGET': ['i686-pc-mingw32' , 'x86_64-w64-mingw32', 'i686-pc-linux-gnu', 'x86_64-pc-linux-gnu' ]
+]
+
+/** Branch to build. To override, do:
+ *
+ *     ...
+ *     steps = load "steps.groovy"
+ *     ...
+ *     steps.branch = "issue-123"
+ *     ...
+ *
+ * Default value:
+ * For multibranch jobs, this is the value of BRANCH_NAME environment variable.
+ * For normal jobs this is the value of 'Branch' field from SCM configuration.
+ *
+ * Due to Jenkins internals, the fallback to the SCM configuration must be done
+ * by the 'user' of this variable (i.e., use `branch != null ? branch : scm.getBranch()`).
+ */
+branch = env.BRANCH_NAME
+
+/**
+ * Workspace to use. To override, do:
+ *
+ *     ...
+ *     steps = load "steps.groovy"
+ *     ...
+ *     steps.workspace = "some-other-job"
+ *     ...
+ *
+ * Default value:
+ * Name of current job.
+ */
+workspace = env.JOB_NAME
+
+/*
+ * "Default" steps:
+ */
+def build() {
+    stage ( "Build" ) {
+        matrix ( configurations ) {
+            stage ( "Checkout - {${env.BUILD_TARGET}} " ) {
+                sshagent([ scm.getCredentialsId() ]) {
+                    branch_to_use = branch != null ? branch : scm.getBranch()
+                    sh  """
+                        if [ -f build.rb ]; then
+                            hg pull --ssh ssh ${scm.getSource()}
+                        else
+                            hg clone --ssh ssh ${scm.getSource()} .
+                        fi
+                        hg up ${branch_to_use}
+                        """
+                    sh  "ruby build.rb --project \"stx:jv-branch\" --build-target ${env.BUILD_TARGET} update"
+                }
+            }
+
+            stage ( "Compile - {${env.BUILD_TARGET}}") {
+                sh "ruby build.rb --project \"stx:jv-branch\" --build-target ${env.BUILD_TARGET} compile"
+            }
+        }
+    }
+
+    stage ( "Test" ) {
+        matrix ( configurations ) {
+            stage ( "Test - {${env.BUILD_TARGET}}") {
+                /*
+                 * Some tests require a display, so:
+                 *
+                 *  * on *NIX hosts, launch Xvfb
+                 *  * on Windows, do nothing. The Windows slave must be configured
+                 *    so it has access to an interactive window station (which
+                 *    means it has to run under a user session, not as a service)
+                 */
+                if ( isUnix() ) {
+                    wrap([$class: 'Xvfb', autoDisplayName: true, additionalOptions: '-screen 0 1024x768x24 -pixdepths 24 4 8 15 16 32', parallelBuild: true]) {
+                        sh "ruby build.rb --project \"stx:jv-branch\" --build-target ${env.BUILD_TARGET} test"
+                    }
+                } else {
+                    sh "ruby build.rb --project \"stx:jv-branch\" --build-target ${env.BUILD_TARGET} test"
+                }
+                junit allowEmptyResults: true, testResults: "reports/*build${env.BUILD_NUMBER}*.xml"
+            }
+        }
+    }
+
+    /*
+     * Build and publish all artifacts - to allow (beta) testers to run the full St/X
+     * without having to build it themselves.
+     */
+    artifacts();
+    println "Smalltalk/X built, job status is: ${currentBuild.result}"
+    if ( currentBuild.result == null ) {
+        publish("8.0.0-rc")
+    }
+}
+
+/*
+ * "Integration" steps
+ */
+def integration() {
+    build()
+    /*
+     * If a single test fails, abort here. There's no point
+     * archiving a broken build.
+     */
+    println "Smalltalk/X built, job status is: ${currentBuild.result}"
+    if ( currentBuild.result == 'UNSTABLE' ) {
+        return;
+    }
+    artifacts()
+
+    /*
+     * Check if there are changes to be pushed to upstream. If so,
+     * ask user to approve that push
+     */
+    if ( changes() ) {
+        def integrate = false;
+
+        if (env.JENKINS_URL == "https://swing.fit.cvut.cz/jenkins/") {
+            integrate = true;
+        } else {
+            integrate = input(message: 'Integrate all staged changes to upstream?',
+                              parameters: [
+                                    booleanParam(name: "Integrate changes",
+                                         description: 'If checked, all staged changes will be pushed to an upstream repository',
+                                         defaultValue: true)]);
+        }
+        if ( integrate ) {
+            push()
+        }
+    }
+}
+
+/*
+ * Publish built artifacts to download server
+ */
+def publish(dir) {
+    Calendar current = Calendar.getInstance()
+    Calendar scheduled = current.clone()
+
+    /*
+     * Schedule the upload for the next day, at 3am in the morning
+     */
+    scheduled.add(Calendar.DATE, 1)
+    scheduled.set(Calendar.HOUR_OF_DAY, 3)
+
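+    // Note: quietPeriod is given in seconds, hence the millisecond difference divided by 1000.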
+    build(job:        'stx_jv_publish',
+          parameters: [
+                        string(name: 'job', value: env.JOB_NAME),
+                        string(name: 'srv', value: env.STX_PUBLISH_SERVER),
+                        string(name: 'dir', value: "${env.STX_PUBLISH_DIRECTORY}/${dir}")
+                      ],
+          quietPeriod: (scheduled.getTimeInMillis() - current.getTimeInMillis()) / 1000,
+          wait: false)
+}
+
+
+
+/*
+ * Utility. Returns true if there are any changes to be pushed to an upstream,
+ * false otherwise.
+ */
+def changes() {
+    changes = false;
+    any ( configurations ) {
+        withCredentialsForUpstream() { user, pass ->
+            status = sh ( script: "rake \"workflow:out-upstream[${user}, ${pass}]\"", returnStatus: true)
+        }
+        changes = status == 0;
+    }
+    return changes;
+}
+
+def combine(configurations, axes = null, axis = 0, partial = new HashMap(), combinations = []) {
+    def axes0 = axes
+    if (axes0 == null) {
+        axes0 = configurations.keySet().toArray();
+    }
+    if ( axis < axes0.length ) {
+        for ( value in configurations[axes0[axis]] ) {
+            def combined = partial.clone()
+            combined[axes0[axis]] = value
+            combine(configurations, axes0, axis + 1, combined, combinations)
+        }
+    } else {
+        combinations.add(partial)
+    }
+    return combinations;
+}
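+
+/*
+ * For illustration (hypothetical axes/values, not used by the pipeline itself):
+ * combine(['BUILD_TARGET': ['a', 'b'], 'JDK': ['8']]) yields the cartesian product
+ * [[BUILD_TARGET: 'a', JDK: '8'], [BUILD_TARGET: 'b', JDK: '8']].
+ */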
+
+
+def matrix(configurations, block) {
+    def combinations = combine(configurations).toArray()
+    def branches = [failFast: true]
+    for (i = 0; i < combinations.length; i++) {
+        def index = i
+        def conf = combinations[i];
+        branches["${conf.BUILD_TARGET}"] = {
+            node ( conf.BUILD_TARGET ) {
+                def newEnv = []
+                for (k in conf.keySet()) {
+                    newEnv.add("${k}=${conf[k]}")
+                }
+                withEnv ( newEnv ) {
+                    ws ("workspace/${workspace}/${env.BUILD_TARGET}") {
+                        block()
+                    }
+                }
+            }
+        }
+    }
+    parallel branches
+}
+
+
+
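+/*
+ * Utility. Executes the given block once, on a single node, using the first
+ * value of each configuration axis (rather than iterating the whole matrix).
+ */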
+def any(configurations, block) {
+    def axes = configurations.keySet().toArray()
+    def conf = [:]
+    for (axis in axes) {
+        conf[axis] = configurations[axis][0]
+    }
+    node ( conf.NODE ) {
+        def newEnv = []
+        for (k in conf.keySet()) {
+            newEnv.add("${k}=${conf[k]}")
+        }
+        withEnv ( newEnv ) {
+            ws ("workspace/${workspace}/${env.BUILD_TARGET}") {
+                block()
+            }
+        }
+    }
+}
+
+
+def artifacts() {
+    matrix ( configurations ) {
+        stage ( "Artifacts - {${env.BUILD_TARGET}}") {
+            sh "ruby build.rb --project \"stx:jv-branch\" --build-target ${env.BUILD_TARGET} artifacts"
+            archiveArtifacts artifacts: "artifacts/*build${env.BUILD_NUMBER}*.zip, artifacts/*build${env.BUILD_NUMBER}*.bz2, artifacts/*build${env.BUILD_NUMBER}*.sha256", fingerprint: true//, onlyIfSuccessful: true
+        }
+    }
+}
+
+/**
+ * Push changes to the upstream repository. To be called after a successful
+ * build. See #build()
+ */
+def push() {
+    any ( configurations ) {
+        stage ( "Push to upstream" ) {
+            withCredentialsForUpstream { user, pass ->
+                sh "rake \"workflow:push-upstream[${user}, ${pass}]\""
+            }
+        }
+    }
+}
+
+/*
+ * Utility. Executes given block with credentials for upstream repository.
+ */
+def withCredentialsForUpstream(block) {
+    /*
+     * Kludge: Upstream repositories may be on a public BitBucket
+     * server. To access repos on BitBucket, I (JV) don't
+     * want to use the same key / password as for checkouts from
+     * staging repositories.
+     *
+     * Therefore, also look for another credential with the ID
+     * `workflow-push-upstream`. If it exists, then use it to
+     * push to the upstream repository. If no such credential exists,
+     * use standard credentials.
+     *
+     * So, here we go:
+     */
+    def id1 = "workflow-push-upstream";
+    def id2 = scm.getCredentialsId();
+    def credentials = null;
+
+    for (StandardUsernameCredentials c : CredentialsProvider.lookupCredentials(StandardUsernameCredentials.class)) {
+      if (c.getId().equals(id1)) {
+        credentials = c;
+        break;
+      }
+    }
+    if (credentials == null) {
+      for (StandardUsernameCredentials c : CredentialsProvider.lookupCredentials(StandardUsernameCredentials.class)) {
+        if (c.getId().equals(id2)) {
+          credentials = c;
+          break;
+        }
+      }
+    }
+
+    println "Using upstream credentials ${credentials.getId()}: ${credentials.getDescription()}"
+
+    if (credentials instanceof SSHUserPrivateKey) {
+        sshagent([ credentials.getId() ]) {
+            // sh "rake \"workflow:push-upstream\""
+            block(null, null)
+        }
+    } else {
+        withCredentials([[$class: 'UsernamePasswordMultiBinding', credentialsId: credentials.getId(), passwordVariable: 'pass', usernameVariable: 'user']]) {
+            // sh "rake \"workflow:push-upstream[${user}, ${pass}]\""
+            block(user, pass)
+        }
+    }
+}
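+
+/*
+ * Usage sketch (for illustration only; push() above does exactly this):
+ *
+ *     withCredentialsForUpstream { user, pass ->
+ *         sh "rake \"workflow:push-upstream[${user}, ${pass}]\""
+ *     }
+ *
+ * For SSH private key credentials the block is called with (null, null) and the
+ * key is made available via ssh-agent instead.
+ */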
+
+return this;
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/makelib/config-i686-pc-linux-gnu.make	Tue Jun 12 21:27:24 2018 +0000
@@ -0,0 +1,48 @@
+#
+# Definitions for target i686-pc-linux-gnu.
+#
+# based on eXept's:
+#
+# linux-elf/amd64_mode32: cpu: athlon64 in 32 bit mode os:linux-ELF options: +commonSymbol
+#
+#------------------ start of linux/amd64_mode32 ---------------------
+#
+# $Header: /cvs/stx/stx/configurations/linux-elf/amd64_mode32/defines,v 1.25 2016-03-17 22:32:46 stefan Exp $
+
+include $(MAKELIB)/config-linux-gnu.make
+include $(MAKELIB)/config-i686.make
+
+AS_CC=as
+AS_CC_CFLAG=
+ASFLAGS=--32
+
+
+# LD=ld -m elf_i386
+# STC_LD=ld -m elf_i386
+STC_LDFLAGS=$(CCCONFOPT) -L/usr/lib -Llib -L/usr/local/lib -L/usr/X11/lib
+CLASSLIB_LD=ld -m elf_i386
+LDFLAGS=$(CCCONFOPT) -Llib -L/usr/local/lib -L/usr/X11/lib -L/usr/lib \
+	-Wl,-rpath,'$$ORIGIN',-rpath,'$$ORIGIN/lib',-rpath,'$$ORIGIN/../lib'
+LD_REL_FLAG=-r
+
+#OPTIONAL_DL_LIB=-ldl
+#OPTIONAL_LIBDL_TARGET=libdld
+
+
+MAKE_ZLIB_ARG= -f Makefile.amd64_mode32
+MAKE_BZ2LIB_ARG= -f Makefile.amd64_mode32
+
+
+FFI_CFLAGS=$(CCCONFOPT)
+#------------------ end of linux-elf/amd64_mode32 ---------------------
+O_RULE=__STANDARD_O_RULE__
+EXTRA_LIBS=-ldl -lX11 -lXext
+
+# The following is a workaround for Debian / Ubuntu boxes.
+#
+ifeq ($(wildcard /usr/lib/i386-linux-gnu/libXft.so),)
+ifeq ($(wildcard /usr/lib/i386-linux-gnu/libXft.so.2),/usr/lib/i386-linux-gnu/libXft.so.2)
+LIB_XFT=-l:libXft.so.2 -l:libfontconfig.so.1
+endif
+endif
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/makelib/config-i686.make	Tue Jun 12 21:27:24 2018 +0000
@@ -0,0 +1,6 @@
+#
+# Common definition for i686 arch
+#
+CPU_INTERN_H=cpu_i386.h
+DEFS += -D__amd64_mode32__
+CCCONFOPT = -m32
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/makelib/config-linux-gnu.make	Tue Jun 12 21:27:24 2018 +0000
@@ -0,0 +1,100 @@
+#------------------ start of linux-elf/COMMON/defines ---------------------
+#
+# $Header: /cvs/stx/stx/configurations/linux-elf/COMMON/defines,v 1.92 2016-07-19 19:56:46 stefan Exp $
+include $(MAKELIB)/config.make
+
+CCARCHCOMMONOPT=-DELF
+#RELOCINSTVARS=+relocInstvars
+
+INTERN_H=linuxIntern.h
+CPUINTERN_H=$(INCLUDE)/linuxIntern.h
+OS=-DLINUX
+
+#OPTIONAL_DL_LIB=-ldl
+#OPTIONAL_LIBDL_TARGET=libdld
+
+
+
+MAKEFILE_EXTENSION=linux-elf
+
+# ALL_TARGET=LINUX_i386_ELF
+
+MAKEFLAGS += --no-print-directory
+MAKE_INCLUDE=include
+INSTALL=install
+INSTALL_BIN_CMD=install -s -m 0755
+INSTALL_SCRIPT_CMD=install -m 0755
+COMPRESSOR=gzip
+COMPRESSED_FILETYPE=targz
+SETUP_RULE=setup_linux
+
+# We need to use at most GCC 6.x; some code may fail to compile
+# with GCC 7 due to -Werror (`send.c`, for instance). We won't fix
+# that for Smalltalk/X 6.x.x; it will be fixed only for Smalltalk/X 8.0.0.
+ifeq ($(CC),cc)
+  ifneq ($(shell which gcc-6),)
+    CC = gcc-6
+  else ifneq ($(shell which gcc-5),)
+    CC = gcc-5
+  else ifneq ($(shell which gcc-4),)
+    CC = gcc-4
+  endif
+endif
+
+
+STC_LEXLIB=-lfl
+
+#
+# use static librt for clock_gettime().
+# dynamic librt causes libpthread to be loaded that does not
+# work with stx' stack layout
+#
+
+OTHERLIBS=-ldl -Wl,-Bstatic,-lrt,-Bdynamic
+OTHERLIBS=-ldl -lrt
+OTHERLIBS=-ldl -lrt -lc
+STATIC_OTHERLIBS=
+LDFLAGS=-Llib -L/usr/local/lib -L/usr/X11/lib \
+	-Wl,-z,now,-rpath,'$$ORIGIN',-rpath,'$$ORIGIN/lib',-rpath,'$$ORIGIN/../lib',--wrap=memcpy
+
+SPECIAL_LIBRUN_RULE=allLINUX
+
+
+FORCE_LINK_LIBRUN=1
+
+# mhm - these should now only be in libview!
+
+
+ARCH_LIBSNMP=$(TOP)/libsnmp
+ARCH_CLASSLIBDIRS=$(ARCH_LIBSNMP)
+
+
+#
+# a netsnmp library
+#
+# if already in /usr/lib:
+# DYNAMIC_LINK_LIB_NETSNMP=-lnetsnmp
+# STATIC_LINK_LIB_NETSNMP=
+# TARGET_LIB_NETSNMP=
+#
+# if a local one should be built and used:
+DYNAMIC_LINK_LIB_NETSNMP=-L$(NETSNMP_LIBDIR)/.libs -L$(NETSNMP_LIBDIR) $(NETSNMP_LIBDIR)/.libs/libnetsnmp.a -lrt
+STATIC_LINK_LIB_NETSNMP=
+TARGET_LIB_NETSNMP=snmplib
+
+
+STATIC_LIB_X   =$(X_LIB_DIR)/libX11.a
+STATIC_LIB_XEXT=$(X_LIB_DIR)/libXext.a
+STATIC_LIB_XT  =$(X_LIB_DIR)/libXt.a
+STATIC_LIB_XAW =$(X_LIB_DIR)/libXaw.a
+STATIC_LIB_XMU =$(X_LIB_DIR)/libXmu.a
+
+# the following defs are for using the system's usb lib (only needed in some apps);
+USB_LIB_DIR=
+USB_LIB_ARG=-lusb
+USB_LIB_DIR_ARG=-L/usr/lib
+USB_MAKE_TARGET=
+
+MATRIXLOCK_DIR=$(TOP)/../exept/setup/ml
+MATRIXLOCK_LIB_ARG=-L$(MATRIXLOCK_DIR)/lib -lmxlin260 -lusb
+#------------------ end of linux-elf/COMMON/defines ---------------------
--- a/makelib/config-linux-i386.make	Fri May 11 12:10:26 2018 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,63 +0,0 @@
-# linux-elf/amd64_mode32: cpu: athlon64 in 32 bit mode os:linux-ELF options: +commonSymbol
-#
-#------------------ start of linux/amd64_mode32 ---------------------
-#
-# $Header: /cvs/stx/stx/configurations/linux-elf/amd64_mode32/defines,v 1.25 2016-03-17 22:32:46 stefan Exp $
-
-include $(MAKELIB)/config-linux.make
-
-CONF=config-linux-i386
-
-COMMONSYMBOLS=+commonSymbols
-SAVECOMMONSYMBOLS=+saveCommonSymbols
-STCARCHOPT=+sharedLibCode -staticMethods $(COMMONSTCCONFOPT)
-STC_LEXLIB=libfl/libfl_pic.a
-# STC_LEXLIB=-lfl
-
-DEFS=-D__amd64_mode32__ -DELF
-DLDEFS=-DHAS_DLOPEN
-
-CCCONFOPT=-m32
-
-AS_CC=as
-AS_CC_CFLAG=
-ASFLAGS=--32
-
-
-# LD=ld -m elf_i386
-# STC_LD=ld -m elf_i386
-STC_LDFLAGS=$(CCCONFOPT) -L/usr/lib -Llib -L/usr/local/lib -L/usr/X11/lib
-CLASSLIB_LD=ld -m elf_i386
-LDFLAGS=$(CCCONFOPT) -Llib -L/usr/local/lib -L/usr/X11/lib -L/usr/lib \
-	-Wl,-rpath,'$$ORIGIN',-rpath,'$$ORIGIN/lib',-rpath,'$$ORIGIN/../lib' 
-LD_REL_FLAG=-r
-
-#OPTIONAL_DL_LIB=-ldl
-#OPTIONAL_LIBDL_TARGET=libdld
-
-TARGET_RULE_EXTENSION=_shared
-O_EXT=.so
-# name of object files in binary directory
-BIN_O=.so
-
-MAKE_ZLIB_ARG= -f Makefile.amd64_mode32
-MAKE_BZ2LIB_ARG= -f Makefile.amd64_mode32
-
-# We have problems with FFI as of 2008-12-01
-# Remove the follwoing lines when the problems have been fixed
-#FFI_OBJS=
-#OPTIONAL_HAVE_FFI_ARG=
-#OPTIONAL_FFI_TARGET_IN_LIBRUN=
-# END FFI kludge
-FFI_CFLAGS=$(CCCONFOPT)
-#------------------ end of linux-elf/amd64_mode32 ---------------------
-O_RULE=__STANDARD_O_RULE__
-EXTRA_LIBS=-ldl -lX11 -lXext
-
-# Following is a workaround for Debian / Ubuntu boxes. 
-# 
-ifeq ($(wildcard /usr/lib/i386-linux-gnu/libXft.so),) 
-ifeq ($(wildcard /usr/lib/i386-linux-gnu/libXft.so.2),/usr/lib/i386-linux-gnu/libXft.so.2) 
-LIB_XFT=-l:libXft.so.2 -l:libfontconfig.so.1
-endif
-endif
--- a/makelib/config-linux-x86_64.make	Fri May 11 12:10:26 2018 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,79 +0,0 @@
-# linux-elf/x86_64: cpu: x86_64 in 64 bit mode os:linux-ELF options: +commonSymbol
-#
-#------------------ start of linux/x86_64 ---------------------
-#
-# $Header: /cvs/stx/stx/configurations/linux-elf/x86_64/defines,v 1.18 2016-08-29 08:52:44 stefan Exp $
-
-include $(MAKELIB)/config-linux.make
-
-CONF=config-linux-x86_64
-
-COMMONSYMBOLS=+commonSymbols
-SAVECOMMONSYMBOLS=+saveCommonSymbols
-STCARCHOPT=+sharedLibCode -staticMethods $(COMMONSTCCONFOPT)
-STC_LEXLIB=-lfl
-
-CPU_INTERN_H=cpu_x86_64.h
-
-DEFS=-D__x86_64__
-CCCONFOPT=-m64 -fPIC
-
-DLDEFS=-DHAS_DLOPEN
-
-NOSTDLIB=
-
-LDFLAGS=-Llib -L/usr/local/lib -L/usr/X11/lib \
-	-Wl,-z,now,-Bsymbolic,-rpath,'$$ORIGIN',-rpath,'$$ORIGIN/lib',-rpath,'$$ORIGIN/../lib'
-EXTRA_SHARED_LIBS=-ldl
-
-#OPTIONAL_DL_LIB=-ldl
-#OPTIONAL_LIBDL_TARGET=libdld
-
-TARGET_RULE_EXTENSION=_shared
-O_EXT=.so
-# name of object files in binary directory
-BIN_O=.so
-
-# We have problems with FFI as of 2008-12-01
-# Remove the follwoing lines when the problems have been fixed
-FFI_OBJS=
-OPTIONAL_HAVE_FFI_ARG=
-OPTIONAL_FFI_TARGET_IN_LIBRUN=
-# END FFI kludge
-
-# LIBJPEG=$(LIBJPEG_DIR)/libjpeg.a
-# LIBJPEG_FOR_COPY=$(LIBJPEG_DIR)/libjpeg.a
-# LIBJPEG_FOR_LINK=$(LIBJPEG_DIR)/libjpeg.a
-
-
-# a netsnmp library
-#
-# if already in /usr/lib:
-# DYNAMIC_LINK_LIB_NETSNMP=-lnetsnmp
-# STATIC_LINK_LIB_NETSNMP=
-# TARGET_LIB_NETSNMP=
-#
-# if a local one should be built and used (for 64bit, no -lrt is needed):
-DYNAMIC_LINK_LIB_NETSNMP=-L$(NETSNMP_LIBDIR)/.libs -L$(NETSNMP_LIBDIR) $(NETSNMP_LIBDIR)/.libs/libnetsnmp.a
-
-
-MATRIXLOCK_DIR=$(TOP)/../exept/setup/ml
-MATRIXLOCK_LIB_ARG=-L$(MATRIXLOCK_DIR)/lib -lmxlin64260
-MATRIXLOCK_SHARED_LIB_ARG=-lusb
-
-#.SUFFIXES: .o .so
-
-#.o.so:
-#	@-rm -f $*.so
-#	$(LD) -shared $(LDFLAGS) -o $*.so $*.o $(LOCAL_SHARED_LIBS)
-
-#------------------ end of linux-elf/x86_64 ---------------------
-O_RULE=__STANDARD_O_RULE__
-EXTRA_LIBS=-ldl -lX11 -lXext
-MAKE_ZLIB_ARG= "CFLAGS=-fPIC -O3 -DUSE_MMAP"
-FFI_OBJS=$(FFI_DIR)/build/src/*.o $(FFI_DIR)/build/src/x86/*.o
-FFI_DIR=libffi-3.0.10rc8
-OPTIONAL_HAVE_FFI_ARG=-DHAVE_FFI -I$(TOP)/librun/$(FFI_DIR)/build/include
-OPTIONAL_FFI_TARGET_IN_LIBRUN=ffi
-FFI_CC="$(CC) -m64 -fPIC"
-FFI_LD="ld -m elf_x84_64"
--- a/makelib/config-linux.make	Fri May 11 12:10:26 2018 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,301 +0,0 @@
-#------------------ start of linux-elf/COMMON/defines ---------------------
-#
-# $Header: /cvs/stx/stx/configurations/linux-elf/COMMON/defines,v 1.92 2016-07-19 19:56:46 stefan Exp $
-include $(MAKELIB)/config.make
-
-CONF_VENDOR=linux
-
-ARCH=linux
-ARCH_NAME=linux_elf
-DISTRIB_NAME=demoLinuxELF
-
-CCARCHCOMMONOPT=-DELF
-#RELOCINSTVARS=+relocInstvars
-
-INTERN_H=linuxIntern.h
-CPU_INTERN_H=cpu_i386.h
-CPUINTERN_H=$(INCLUDE)/linuxIntern.h
-
-MAKEFILE_EXTENSION=linux-elf
-TARGET_RULE_EXTENSION=_shared
-# ALL_TARGET=LINUX_i386_ELF
-
-MAKEFLAGS += --no-print-directory
-MAKE_INCLUDE=include
-INSTALL=install
-INSTALL_BIN_CMD=install -s -m 0755
-INSTALL_SCRIPT_CMD=install -m 0755
-COMPRESSOR=gzip
-COMPRESSED_FILETYPE=targz
-SETUP_RULE=setup_linux
-
-# We need to use at most GCC 6.x, some code may fail to compile
-# with GCC 7 due to -Werror (`send.c` for instance). We won't fix
-# that for Smalltalk/X 6.x.x, will be fixed only for Smalltalk/X 8.0.0
-ifeq (,$(shell which gcc-6))
-ifeq (,$(shell which gcc-5))
-ifeq (,$(shell which gcc-4))
-$(error Smalltalk/X requires GCC 4.x, GCC 5.x or GCC 6.x to compile, none found)
-else
-CC=gcc-4
-endif
-else
-CC=gcc-5
-endif
-else
-CC=gcc-6
-endif
-
-
-#
-# * `-fno-strict-aliasing` since it's almost impossible to write a memory
-#   manager without breaking C's aliasing rules. Better be on a safe side.
-#   Nice post about these things: http://blog.regehr.org/archives/1307
-#
-# * `-Wno-strict-aliasing` to turn of warnings about breaking aliasing rules.
-#   While one would think these wanings should be turned off when
-#   `-fno-strict-aliasing` is passed, apparently some GCCs bark anyway.
-#
-# * `-fno-stack-protector` is (was) required on Ubuntu brands where it's
-#   by default on. Smalltalk/X green thread implementation does not wotk
-#   well with it.
-#
-# * `-U_FORTIFY_SOURCE` is (was) required on Gentoo as a workaround for failing
-#   longjmp check when _FORTIFY_SOURCE is defined (which is enabled by default
-#   on Gentoo. Remove when fixed.
-#
-OPT=-fPIC -O2 -fno-strict-aliasing -Wno-strict-aliasing -fno-stack-protector -ggdb3 -pipe -U_FORTIFY_SOURCE
-
-STC_LEXLIB=
-
-OS=-DLINUX
-
-FFI_ARCH=x86
-FFI_CFLAGS=
-FFI_DIR=libffi-3.0.10rc8
-FFI_OBJS=$(FFI_DIR)/build/src/*.o $(FFI_DIR)/build/src/x86/*.o
-OPTIONAL_HAVE_FFI_ARG=-DHAVE_FFI -I$(TOP)/librun/$(FFI_DIR)/build/include
-OPTIONAL_FFI_TARGET_IN_LIBRUN=ffi
-FFI_CC="gcc -m32"
-FFI_LD='ld -m elf_i386'
-
-OBJNAME=.o
-BIN_O=.so
-O_EXT=.so
-SA_EXT=.so
-SO_NAME=.so
-
-#
-# use static librt for clock_gettime().
-# dynamic librt causes libpthread to be loaded that does not
-# work with stx' stack layout
-#
-
-OTHERLIBS=-ldl -Wl,-Bstatic,-lrt,-Bdynamic
-OTHERLIBS=-ldl -lrt
-OTHERLIBS=-ldl -lrt -lc
-STATIC_OTHERLIBS=
-LDFLAGS=-Llib -L/usr/local/lib -L/usr/X11/lib \
-	-Wl,-z,now,-rpath,'$$ORIGIN',-rpath,'$$ORIGIN/lib',-rpath,'$$ORIGIN/../lib',--wrap=memcpy
-
-SPECIAL_LIBRUN_RULE=allLINUX
-LIBRUNTARGET=librun.so
-
-FORCE_LINK_LIBRUN=1
-
-# mhm - these should now only be in libview!
-LIBVIEW_EXTRA_TARGETS=$(WORKSTAT1).so $(WORKSTAT2).so
-
-ARCH_LIBSNMP=$(TOP)/libsnmp
-ARCH_CLASSLIBDIRS=$(ARCH_LIBSNMP)
-
-#
-# where are the X libraries if we need them:
-#
-XDEFS=-DSHAPE -DXINERAMA -DXFT -DHAVE_FONTCONFIG
-XINCLUDE+=$(shell pkg-config --cflags xft)
-
-LIB_X=-lX11
-LIB_XEXT=-lXext
-LIB_XT= -lXt
-LIB_XAW=-lXaw
-LIB_XMU=-lXmu
-LIB_XMU=-lXt
-LIB_XINERAMA=-lXinerama
-LIB_XFT=$(shell pkg-config --libs xft)
-
-EXTRA_LIBS= -lX11 -lXext
-STATIC_EXTRA_LIBS= -lX11 -lXext
-
-X_LIB_DIR=/usr/X11/lib
-JPEGLIB_LD_ARG=
-LIBJPEG_DIR=$(TOP)/support/libjpeg-9
-JPEG_INCLUDE=-I$(LIBJPEG_DIR)
-
-LIBJPEG=$(LIBJPEG_DIR)/.libs/libjpeg.a
-LIBJPEG_FOR_COPY=$(LIBJPEG_DIR)/.libs/libjpeg.a
-LIBJPEG_FOR_LINK=$(LIBJPEG_DIR)/.libs/libjpeg.a
-
-#
-# a netsnmp library
-#
-# if already in /usr/lib:
-# DYNAMIC_LINK_LIB_NETSNMP=-lnetsnmp
-# STATIC_LINK_LIB_NETSNMP=
-# TARGET_LIB_NETSNMP=
-#
-# if a local one should be built and used:
-DYNAMIC_LINK_LIB_NETSNMP=-L$(NETSNMP_LIBDIR)/.libs -L$(NETSNMP_LIBDIR) $(NETSNMP_LIBDIR)/.libs/libnetsnmp.a -lrt
-STATIC_LINK_LIB_NETSNMP=
-TARGET_LIB_NETSNMP=snmplib
-
-
-STATIC_LIB_X   =$(X_LIB_DIR)/libX11.a
-STATIC_LIB_XEXT=$(X_LIB_DIR)/libXext.a
-STATIC_LIB_XT  =$(X_LIB_DIR)/libXt.a
-STATIC_LIB_XAW =$(X_LIB_DIR)/libXaw.a
-STATIC_LIB_XMU =$(X_LIB_DIR)/libXmu.a
-
-# the following defs are for using the system's usb lib (only needed in some apps);
-USB_LIB_DIR=
-USB_LIB_ARG=-lusb
-USB_LIB_DIR_ARG=-L/usr/lib
-USB_MAKE_TARGET=
-
-MATRIXLOCK_DIR=$(TOP)/../exept/setup/ml
-MATRIXLOCK_LIB_ARG=-L$(MATRIXLOCK_DIR)/lib -lmxlin260 -lusb
-
-#NOSTDLIB=-nostdlib
-NOSTDLIB=
-
-#
-# an extra rule for shared objects
-#
-.SUFFIXES: .a .o .so
-# use -nostdlib in combination with -lc -lgcc: we actually use some functions (like fstat) from libc (-lc)
-# and gcc (-lgcc) provides some low level routines that should be around
-.o.so:
-	@-rm -f $*.so
-	$(LD) $(NOSTDLIB) -shared $(LDFLAGS) -o $*.so $*.o $(LOCAL_SHARED_LIBS) $(EXTRA_SHARED_LIBS) -lc -lgcc
-
-.a.so:
-	rm -f $*.so
-	rm -rf tmp.a.so
-	mkdir tmp.a.so
-	cd tmp.a.so; $(AR) x ../$*.a
-	@-rm -f $*.so
-	$(LD) $(NOSTDLIB) -shared $(LDFLAGS) -o $*.so tmp.a.so/*.o $(LOCAL_SHARED_LIBS) $(EXTRA_SHARED_LIBS) -lc -lgcc
-	rm -rf tmp.a.so
-
-SHAREDLIBRULE=linuxSharedLib
-linuxSharedLib:
-	@-rm -f $(LIB)
-	$(LD) $(NOSTDLIB) -shared $(LDFLAGS) -o $(LIB) $(OBJS) $(LOCAL_SHARED_LIBS) $(EXTRA_SHARED_LIBS) -lc -lgcc
-
-#.st.s2:
-#       $(STC) $(STCFLAGS) $(CFLAGS) -C $*.st
-#       $(CC) $(CFLAGS) -S $*.c
-#       @-echo " stripping off useless .align directives ..."
-#       sed "/.align 4/d" < $*.s > $*.s2
-
-#.SUFFIXES: .s2 .s3
-#
-#.s2.s3:
-#       @-echo "   removing useless jumps ..."
-#       awk -f $(TOP)/rules/i386.script.awk < $*.s2 > $*.s3
-
-#
-# extra rules for space-optimized/speed-optimized modules modules
-#
-.SUFFIXES: .st .spc .spd
-
-.st.spc:
-	$(MAKE) STC_OPT="$(STC_OPT)" OPT="$(OPT)" STCOPT="$(STCOPT)" STFILE=$* KEEP=y __SPACEOPT_O_RULE__
-
-.st.spd:
-	$(MAKE) STC_OPT="$(STC_OPT)" OPT="$(OPT)" STCOPT="$(STCOPT)" STFILE=$* KEEP=y __SPEEDOPT_O_RULE__
-
-VERYBIG_STFILE_RULE=__VERYBIG_RULE__
-__VERYBIG_RULE__:
-	$(MAKE) $(O_RULE) STFILE=$(BIG_FILE) CC="$(CC)" STC_OPT="$(STC_OPT) -Os" OPT="$(OPT) -Os" SEPINITCODE="$(SEPINITCODE)"
-
-LONGNAME_RULE=__NORMAL_RULE2__
-__NORMAL_RULE2__:
-	$(MAKE) $(O_RULE) STFILE=$(LONGNAME_FILE) CC="$(CC)" STC_OPT="$(STC_OPT)" OPT="$(OPT)" SEPINITCODE="$(SEPINITCODE)"
-
-
-#AWK_VERBOSE=1
-AWK_DIFF=
-AWK_FIX=$(TOP)/rules/i386fixELF.awk
-
-SPACEOPT_O_RULE=__SPACEOPT_O_RULE__
-__SPACEOPT_O_RULE__:
-	@$(MAKE) STC=$(STC) CC=$(CC) STCOPT="$(STCOPT)" SEPINITCODE="$(SEPINITCODE)" $(STFILE).s
-	@-if [ -f $(STFILE).s ]; then \
-	    awk -f $(AWK_FIX) < $(STFILE).s > $(STFILE).s2; \
-	    if [ "$(AWK_DIFF)" != "" ]; then \
-		diff $(STFILE).s $(STFILE).s2; \
-	    fi; \
-	    if [ "$(AWK_VERBOSE)" != "" ]; then \
-		echo "# optimized $(STFILE).s -> $(STFILE).s2"; \
-		tail -5 $(STFILE).s2; \
-		echo; \
-	    fi; \
-	    $(AS) $(ASFLAGS) -o $(STFILE).o $(STFILE).s2; \
-	fi
-	@-if [ -f i_$(STFILE).s ]; then \
-	    awk -f $(AWK_FIX) < i_$(STFILE).s > i_$(STFILE).s2; \
-	    if [ "$(AWK_DIFF)" != "" ]; then \
-		diff i_$(STFILE).s i_$(STFILE).s2; \
-	    fi; \
-	    if [ "$(AWK_VERBOSE)" != "" ]; then \
-		echo "# optimized i_$(STFILE).s -> i_$(STFILE).s2"; \
-		tail -5 i_$(STFILE).s2; \
-		echo; \
-	    fi; \
-	    $(AS) $(ASFLAGS) -o i_$(STFILE).o i_$(STFILE).s2; \
-	fi
-	@if [ "$(KEEP)" != "y" ]; then \
-	    rm -f $(STFILE).s $(STFILE).s2 i_$(STFILE).s i_$(STFILE).s2; \
-	fi
-
-
-.SUFFIXES: .st .s2
-
-.st.s2:
-	@$(MAKE) $(SPEEDOPT_O_RULE) STCOPT="$(STCOPT)" KEEP=y STFILE=$* SEPINITCODE="$(SEPINITCODE)"
-
-SPEEDOPT_O_RULE=__SPEEDOPT_O_RULE__
-__SPEEDOPT_O_RULE__:
-	@$(MAKE) STC=$(STC) CC=$(CC) STCOPT="$(STCOPT)" SEPINITCODE="$(SEPINITCODE)" $(STFILE).s
-	@-if [ -f $(STFILE).s ]; then \
-	    awk -f $(AWK_FIX) < $(STFILE).s > $(STFILE).s2; \
-	    if [ "$(AWK_DIFF)" != "" ]; then \
-		diff $(STFILE).s $(STFILE).s2; \
-	    fi; \
-	    if [ "$(AWK_VERBOSE)" != "" ]; then \
-		echo "# optimized $(STFILE).s -> $(STFILE).s2"; \
-		tail -5 $(STFILE).s2; \
-		echo; \
-	    fi; \
-	    $(AS) $(ASFLAGS) -o $(STFILE).o $(STFILE).s2; \
-	fi
-	@-if [ -f i_$(STFILE).s ]; then \
-	    awk -f $(AWK_FIX) < i_$(STFILE).s > i_$(STFILE).s2; \
-	    if [ "$(AWK_DIFF)" != "" ]; then \
-		diff i_$(STFILE).s i_$(STFILE).s2; \
-	    fi; \
-	    if [ "$(AWK_VERBOSE)" != "" ]; then \
-		echo "# optimized i_$(STFILE).s -> i_$(STFILE).s2"; \
-		tail -5 i_$(STFILE).s2; \
-		echo; \
-	    fi; \
-	    $(AS) $(ASFLAGS) -o i_$(STFILE).o i_$(STFILE).s2; \
-	fi
-	@if [ "$(KEEP)" != "y" ]; then \
-	    rm -f $(STFILE).s $(STFILE).s2 i_$(STFILE).s i_$(STFILE).s2; \
-	fi
-
-O_RULE=$(SPEEDOPT_O_RULE)
-
-#------------------ end of linux-elf/COMMON/defines ---------------------
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/makelib/config-x86_64-pc-linux-gnu.make	Tue Jun 12 21:27:24 2018 +0000
@@ -0,0 +1,48 @@
+#
+# Definitions for target x86_64-pc-linux-gnu.
+#
+# based on eXept's:
+#
+# linux-elf/x86_64: cpu: x86_64 in 64 bit mode os:linux-ELF options: +commonSymbol
+#
+#------------------ start of linux/x86_64 ---------------------
+#
+# $Header: /cvs/stx/stx/configurations/linux-elf/x86_64/defines,v 1.18 2016-08-29 08:52:44 stefan Exp $
+
+include $(MAKELIB)/config-linux-gnu.make
+include $(MAKELIB)/config-x86_64.make
+
+LDFLAGS=-Llib -L/usr/local/lib -L/usr/X11/lib \
+	-Wl,-z,now,-Bsymbolic,-rpath,'$$ORIGIN',-rpath,'$$ORIGIN/lib',-rpath,'$$ORIGIN/../lib'
+EXTRA_SHARED_LIBS=-ldl
+
+# LIBJPEG=$(LIBJPEG_DIR)/libjpeg.a
+# LIBJPEG_FOR_COPY=$(LIBJPEG_DIR)/libjpeg.a
+# LIBJPEG_FOR_LINK=$(LIBJPEG_DIR)/libjpeg.a
+
+
+# a netsnmp library
+#
+# if already in /usr/lib:
+# DYNAMIC_LINK_LIB_NETSNMP=-lnetsnmp
+# STATIC_LINK_LIB_NETSNMP=
+# TARGET_LIB_NETSNMP=
+#
+# if a local one should be built and used (for 64bit, no -lrt is needed):
+DYNAMIC_LINK_LIB_NETSNMP=-L$(NETSNMP_LIBDIR)/.libs -L$(NETSNMP_LIBDIR) $(NETSNMP_LIBDIR)/.libs/libnetsnmp.a
+
+
+MATRIXLOCK_DIR=$(TOP)/../exept/setup/ml
+MATRIXLOCK_LIB_ARG=-L$(MATRIXLOCK_DIR)/lib -lmxlin64260
+MATRIXLOCK_SHARED_LIB_ARG=-lusb
+
+#.SUFFIXES: .o .so
+
+#.o.so:
+#	@-rm -f $*.so
+#	$(LD) -shared $(LDFLAGS) -o $*.so $*.o $(LOCAL_SHARED_LIBS)
+
+#------------------ end of linux-elf/x86_64 ---------------------
+O_RULE=__STANDARD_O_RULE__
+EXTRA_LIBS=-ldl -lX11 -lXext
+MAKE_ZLIB_ARG= "CFLAGS=-fPIC -O3 -DUSE_MMAP"
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/makelib/config-x86_64.make	Tue Jun 12 21:27:24 2018 +0000
@@ -0,0 +1,7 @@
+#
+# Common definitions for the x86_64 arch
+#
+
+CPU_INTERN_H = cpu_x86_64.h
+DEFS += -D__x86_64__
+CCCONFOPT = -m64 -fPIC
--- a/makelib/config.make	Fri May 11 12:10:26 2018 +0100
+++ b/makelib/config.make	Tue Jun 12 21:27:24 2018 +0000
@@ -19,6 +19,36 @@
 # see redefinitions in vendor-define, config-define or individual Make.proto files
 
 #
+# Define C compiler "optimization" flags to use.
+#
+# * `-fno-strict-aliasing` since it's almost impossible to write a memory
+#   manager without breaking C's aliasing rules. Better to be on the safe side.
+#   Nice post about these things: http://blog.regehr.org/archives/1307
+#
+# * `-Wno-strict-aliasing` to turn off warnings about breaking aliasing rules.
+#   While one would think these warnings should be turned off when
+#   `-fno-strict-aliasing` is passed, apparently some GCCs bark anyway.
+#
+# * `-fno-stack-protector` is (was) required on Ubuntu brands where it's
+#   on by default. Smalltalk/X's green thread implementation does not work
+#   well with it.
+#
+# * `-U_FORTIFY_SOURCE` is (was) required on Gentoo as a workaround for a failing
+#   longjmp check when _FORTIFY_SOURCE is defined (which is enabled by default
+#   on Gentoo). Remove when fixed.
+#
+OPT=-fPIC -O2 -fno-strict-aliasing -Wno-strict-aliasing -fno-stack-protector -ggdb3 -pipe -U_FORTIFY_SOURCE
+
+
+CONF=$(BUILD_TARGET)
+CONF_VENDOR=$(BUILD_TARGET)
+
+ARCH=$(CONF)
+ARCH_NAME=$(ARCH)
+DISTRIB_NAME=$(ARCH)
+
+
+#
 # target for local make
 #
 TARGET_DIR=$(TOP)/projects/smalltalk
@@ -56,16 +86,16 @@
 # MAKE=make
 
 # c-compiler to use
-CC=cc
-AS_CC=$(CC)
+CC ?= cc
+AS_CC ?= $(CC)
 AS_CC_CFLAG=-c
-CPP=/lib/cpp
+CPP = cpp
 
 # assembler for librun/asmhelp
-AS=as
+AS = as
 
 # linker for final executable
-LD=$(CC)
+LD = $(CC)
 # linker for classlibs
 CLASSLIB_LD=ld
 
@@ -85,7 +115,9 @@
 
 # misc
 STRIP=strip
+OBJCOPY=objcopy
 DIRNAME=dirname
+CHMOD=chmod
 #
 # all BSD-based expr-implementations are missing the substring function
 #
@@ -146,12 +178,13 @@
 GOODIES_STCOPT=+optspace2
 
 # common for all:
-STCCOMMONOPT=
+STCCOMMONOPT=+sharedLibCode -staticMethods +lineInfo +stLineInfo
 CCCOMMONOPT=$(OPTIONAL_HAVE_FFI_ARG)
 
 # to be redefined in arch/COMMON/defines
 STCARCHCOMMONOPT=
-CCARCHCOMMONOPT=
+CCARCHCOMMONOPT=-DELF
+
 
 # top be redefined in arch/<conf>/defines
 STCARCHOPT=
@@ -181,11 +214,6 @@
 # to the corresponding defines-file
 LIMITSUPERINCLUDE=
 
-#
-# include path for Xlib
-#
-XINCLUDE=-I/usr/include/X11
-
 # default for 'make includeLink'
 
 #INCLUDELINK_FILES= \
@@ -304,7 +332,6 @@
 
 # name of object files
 O=o
-O2=o
 
 # name of archives
 A=a
@@ -314,24 +341,27 @@
 
 # name of shared objects
 SO=so
+SONAME=.$(SO)
+SO_NAME=$(SONAME)
 
 # name of prelinked nonshared objects
-OBJ=obj
-OBJNAME=.obj
+OBJ=$(O)
+OBJNAME=.$(OBJ)
 
-# name of individual object files
-BIN_O=.o
+# name of individual object files (JV: is that true?)
+BIN_O=.$(SO)
 
 # extension of linked object files
 # (usually either '.a', '.o' or '.obj' on static-link systems
-# or '.so' on dynamic link systems.
-# (maybe even '.dll' some time later ...)
+# or '.so' or '.dll' on dynamic link systems.
+
+# shared lib link (default)
+O_EXT=.$(SO)
 
-# static link with prelinked relocatable modules:
-O_EXT=$(OBJNAME)
-# shared lib link:
-# O_EXT=.$(SO)
-# library link:
+# static link with prelinked relocatable modules (not used currently):
+# O_EXT=$(OBJNAME)
+
+# library link (not used currently):
 # O_EXT=.$(A)
 
 #
@@ -435,14 +465,14 @@
 #
 LINKOBJS=[A-Z]*.$(O)
 
-SA_EXT=.a
+
 A_EXT=.a
 STATIC_O_EXT=.o
 
 #
 # libraries as in the development tree;
 #
-LIBRUN   = $(LIBRUNDIR)/librun$(SA_EXT)
+LIBRUN   = $(LIBRUNDIR)/librun$(O_EXT)
 LIBBASIC = $(LIBBASICDIR)/libstx_libbasic$(O_EXT)
 LIBBOSS  = $(LIBBOSSDIR)/libstx_libboss$(O_EXT)
 LIBBASIC2= $(LIBBASIC2DIR)/libstx_libbasic2$(O_EXT)
@@ -515,7 +545,7 @@
 #
 #LINK_LIBDIR = lib/
 #LINK_LIBDIR = .
-LINK_LIBRUN   = $(LINK_LIBDIR)librun$(SA_EXT)
+LINK_LIBRUN   = $(LINK_LIBDIR)librun$(O_EXT)
 LINK_LIBBASIC = $(LINK_LIBDIR)libstx_libbasic$(O_EXT)
 LINK_LIBBOSS  = $(LINK_LIBDIR)libstx_libboss$(O_EXT)
 LINK_LIBBASIC2= $(LINK_LIBDIR)libstx_libbasic2$(O_EXT)
@@ -713,23 +743,26 @@
 # system and other (non-class) libraries
 # --------------------------------------
 
-X_LIB_DIR=/usr/lib
-STATIC_X_LIB_DIR=$(X_LIB_DIR)
-#X_LIB_DIR=/usr/X11/lib
+#
+# X11 Libraries (X, Xext, Xft and Xinerama)
+#
+XDEFS    = -DSHAPE
+XINCLUDE = $(shell pkg-config --cflags x11 xext)
+LIB_X    = $(shell pkg-config --libs   x11 xext)
 
-LIB_X          = -lX11
-LIB_XEXT       = -lXext
-LIB_XAW        = -lXaw
-LIB_XT         = -lXt
-LIB_XMU        = -lXmu
-LIB_XINERAMA   = -lXinerama
-LIB_XFT        = -lXft
+ifeq (yes, $(shell if pkg-config --exists xft; then echo "yes"; else echo "no"; fi))
+XDEFS    += -DXFT -DHAVE_FONTCONFIG
+XINCLUDE +=$(shell pkg-config --cflags xft)
+LIB_XFT   =$(shell pkg-config --libs   xft)
+endif
 
-STATIC_LIB_X   = $(LIB_X)
-STATIC_LIB_XEXT= $(LIB_XEXT)
-STATIC_LIB_XAW = $(LIB_XAW)
-STATIC_LIB_XT  = $(LIB_XT)
-STATIC_LIB_XMU = $(LIB_XMU)
+ifeq (yes, $(shell if pkg-config --exists xinerama; then echo "yes"; else echo "no"; fi))
+XDEFS    += -DXINERAMA
+XINCLUDE +=$(shell pkg-config --cflags xinerama)
+LIB_XINERAMA=$(shell pkg-config --libs xinerama)
+endif
+
+
 
 # used with sparc only:
 # OSI-ACSE library
@@ -739,39 +772,6 @@
 
 WHICH_ODBC=unixodbc
 
-#
-# used when building libxt (wdget lib)
-# PRELINK is used whith ld -r to form libxt.o
-# SO_LINK is used with shared lib generation
-#
-LIB_XT_PRELINK = $(STATIC_LIB_XAW) $(STATIC_LIB_XMU) $(STATIC_LIB_XT)
-LIB_XT_SO_LINK =
-
-#
-# libraries to link in at the end
-#
-LIBXEXT     = $(LIB_XEXT)
-LIBX        = $(LIB_X)
-LIBXT       = $(LIB_XT)
-LIBXAW      = $(LIB_XAW)
-LIBXMU      = $(LIB_XMU)
-LIBXINERAMA = $(LIB_XINERAMA)
-LIBXFT      = $(LIB_XFT)
-
-STATIC_LIBXEXT = $(STATIC_LIB_XEXT)
-STATIC_LIBX    = $(STATIC_LIB_X)
-STATIC_LIBXT   = $(STATIC_LIB_XT)
-STATIC_LIBXAW  = $(STATIC_LIB_XAW)
-STATIC_LIBXMU  = $(STATIC_LIB_XMU)
-
-#
-# in a package definition file, add the following:
-#
-#OPTIONAL_LIBXAW = $(LIB_XAW)
-#OPTIONAL_LIBXT  = $(LIB_XT)
-#OPTIONAL_LIBXMU = $(LIB_XMU)
-#OPTIONAL_LIBXM  = $(LIB_XM)
-
 OPTIONAL_LIBS= \
 	$(OPTIONAL_LIBXM) \
 	$(OPTIONAL_LIBXAW) \
@@ -834,29 +834,6 @@
 SPLINT_DIR=$(TOP)/support/tools/$(SPLINT_VSN)
 # SPLINT=$(SPLINT_DIR)/cscope
 
-#
-# the JPEG library
-#
-# undefine this in arch-specific config, if you cannot compile libjpeg
-# or if you want to link against a system dll
-JPEGREADER_precompiled=JPEGReader.$(O)
-LIBJPEG_CC=$(CC)
-LIBJPEG_OPT=$(OPT)
-
-LIBJPEG_DIR=$(TOP)/support/libjpeg-6a
-# LIBJPEG_DIR=$(TOP)/support/libjpeg-7
-LIBJPEG=$(LIBJPEG_DIR)/libjpeg.a
-LIBJPEG_RULE=$(LIBJPEG)
-LIBJPEG_FOR_COPY=$(LIBJPEG_DIR)/libjpeg.a
-LIBJPEG_FOR_LINK=$(LIBJPEG_DIR)/libjpeg.a
-
-# the following defs are for using the provided (not the system) libjpeg;
-# should work everywhere
-JPEGLIB_LD_ARG=-L$(LIBJPEG_DIR) -ljpeg
-JPEGLIB_MAKE_TARGET=libjpeg
-JPEGREADER_PRELINKED=JPEGReader_prelinked
-JPEG_INCLUDE=
-
 # --------------------------------------------------
 # the MSQL library
 #
@@ -871,90 +848,8 @@
 #MSQL_INCLUDE_DIR=/usr/local/include
 #MSQL_LIB_DIR=/usr/local/lib
 
-
 # --------------------------------------------------
-# the VGL library
-#
-# has to be explicitely added in the defines file,
-# by defining
-#       EXTRA_LIBS= ... $(LIB_VOGL) ...
-# and
-#       CC_GL_FLAGS= ... $(CC_VGL_FLAGS) ...
-#
-# Late note: these defs are now all in the PACKS definitions
-#
-VOGL_DIR     =$(SUPPORT_DIR)/VGL/vogl/src
-LIB_VOGL     =$(VOGL_DIR)/libvogl.a
-LIB_VOGL_SO  =$(VOGL_DIR)/libvogl$(SO_NAME)
-MAKEFILE_VOGL=Makefile.$(MAKEFILE_EXTENSION)
-CC_VOGL_INCLUDE=-I$(VOGL_DIR)
-CC_VOGL_FLAGS  =-DVGL $(CC_VOGL_INCLUDE)
-
-#
-# only redefined for systems which have a real GL (SGI, AIX)
-# or have an openGL (mesa, openGL)
-#
-LIB_GL=$(LIB_VOGL)
-LIB_GL_TARGET=libvogl
-
-#
-# the mesa (openGL) library (optional)
-#
-MESA_VSN=3.0
-MESA_VSNDEF=3_0
-
-MESA_REL_DIR =Mesa-$(MESA_VSN)
-
-MESA_DIR     =$(SUPPORT_DIR)/MESA/$(MESA_REL_DIR)
-MESA_LIB_DIR =$(MESA_DIR)/lib
-LIB_MESA     =$(MESA_LIB_DIR)/libMesaGL.a
-LIB_MESA_SO  =$(MESA_LIB_DIR)/libMesaGL$(SO_NAME)
-CC_MESA_INCLUDE=-I$(MESA_DIR)/include
-CC_MESA_FLAGS=-DMESA -DMESA$(MESA_VSNDEF) $(CC_MESA_INCLUDE)
-MAKEFILE_MESA=Makefile
-
-#
-# the next two should be redefined in architecture-specific
-# config files of systems which have true GL/openGL (i.e. SGI)
-# or have those includes somewhere else.
-#
-# the default here is to use VGL/MESA
-#
-CC_GL_FLAGS=$(CC_VOGL_FLAGS)
-CC_OPENGL_FLAGS=$(CC_MESA_FLAGS)
-
-# the next one should be defined in a packs file
-# to choose between GL/VGL and OpenGL/MESA
-#
-# for GL/VOGL:
-#   CC_3D_FLAGS=$(CC_GL_FLAGS)
-# for openGL/MESA:
-#   CC_3D_FLAGS=$(CC_OPENGL_FLAGS)
-#
-
-
-# Systems which have true GL/OpenGL, mnay redefine the above
-# for true GL:
-#   CC_GL_FLAGS=-DGLX -I<gl-include-path-if-any>
-#
-# for true OpenGL:
-#   CC_OPENGL_FLAGS=-DOPENGL -I<openGL-include-path-if-any>
-
-#
-# redefine this to the Makefile target in Mesa/Makefile
-# for your machine
-MAKE_MESA_TARGET=$(ARCH)
-
-#
-# only redefined for systems which have a real openGL (SGI, NT)
-#
-LIB_OPENGL=$(LIB_MESA)
-LIB_OPENGL_TARGET=libmesa
-
-OPENGL_INCLUDE=
-
-#
-# a netsnmp library
+# The netsnmp library
 #
 # if already in /usr/lib:
 # DYNAMIC_LINK_LIB_NETSNMP=-lnetsnmp
@@ -1049,7 +944,7 @@
 #
 # extra targets made in support (old: librun) / libview
 #
-SUPPORT_EXTRA_TARGETS=$(OPTIONAL_LIBGL_TARGET) $(OPTIONAL_LIBDL_TARGET)
+SUPPORT_EXTRA_TARGETS=$(OPTIONAL_LIBDL_TARGET)
 
 LIBVIEW_EXTRA_TARGETS=
 
@@ -1099,11 +994,6 @@
 # The concrete libs (for example gl vs. vgl) are set in the architecture
 # specific configurations.
 
-OPTIONAL_LIBGL_TARGET=
-OPTIONAL_GL_LIB=
-STATIC_OPTIONAL_GL_LIB=$(OPTIONAL_GL_LIB)
-OPTIONAL_LINK_GL_LIB=
-
 OPTIONAL_LIBOPENGL_TARGET=
 OPTIONAL_OPENGL_LIB=
 STATIC_OPTIONAL_OPENGL_LIB=$(OPTIONAL_OPENGL_LIB)
@@ -1113,38 +1003,6 @@
 STATIC_OPTIONAL_DL_LIB=$(OPTIONAL_DL_LIB)
 OPTIONAL_LINK_DL_LIB=
 
-#
-# to get GL/VGL, add in a packageList:
-#    OPTIONAL_LIBGL_TARGET=$(LIB_GL_TARGET)
-#    OPTIONAL_GL_LIB=$(LIB_GL)
-#
-# in addition, you have to decide which gl to use;
-# for vogl, add to the architecture config file:
-#    LIB_GL_TARGET=$(LIB_VOGL_TARGET)
-#    LIB_GL=$(LIB_VOGL)
-# for a real gl, add in an architecture config file:
-#    LIB_GL_TARGET=
-#    LIB_GL=-lgl_s
-
-OPTIONAL_LIBGL_TARGET=$(LIB_GL_TARGET)
-OPTIONAL_GL_LIB=$(LIB_GL)
-OPTIONAL_LINK_GL_LIB=$(LINK_LIB_GL)
-OPTIONAL_GL_DEFINE=$(CC_GL_DEFINE)
-CC_3D_FLAGS=$(CC_GL_FLAGS)
-
-#
-# to get openGL/mesa, add in a packageList:
-#    OPTIONAL_LIBOPENGL_TARGET=$(LIB_OPENGL_TARGET)
-#    OPTIONAL_OPENGL_LIB=$(LIB_OPENGL)
-#    OPTIONAL_LINK_OPENGL_LIB=$(LINK_LIB_OPENGL)
-#
-# in addition, you have to decide which openGL to use;
-# for mesa, add to the architecture config file:
-#    LIB_OPENGL_TARGET=$(LIB_MESA_TARGET)
-#    LIB_OPENGL=$(LIB_MESA)
-# for a real gl, add in an architecture config file:
-#    LIB_GL_TARGET=
-#    LIB_GL=-lopenGL_s
 
 # to get an ObjectFileLoader,
 # you have to decide which dynamic load facility to use;
@@ -1218,9 +1076,12 @@
 
 #
 # rule for target building
-# TARGET_RULE_EXTENSION will be appended
+# TARGET_RULE_EXTENSION will be appended.
 #
-TARGET_RULE_EXTENSION=_static
+# Note that static linking (_static) might well be unsupported.
+#
+#TARGET_RULE_EXTENSION=_static
+TARGET_RULE_EXTENSION=_shared
 TARGET=stx
 
 # -------------
--- a/makelib/definitions-w.make	Fri May 11 12:10:26 2018 +0100
+++ b/makelib/definitions-w.make	Tue Jun 12 21:27:24 2018 +0000
@@ -14,6 +14,19 @@
 #   USETCC=1     - tiny-C (tcc)   (suspended work - unfinished)
 #   USEMINGW64=1 - mingw-C        (ongoing work)
 
+!ifndef BUILD_TARGET
+! ifdef USEMINGW32
+BUILD_TARGET = i686-pc-mingw32
+! else
+!  ifdef USEMINGW64
+BUILD_TARGET = x86_64-w64-mingw32
+!  else
+BUILD_TARGET = unknown-unknown-unknown
+!  endif
+! endif
+!endif
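+
+# Note (illustrative): with USEMINGW64=1 the above defaults BUILD_TARGET to
+# x86_64-w64-mingw32; pass BUILD_TARGET=... explicitly to override it.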
+
+
 !ifdef USEVC
 ! undef USEBC
 !endif
@@ -317,6 +330,8 @@
 RFLAGS=
 RES=$(O)
 APP_LINKER=$(CC)
+STRIP=$(MINGWBINDIR)\strip.exe
+OBJCOPY=$(MINGWBINDIR)\objcopy.exe
 
 !    if defined(USEMINGW32)
 USE_ARG=-DUSEMINGW32=1
--- a/makelib/definitions.make	Fri May 11 12:10:26 2018 +0100
+++ b/makelib/definitions.make	Tue Jun 12 21:27:24 2018 +0000
@@ -185,12 +185,15 @@
 
 
 #------------------ start of rules/stdHeader2 ---------------------
+COMMONDEFS += -DBUILD_TARGET=\"$(BUILD_TARGET)\"
+COMMONSYMBOLS=+commonSymbols
+
 
 # collected compiler flags
 
 CC_OPTS=$(CCCOMMONOPT) $(CCARCHCOMMONOPT) $(CCARCHOPT) $(OPT) $(CCCONFOPT) $(CC_CMD_OPT)
 
-STC_OPTS=$(STCCOMMONOPT) $(STCARCHCOMMONOPT) $(STCARCHOPT) $(STCOPT) $(STCCONFOPT) $(STCLOCALOPT) $(STC_CMD_OPT)
+STC_OPTS=$(STCCOMMONOPT) $(STCARCHCOMMONOPT) $(STCARCHOPT) $(STCOPT) $(COMMONSTCCONFOPT) $(STCCONFOPT) $(STCLOCALOPT) $(STC_CMD_OPT)
 
 CC_DEFS=$(DEFS) $(COMMONDEFS) $(ARCHCOMMONDEFS) $(ARCHDEFS) $(OTHERDEFS) $(LOCALDEFS)
 
@@ -198,7 +201,7 @@
 
 MAIN_CFLAGS= -I$(INCLUDE) $(OS) $(ARCHOPT) $(CCCONFOPT) $(MAIN_OPT) $(DBG) $(CC_DEFS)
 
-STCFLAGS   = -CC="$(CC)" -Y$(INCLUDE) -I$(INCLUDE_IN) $(STCOPT) $(STC_OPTS) $(LIMITSUPERINCLUDE) $(FORCEAUTOLOAD) -scmType: auto -sha1
+STCFLAGS   = -CC="$(CC)" -Y$(INCLUDE) -I$(INCLUDE_IN) $(STCOPT) $(STC_OPTS) $(LIMITSUPERINCLUDE) $(FORCEAUTOLOAD) +lineInfo +stLineInfo -scmType: auto -sha1
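+# +lineInfo and +stLineInfo make stc emit source line information for compiled
+# classes; they are passed here unconditionally rather than only for the
+# `debug` target in makelib/rules.make.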
 
 SNMP_CC_OPT=$(ARCHOPT) $(OPT)
 
@@ -206,4 +209,4 @@
 
 LINKOBJS   = $(OBJS)
 
-#------------------ end of rules/stdHeader2 ---------------------
\ No newline at end of file
+#------------------ end of rules/stdHeader2 ---------------------
--- a/makelib/rules-w.make	Fri May 11 12:10:26 2018 +0100
+++ b/makelib/rules-w.make	Tue Jun 12 21:27:24 2018 +0000
@@ -33,7 +33,7 @@
 SCMTYPE=auto
 !endif
 
-STCFLAGS=-Y$(STX_INCDIR) $(STCLOCALOPT) $(STCOPT) $(STC_MSGFORMAT) $(STC_CMD_OPT) -cSuffix=$(CSUFFIX) -scmType: $(SCMTYPE) -sha1
+STCFLAGS=-Y$(STX_INCDIR) $(STCLOCALOPT) $(STCOPT) $(STC_MSGFORMAT) $(STC_CMD_OPT) -cSuffix=$(CSUFFIX) +lineInfo +stLineInfo -scmType: auto -sha1
 
 # KEEPCFILE=1
 
@@ -147,8 +147,8 @@
 #------------------------------------------------------------------
 # MINGW32 defs here
 #------------------------------------------------------------------
-DEFS=-DWIN32 -D__MINGW__ -D__MINGW32__ -DDEBUG
-CFLAGS=$(CFLAGS1) $(CFLAGS2) $(LOCALINCLUDES) $(CLOCAL_INCL) $(CFLAGS_LOCAL)
+DEFS=-DWIN32 -D__MINGW__ -D__MINGW32__ -DDEBUG -DBUILD_TARGET="$(BUILD_TARGET)"
+CFLAGS=$(OPT) $(CFLAGS1) $(CFLAGS2) $(LOCALINCLUDES) $(CLOCAL_INCL) $(CFLAGS_LOCAL)
 LFLAGS = -L$(LIBDIR) $(LIB_BASE_LD_ARG)
 LIBFILES=$(LIBDIR_LIBRUN)\librun.lib
 # no resource compiler
@@ -162,11 +162,8 @@
 #------------------------------------------------------------------
 # MINGW64 defs here
 #------------------------------------------------------------------
-DEFS=-DWIN32 -D__MINGW__ -D__MINGW64__ -DDEBUG
-#CFLAGS=-O $(CFLAGS1) $(CFLAGS2) $(LOCALINCLUDES) $(CLOCAL_INCL) $(CFLAGS_LOCAL)
-#CFLAGS=-g $(CFLAGS1) $(CFLAGS2) $(LOCALINCLUDES) $(CLOCAL_INCL) $(CFLAGS_LOCAL)
-CWARNFLAGS=-Wno-int-conversion -Wno-pointer-to-int-cast -Wno-incompatible-pointer-types
-CFLAGS=-O2 $(CFLAGS1) $(CFLAGS2) $(CWARNFLAGS) $(LOCALINCLUDES) $(CLOCAL_INCL) $(CFLAGS_LOCAL)
+DEFS=-DWIN32 -D__MINGW__ -D__MINGW64__ -DDEBUG -DBUILD_TARGET="$(BUILD_TARGET)"
+CFLAGS=$(OPT) $(CFLAGS1) $(CFLAGS2) $(CWARNFLAGS) $(LOCALINCLUDES) $(CLOCAL_INCL) $(CFLAGS_LOCAL)
 LFLAGS=-g $(LIB_BASE_LD_ARG)
 LFLAGS2=-L$(LIBDIR)
 LIBFILES=$(LIBDIR_LIBRUN)/librun.lib
@@ -360,7 +357,9 @@
 		$(LIBINIT_FILE) $(IOBJS) $(OBJS)
 $ \
 		$(ALLLIB) $(X11_LIBS) -o $(OUTDIR_SLASH)$(LIBNAME).dll
-
+	$(OBJCOPY) --only-keep-debug $(OUTDIR)$(LIBNAME).dll $(OUTDIR)$(LIBNAME).dll.debug
+	$(STRIP) --strip-debug --strip-unneeded $(OUTDIR)$(LIBNAME).dll
+	$(OBJCOPY) --add-gnu-debuglink="$(OUTDIR)$(LIBNAME).dll.debug" $(OUTDIR)$(LIBNAME).dll
 	$(MAKE) -f bc.mak registerClassLib
 
 cLibRule:
--- a/makelib/rules.make	Fri May 11 12:10:26 2018 +0100
+++ b/makelib/rules.make	Tue Jun 12 21:27:24 2018 +0000
@@ -253,7 +253,7 @@
 
 #
 # unconditionally makes a new makefile
-# obsolete.
+# obsolete.
 #
 makeMakefile:
 	@echo "making $(MAKEFILE) for $(CONF) $(CONF_PACKS) ..."
@@ -688,7 +688,7 @@
 # debug - make with -g instead of -O
 #
 debug:
-	$(MAKE) OPT="-g $(OPT)" LIBRUN_OPT="-g $(LIBRUN_OPT)" STCOPT="$(STCOPT) +lineInfo +stLineInfo"
+	$(MAKE) OPT="-g $(OPT)" LIBRUN_OPT="-g $(LIBRUN_OPT)" STCOPT="$(STCOPT)"
 
 cfiles:
 	  for i in ""*.st""; do                          \
@@ -984,8 +984,8 @@
 
 registerClassLib: $(REGISTERED_PACKAGES_DIR)
 	@echo "registering $(MODULE):$(MODULE_DIR)/$(LIBNAME) in $(REGISTERED_PACKAGES_DIR)"
-	rm -f $(REGISTERED_PACKAGES_DIR)/$(LIBNAME)$(SO_NAME)
-	echo "lib ../../../../$(MODULE)/$(MODULE_DIR)/$(LIBNAME)$(SO_NAME)" > $(REGISTERED_PACKAGES_DIR)/$(LIBNAME)$(SO_NAME)
+	rm -f $(REGISTERED_PACKAGES_DIR)/$(LIBNAME)$(SONAME)
+	echo "lib ../../../../$(MODULE)/$(MODULE_DIR)/$(LIBNAME)$(SONAME)" > $(REGISTERED_PACKAGES_DIR)/$(LIBNAME)$(SONAME)
 
 $(REGISTERED_PACKAGES_DIR):
 	@mkdir $(REGISTERED_PACKAGES_DIR)
@@ -1016,15 +1016,15 @@
 
 obj::    obj$(TARGET_RULE_EXTENSION)
 
-obj_shared: newobj $(LIBNAME)$(SO_NAME)
-	
+obj_shared: newobj $(LIBNAME)$(SONAME)
+
 
 obj_shared_stub:
 	@-if [ -f $(STUBLIBNAME)$(OBJNAME) ]; then \
-	  echo "making shared stub object $(STUBLIBNAME)$(SO_NAME)"; \
-	  $(MAKE) $(STUBLIBNAME)$(SO_NAME); \
+	  echo "making shared stub object $(STUBLIBNAME)$(SONAME)"; \
+	  $(MAKE) $(STUBLIBNAME)$(SONAME); \
 	else \
-	  echo " ** $(STUBLIBNAME)$(SO_NAME) not made - no $(STUBLIBNAME)$(OBJNAME)"; \
+	  echo " ** $(STUBLIBNAME)$(SONAME) not made - no $(STUBLIBNAME)$(OBJNAME)"; \
 	fi
 
 obj_static: newobj
@@ -1087,6 +1087,14 @@
 	@echo "linking class object ..."
 	$(CLASSLIB_LD) $(LD_REL_FLAG) $(LD_OBJ_FLAGS) $^ $(LD_OBJ_LIBS) $(LD_REL_OBJ_LIBS) -o $@
 
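+# Link the class library as a shared object, then split out its debug info:
+# objcopy --only-keep-debug copies the DWARF sections into $(LIBNAME).$(SO).debug,
+# strip removes them from the library itself, and --add-gnu-debuglink records a
+# pointer so debuggers such as gdb can still find full symbols.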
+$(LIBNAME).$(SO): $(LIBNAME)$(OBJNAME)
+	@echo "linking class library ..."
+	$(LD) -shared $(LDFLAGS) -o $@ $< $(LOCAL_SHARED_LIBS) $(EXTRA_SHARED_LIBS)
+	$(OBJCOPY) --only-keep-debug $@ $@.debug
+	$(CHMOD) ugo-x $@.debug
+	$(STRIP) --strip-debug --strip-unneeded $@
+	$(OBJCOPY) --add-gnu-debuglink="$@.debug" $@
+
 newobj_staticLink::
 	@echo "linking static classLib object ..."
 	@$(CLASSLIB_LD) $(LD_REL_FLAG) $(LD_OBJ_FLAGS) \
@@ -1124,8 +1132,8 @@
 # to force creation of a shared object, even if not defined in config
 #
 sharedobj: newobj
-	@echo "making shared object $(LIBNAME)$(SO_NAME)"
-	$(MAKE) $(LIBNAME)$(SO_NAME)
+	@echo "making shared object $(LIBNAME)$(SONAME)"
+	$(MAKE) $(LIBNAME)$(SONAME)
 
 #
 # objstrip:
@@ -1675,7 +1683,7 @@
 # a pure C library
 cLibRule:
 	@-echo "linking ..."
-	$(LD) $(ST_LDFLAG) $(LDFLAGS) -shared -o $(CLIB)$(SO_NAME) \
+	$(LD) $(ST_LDFLAG) $(LDFLAGS) -shared -o $(CLIB)$(SONAME) \
 	      $(CRT0) \
 	      $(OPTIONAL_SHARED_LIB_PATH) \
 	      $(COBJS) $(LINK_ADDITIONAL_OBJS) \
@@ -1853,7 +1861,7 @@
 	fi
 
 installLibs::
-	@-if [ -f $(LIBNAME)$(SO_NAME) ]; then \
+	@-if [ -f $(LIBNAME)$(SONAME) ]; then \
 	  $(MAKE) installSharedLib; \
 	else \
 	  if [ -f $(LIBNAME)$(OBJNAME) ]; then \
@@ -1866,9 +1874,9 @@
 
 installSharedLib:: $(INSTALLLIB_DIR)
 	@if [ "$(VERBOSE)"x != "x" ]; then \
-	    echo "installing dynamic library: "$(LIBNAME)$(SO_NAME)" in "$(INSTALLLIB_DIR)" ..."; \
+	    echo "installing dynamic library: "$(LIBNAME)$(SONAME)" in "$(INSTALLLIB_DIR)" ..."; \
 	fi
-	$(INSTALL_LIB_CMD) $(LIBNAME)$(SO_NAME) $(INSTALLLIB_DIR)
+	$(INSTALL_LIB_CMD) $(LIBNAME)$(SONAME) $(INSTALLLIB_DIR)
 	@-if [ "$(THIS_IS_A_BASE_LIBRARY)"x != ""x ]; then  \
 	  if [ "$(MODULE)"x != ""x ]; then  \
 	    if [ "$(MODULE_DIR)"x != ""x ]; then  \
@@ -1885,7 +1893,7 @@
 	@if [ "$(VERBOSE)"x != "x" ]; then \
 	    echo "generating symbolic link to shared object in $(INSTALLBASE)/lib/lib ..."; \
 	fi
-	@cd $(INSTALLBASE)/lib/lib && $(LNS) -f ../../packages/$(MODULE)/$(MODULE_DIR)/$(LIBNAME)$(SO_NAME) .
+	@cd $(INSTALLBASE)/lib/lib && $(LNS) -f ../../packages/$(MODULE)/$(MODULE_DIR)/$(LIBNAME)$(SONAME) .
 
 installStaticLib:: $(INSTALLLIB_DIR)
 	@if [ "$(VERBOSE)"x != "x" ]; then \
@@ -2339,7 +2347,7 @@
 #
 # generate a libXXXInit.c file from classList/modulList.stc
 #
-$(LIBNAME)Init.o: libInit.cc Make.proto Make.spec 
+$(LIBNAME)Init.o: libInit.cc Make.proto Make.spec
 	$(CC) $(CFLAGS) -x c -c $< -o $@
 
 ntLibInit.c: $(LIBNAME)Init.c
@@ -2911,8 +2919,8 @@
 
 %.o: %.st
 	$(STC) $(STCFLAGS) $(STC_MSGFORMAT) $(CFLAGS_ARCH) $(CFLAGS) $(CFLAGS_LOCAL) -CC="$(CLASSLIB_CC)" -c $(PWD)/$<  && \
-	    $(TOUCH) $(PWD)/$@	
-	
+	    $(TOUCH) $(PWD)/$@
+
 %.c: %.st
 	$(STC) $(STCFLAGS) $(STC_MSGFORMAT) $(CFLAGS_ARCH) $(CFLAGS) $(CFLAGS_LOCAL) -CC="$(CLASSLIB_CC)" -C $(PWD)/$<
 
--- a/pipeline.groovy	Fri May 11 12:10:26 2018 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,304 +0,0 @@
-/*
- * Jenkins Pipeline definitions for Smalltalk/X jv-branch
- *
- * Following pipelines are defined here:
- *
- *  * `build()` - compiles and tests all supported configurations
- *    No artifacts are archived
- *
- *  * `integration()` - like the `build()` pipeline; when all tests pass on all
- *    configurations, archive artifacts and push staged changes to upstream
- *    (if approved by the user)
- */
-
-import com.cloudbees.plugins.credentials.CredentialsProvider;
-import com.cloudbees.plugins.credentials.common.StandardUsernameCredentials;
-import com.cloudbees.jenkins.plugins.sshcredentials.SSHUserPrivateKey;
-
-/**
- * Supported configurations. To overwrite, do:
- *
- *     ...
- *     pipeline = load "pipeline.groovy"
- *     ...
- *     pipeline.configurations = [ 'NODE': 'qnx', 'ARCH' : 'mips' ]
- *     ...
- *
- * Default value: see below.
- */
-configurations = [
-    'NODE': ['windows' , 'linux' ],
-    'ARCH': ['i386', 'x86_64' ]
-]
-
-/** Branch to build. To overwrite do:
- *
- *     ...
- *     pipeline = load "pipeline.groovy"
- *     ...
- *     pipeline.branch = "issue-123"
- *     ...
- *
- * Default value:
- * For multibranch jobs, this is the value of BRANCH_NAME environment variable.
- * For normal jobs this is the value of 'Branch' field from SCM configuration.
- *
- * Due to Jenkins internals, the fallback to use SCM configuration must be done
- * by 'user' of this variable (i.e., use `branch != null ? branch : scm.getBranch())
- */
-branch = env.BRANCH_NAME
-
-/**
- * Workspace to use. To overwrite do:
- *
- *     ...
- *     pipeline = load "pipeline.groovy"
- *     ...
- *     pipeline.workspace = "some-other-job"
- *     ...
- *
- * Default value:
- * Name of current job.
- */
-workspace = env.JOB_NAME
-
-/*
- * "Default" pipeline:
- */
-def build() {
-    stage ( "Build" ) {
-        matrix ( configurations ) {
-            stage ( "Checkout - {${env.NODE}-${env.ARCH}} " ) {
-                sshagent([ scm.getCredentialsId() ]) {
-                    sh  """
-                        if [ -f Jenkinsfile.rb ]; then
-                            hg pull --ssh ssh ${scm.getSource()}
-                        else
-                            hg clone --ssh ssh ${scm.getSource()} .
-                        fi
-                        hg up ${branch != null ? branch : scm.getBranch()}
-                        """
-                    sh  "ruby Jenkinsfile.rb --project \"stx:jv-branch\" --arch ${env.ARCH}  update"
-                }
-            }
-
-            stage ( "Compile - {${env.NODE}-${env.ARCH}}") {
-                sh "ruby Jenkinsfile.rb --project \"stx:jv-branch\" compile"
-            }
-        }
-    }
-
-    stage ( "Test" ) {
-        matrix ( configurations ) {
-            stage ( "Test - {${env.NODE}-${env.ARCH}}") {
-                /*
-                 * Some tests requires display, so:
-                 *
-                 *  * on *NIX hosts, launch Xvfb
-                 *  * on Windows, do nothing. Windows slave must be configured
-                 *    so it has an access to interactive window station (which
-                 *    means it has to run under user session, not as a service)
-                 */
-                if ( isUnix() ) {
-                    wrap([$class: 'Xvfb', autoDisplayName: true, additionalOptions: '-screen 0 1024x768x24 -pixdepths 24 4 8 15 16 32', parallelBuild: true]) {
-                        sh "ruby Jenkinsfile.rb --project \"stx:jv-branch\" test"
-                    }
-                } else {
-                    sh "ruby Jenkinsfile.rb --project \"stx:jv-branch\" test"
-                }
-                junit allowEmptyResults: true, testResults: "reports/*build${env.BUILD_NUMBER}*.xml"
-            }
-        }
-    }
-}
-
-/*
- * "Integration" pipeline
- */
-def integration() {
-    build()
-    /*
-     * If a single test fails, abort the pipeline. There's no point
-     * archiving a broken build.
-     */
-    println "Smalltalk/X built, job status is: ${currentBuild.result}"
-    if ( currentBuild.result == 'UNSTABLE' ) {
-        return;
-    }
-    artifacts()
-
-    /*
-     * Check if there are changes to be pushed to upstream. If so,
-     * ask user to approve that push
-     */
-    if ( changes() ) {
-        def integrate = false;
-
-        if (env.JENKINS_URL == "https://swing.fit.cvut.cz/jenkins/") {
-            integrate = true;
-        } else {
-            integrate = input(message: 'Integrate all staged changes to upstream?',
-                              parameters: [
-                                    booleanParam(name: "Integrate changes",
-                                         description: 'If checked, all staged changes will be pushed to an upstream repository',
-                                         defaultValue: true)]);
-        }
-        if ( integrate ) {
-            push()
-        }
-    }
-}
-
-/*
- * Utility. Return true, if there are any changes to be pushed to an upstream,
- * false othervise.
- */
-def changes() {
-    changes = false;
-    matrix ( configurations ) {
-        withCredentialsForUpstream() { user, pass ->
-            status = sh ( script: "rake \"workflow:out-upstream[${user}, ${pass}]\"", returnStatus: true)
-        }
-        changes = status == 0;
-    }
-    return changes;
-}
-
-def combine(configurations, axes = null, axis = 0, partial = new HashMap(), combinations = []) {
-    def axes0 = axes
-    if (axes0 == null) {
-        axes0 = configurations.keySet().toArray();
-    }
-    if ( axis < axes0.length ) {
-        for ( value in configurations[axes0[axis]] ) {
-            def combined = partial.clone()
-            combined[axes0[axis]] = value
-            combine(configurations, axes0, axis + 1, combined, combinations)
-        }
-    } else {
-        combinations.add(partial)
-    }
-    return combinations;
-}
-
-
-def matrix(configurations, block) {
-    def combinations = combine(configurations).toArray()
-    def branches = [failFast: true]
-    for (i = 0; i < combinations.length; i++) {
-        def index = i
-        def conf = combinations[i];
-        branches["${conf.NODE}-${conf.ARCH}"] = {
-            node ( conf.NODE ) {
-                def newEnv = []
-                for (k in conf.keySet()) {
-                    newEnv.add("${k}=${conf[k]}")
-                }
-                withEnv ( newEnv ) {
-                    ws ("workspace/${workspace}/${env.ARCH}") {
-                        block()
-                    }
-                }
-            }
-        }
-    }
-    parallel branches
-}
-
-
-
-def any(configurations, block) {
-    def axes = configurations.keySet().toArray()
-    def conf = [:]
-    for (axis in axes) {
-        conf[axis] = configurations[axis][0]
-    }
-    node ( conf.NODE ) {
-        def newEnv = []
-        for (k in conf.keySet()) {
-            newEnv.add("${k}=${conf[k]}")
-        }
-        withEnv ( newEnv ) {
-            ws ("workspace/${workspace}/${env.ARCH}") {
-                block()
-            }
-        }
-    }
-}
-
-
-def artifacts() {
-    matrix ( configurations ) {
-        stage ( "Artifacts - {${env.NODE}-${env.ARCH}}") {
-            sh "ruby Jenkinsfile.rb --project \"stx:jv-branch\" artifacts"
-            archiveArtifacts artifacts: "artifacts/*build${env.BUILD_NUMBER}*.zip, artifacts/*build${env.BUILD_NUMBER}*.bz2, artifacts/*build${env.BUILD_NUMBER}*.sha256", fingerprint: true//, onlyIfSuccessful: true
-        }
-    }
-}
-
-/**
- * Push changes to upstream reporitory. To be called after a successful
- * build. See #build()
- */
-def push() {
-    any ( configurations ) {
-        stage ( "Push to upstream" ) {
-            withCredentialsForUpstream { user, pass ->
-                sh "rake \"workflow:push-upstream[${user}, ${pass}]\""
-            }
-        }
-    }
-}
-
-/*
- * Utility. Executes given block with credentials for upstream repository.
- */
-def withCredentialsForUpstream(block) {
-    /*
-     * Kludge: Upstream repositories may be on a public BitBucket
-     * server. To access repos on BitBucket, I (JV) don't
-     * want to use the same key / password as for checkouts from
-     * staging repositories,
-     *
-     * Therefore, also look for another credentials with ID
-     * `workflow:push-upstream`. If it exists, then use these to
-     * push to upstrem repository. If no such credentials exist,
-     * use standard credentials.
-     *
-     * So, here we go:
-     */
-    def id1 = "workflow-push-upstream";
-    def id2 = scm.getCredentialsId();
-    def credentials = null;
-
-    for (StandardUsernameCredentials c : CredentialsProvider.lookupCredentials(StandardUsernameCredentials.class)) {
-      if (c.getId().equals(id1)) {
-        credentials = c;
-        break;
-      }
-    }
-    if (credentials == null) {
-      for (StandardUsernameCredentials c : CredentialsProvider.lookupCredentials(StandardUsernameCredentials.class)) {
-        if (c.getId().equals(id2)) {
-          credentials = c;
-          break;
-        }
-      }
-    }
-
-    println "Using upstream credentials ${credentials.getId()}: ${credentials.getDescription()}"
-
-    if (credentials instanceof SSHUserPrivateKey) {
-        sshagent([ credentials.getId() ]) {
-            // sh "rake \"workflow:push-upstream\""
-            block(null, null)
-        }
-    } else {
-        withCredentials([[$class: 'UsernamePasswordMultiBinding', credentialsId: credentials.getId(), passwordVariable: 'pass', usernameVariable: 'user']]) {
-            // sh "rake \"workflow:push-upstream[${user}, ${pass}]\""
-            block(user, pass)
-        }
-    }
-}
-
-return this;
--- a/rakelib/clean.rake	Fri May 11 12:10:26 2018 +0100
+++ b/rakelib/clean.rake	Tue Jun 12 21:27:24 2018 +0000
@@ -34,7 +34,10 @@
   task :pre
   task :post
 
-  task :main => %i(stc librun include packages)
+  task :main => %i(stc librun include packages) do
+    rm BUILD_DIR / 'stx-config.make'
+    rm BUILD_DIR / 'stx-config.rake'
+  end
 
   task :include do
     rm_rf BUILD_DIR / 'stx' / 'include'
@@ -42,13 +45,13 @@
 
   task :stc do
     chdir BUILD_DIR / 'stx' / 'stc' do
-      ; make 'clobber';
+      make 'clobber';
     end
   end
 
   task :librun do
     chdir BUILD_DIR / 'stx' / 'librun' do
-      ; make 'clobber'
+      make 'clobber'
     end
   end
 
@@ -68,7 +71,7 @@
       #if File.exist? pkg_wc
       chdir pkg_wc do
         case
-          when unix? && File.exist?('Makefile')
+          when unix? && File.exist?('makefile')
             make 'clean'
           when win32? && File.exist?('bmake.bat')
             make 'clean'
@@ -86,7 +89,7 @@
       #if File.exist? pkg_wc then
       chdir pkg_wc do
         case
-          when unix? && File.exist?('Makefile')
+          when unix? && File.exist?('makefile')
             make 'clobber'
           when win32? && File.exist?('bc.mak')
             make 'clobber'
--- a/rakelib/compile.rake	Fri May 11 12:10:26 2018 +0100
+++ b/rakelib/compile.rake	Tue Jun 12 21:27:24 2018 +0000
@@ -13,11 +13,11 @@
   LIBRUN = STX_TOP_DIR / 'librun' / 'librun.so'
 end
 
-# Return true if stx:stc and stx:librun sources should be removed as soon 
-# as STC or librun is built. False otherwise (sources are not removed by 
+# Return true if stx:stc and stx:librun sources should be removed as soon
+# as STC or librun is built. False otherwise (sources are not removed by
 # rakefiles)
 #
-# Indeed this is a feeble protection as it's easy to trick this method. But 
+# Indeed this is a feeble protection as it's easy to trick this method. But
 # the goal is not to protect sources but rather to have a secondary measure to
 # prevent sources from leaking (for example, due to a bug in packaging scripts).
 # Unauthorized subjects should not be able to check out sources in the first place.
@@ -25,126 +25,79 @@
   !(core_developer? or ENV['RETAIN_STX_AND_LIBRUN_SOURCE'] == 'yespleaseretain!')
 end
 
-def rm_rf_all_in_except(directory, exceptions = [])
-  if File.directory?(directory)
-    Dir.foreach(directory) {|each| rm_rf File.join(directory, each) if (each != '.' and each != '..' and !exceptions.include?(each))}
-  else
-    rm_f directory
-  end
-end
+STC_BINARY_FILES = [
+    'cpu_arm.h',
+    'cpu_i386.h',
+    'cpu_x86_64.h',
+    'cpu_ia64.h',
+    'stc.h',
+    'stcIntern.h',
+    'stcVMdata.h',
+    'stcVMoffsets.h',
+    'stxAsmMacros.h',
+    'stxNames.h',
+    'stxOSDefs.h',
+    'stxTypeMacros.h',
+    'thinlocks.h',
+    'symbols.stc.seed',
+    'version.h',
+    'sha1.h',
+    'sha1.c',
+    'README',
 
-case
-  when unix?
-    STC_BINARY_FILES = [
-        'cpu_alpha.h',
-        'cpu_arm.h',
-        'cpu_hppa.h',
-        'cpu_i386.h',
-        'cpu_x86_64.h',
-        'cpu_ia64.h',
-        'cpu_mc68k.h',
-        'cpu_mc88k.h',
-        'cpu_mips.h',
-        'cpu_ns32k.h',
-        'cpu_power.h',
-        'cpu_s390.h',
-        'cpu_sparc.h',
-        'cpu_vax.h',
-        'stc.h',
-        'stcIntern.h',
-        'stcVMdata.h',
-        'stcVMoffsets.h',
-        'stxAsmMacros.h',
-        'stxNames.h',
-        'stxOSDefs.h',
-        'stxTypeMacros.h',
-        'symbols.stc.seed',
-        'version.h',
-        'README',
+    # UNIX specific
+    'stx-config.sh',
+    'linuxIntern.h',
+    'freeBSDIntern.h',
+    'macIntern.h',
+    'Make.proto',
+    'stc',
+    'stc.1',
 
-        #unix specific
-        'stx-config.sh',
-        'linuxIntern.h',
-        'macIntern.h',
-        'Make.proto',
-        'stc',
-        'stc.1',
-    ]
-    LIBRUN_BINARY_FILES1 = %w(
-        libffi-3.2.1
-        md5.h
-        mcompiler.h
-        main.c
-        librun.so
-        librun.a
-        symlist.c
-        Make.proto
-        makefile
-    )
-    LIBRUN_BINARY_FILES2 = []
+    # Windows specific
+    'stx-config.bat',
+    'mingwmake.bat',
+    'ntIntern.h',
+    'nt.h',
+    'Make.proto',
+    'stc.exe',
+]
 
-  when win32?
-    STC_BINARY_FILES = [
-        'cpu_alpha.h',
-        'cpu_arm.h',
-        'cpu_hppa.h',
-        'cpu_i386.h',
-        'cpu_x86_64.h',
-        'cpu_ia64.h',
-        'cpu_mc68k.h',
-        'cpu_mc88k.h',
-        'cpu_mips.h',
-        'cpu_ns32k.h',
-        'cpu_power.h',
-        'cpu_s390.h',
-        'cpu_sparc.h',
-        'cpu_vax.h',
-        'stc.h',
-        'stcIntern.h',
-        'stcVMdata.h',
-        'stcVMoffsets.h',
-        'stxAsmMacros.h',
-        'stxNames.h',
-        'stxOSDefs.h',
-        'stxTypeMacros.h',
-        'symbols.stc.seed',
-        'version.h',
-        'README',
+LIBRUN_BINARY_FILES = [
+    'libffi-3.2.1',
+    'md5.h',
+    'mcompiler.h',
+    'main.c',
+    'hmm.h',
+    'hmm.c',
+    'jsupport.h',
+    'debug.h',
+    'debug.c',
 
-        #windows specific
-        'stx-config.bat',
-        'mingwmake.bat',
-        'ntIntern.h',
-        'nt.h',
-        'Make.proto',
-        'stc.exe',
-    ]
-    LIBRUN_BINARY_FILES1 = %w(
-        libffi-3.2.1
-        md5.h
-        mcompiler.h
-        main.c
-        objmingw
-        bc.mak
-        mingwmake.bat
-        buildDate.h
-        genDate.com
-    )
+    # UNIX specific
+    'librun.so',
+    'librun.a',
+    'symlist.c',
+    'Make.proto',
+    'makefile',
 
-    LIBRUN_BINARY_FILES2 = %w(librun.dll librun.lib)
-
-  else
-    raise Exception.new('Unsupported platform')
-end
+    # Windows specific
+    'bc.mak',
+    'mingwmake.bat',
+    'buildDate.h',
+    'genDate.com',
+    File.join('objmingw' , 'librun.dll'),
+    File.join('objmingw' , 'librun.lib'),
+]
 
 def cleanup_stc
   if should_remove_librun_and_stc_sources
     puts 'Cleaning up stc...'
     begin
-      rm_rf_all_in_except(STX_TOP_DIR / 'stc', STC_BINARY_FILES)
+      rm_rf_ex STX_TOP_DIR / 'stc', exceptions: STC_BINARY_FILES
     rescue
-      # When something goes wrong, be safe and remove whole directory
-      rm_rf STX_TOP_DIR / 'stc'
+      # When something goes wrong, be safe and remove whole directory...
+      rm_rf    STX_TOP_DIR / 'stc'
     end
   end
 end
@@ -153,26 +106,14 @@
   if should_remove_librun_and_stc_sources
     puts 'Cleaning up librun...'
     begin
-      rm_rf_all_in_except(STX_TOP_DIR / 'librun', LIBRUN_BINARY_FILES1)
-      rm_rf_all_in_except(STX_TOP_DIR / 'librun' / 'objmingw', LIBRUN_BINARY_FILES2) if win32?
+      rm_rf_ex STX_TOP_DIR / 'librun', exceptions: LIBRUN_BINARY_FILES
     rescue
-      # When something goes wrong, be safe and remove whole directory
-      rm_rf STX_TOP_DIR / 'librun'
+      # When something goes wrong, be safe and remove whole directory...
+      rm_rf    STX_TOP_DIR / 'librun'
     end
   end
 end
 
-# Setup flags for GCC (both, real GCC and MinGW)
-GCC_CFLAGS_OPT = ARCH == 'i386' ? '-O' : ''
-GCC_CFLAGS_DBG = core_developer? ? '-ggdb3' : ''
-GCC_CFLAGS_PIC = win32? ? '' : '-fPIC'
-GCC_CFLAGS = "-pipe -fno-omit-frame-pointer -fno-stack-protector -fno-strict-aliasing -fwrapv #{GCC_CFLAGS_PIC} #{GCC_CFLAGS_OPT} #{GCC_CFLAGS_DBG}"
-
-# Sigh, for MINGW 5.x.x coming with MSYS2 we have to force C language as 
-# Borland make and build files are using funny suffixes to confuse GCC's 
-# language detection. Triple sigh here!
-GCC_CFLAGS += ' -x c' if win32?
-
 namespace :compile do
 
   task :all => %i(prereq pre main post)
@@ -184,7 +125,6 @@
 
   task :main => %i(
       config
-      libraries
       stc
       librun
       application
@@ -198,38 +138,75 @@
 
 
   task :config => [ STX_CONF_DIR / 'stx-config.make' ]
+  task :config => [ BUILD_DIR / 'stx' / 'rules' / 'stmkmf' ] if unix?
+  task :config => [ BUILD_DIR / 'stx' / 'rules' / 'stdHeader_bc' ] if win32?
+  task :config => [ BUILD_DIR / 'stx' / 'rules' / 'stdRules_bc' ] if win32?
 
-  directory STX_CONF_DIR do 
+
+  directory STX_CONF_DIR do
     mkdir_p STX_CONF_DIR
   end
-    
+
   file STX_CONF_DIR / 'stx-config.make' => STX_CONF_DIR  do
     makelib_dir = Pathname.new(File.expand_path('makelib')).relative_path_from(Pathname.new(BUILD_DIR / 'stx' ))
-    os = nil
-    case 
-    when linux? 
-      os = 'linux'
-    when win32?
-      os = 'mingw32'
-    else
-      raise Exception.new('Unssuported operating system')
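+    # stx-config.make captures the configuration guessed or given at setup time;
+    # the generated file typically contains just something like:
+    #
+    #   BUILD_TARGET ?= x86_64-pc-linux-gnu
+    #   CC ?= gcc
+    #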
+    File.open(STX_CONF_DIR / 'stx-config.make', 'w') do | f |
+      f.puts "BUILD_TARGET ?= #{BUILD_TARGET}"
+      if defined? CC 
+        f.puts "CC ?= #{CC}"
+      end
+    end
+  end
+
+  file BUILD_DIR / 'stx' / 'rules' / 'stmkmf' => [ BUILD_DIR / 'stx' / 'rules' ] do
+    if not File.exist? BUILD_DIR / 'stx' / 'rules' / 'stmkmf'
+      chdir BUILD_DIR / 'stx' / 'rules' do
+        # Argh, cannot use ln_s as it cannot make symlinks with relative
+        # paths. Sigh,     
+        sh "ln -s \'../../../bin/stmkmf.rb\' stmkmf"
+      end
     end
+  end
 
-    File.open(STX_CONF_DIR / 'stx-config.make', 'w') do | f |
-      f.puts "BUILD_TARGET ?= #{os}-#{ARCH}"
+  file BUILD_DIR / 'stx' / 'rules' / 'stdHeader_bc' => [ BUILD_DIR / 'stx' / 'rules' ] do
+    top = "#{File.absolute_path(BUILD_DIR).gsub("/", "\\")}\\stx"
+    File.open(BUILD_DIR / 'stx' / 'rules' / 'stdHeader_bc', "w") do | f |
+      f.puts <<-CONTENTS      
+# Do not edit! Automatically generated by rake (rakelib/#{File.basename(__FILE__)})
+#
+!IF !defined(TOP) 
+TOP=#{top}
+!ENDIF
+MAKELIB = $(TOP)\\..\\..\\makelib
+!INCLUDE $(MAKELIB)\\definitions-w.make
+CONTENTS
     end
-  end 
+  end
+
+  file BUILD_DIR / 'stx' / 'rules' / 'stdRules_bc' => [ BUILD_DIR / 'stx' / 'rules' ] do
+    top = "#{File.absolute_path(BUILD_DIR).gsub("/", "\\")}\\stx"
+    File.open(BUILD_DIR / 'stx' / 'rules' / 'stdRules_bc', "w") do | f |
+      f.puts <<-CONTENTS      
+# Do not edit! Automatically generated by rake (rakelib/#{File.basename(__FILE__)})
+#
+!IF !defined(TOP) 
+TOP=#{top}
+!ENDIF
+MAKELIB = $(TOP)\\..\\..\\makelib
+!INCLUDE $(MAKELIB)\\rules-w.make
+CONTENTS
+    end        
+  end
 
   rule 'makefile' do |t|
-    chdir File.dirname(t.name) do; sh "'#{STX_TOP_DIR / 'rules' / 'stmkmf'}'"; end
+    chdir File.dirname(t.name) do
+      sh "'#{STX_TOP_DIR / 'rules' / 'stmkmf'}'"
+    end
   end
 
   task :stc do
-    (linux? and x86_64? and ARCH == 'i386') ? (stx_make_flags='STC_LEXLIB=libfl/libfl_pic.a') : (stx_make_flags='')
-
     chdir STX_TOP_DIR / 'stc' do
       begin
-        make stx_make_flags
+        make
         cleanup_stc
       rescue Exception => e
         cleanup_stc
@@ -254,7 +231,7 @@
           if win32_wine?
             (mkdir 'libffi' / 'build_win32' / 'objbc') unless File.exist?('libffi' / 'build_win32' / 'objbc')
           end
-          # A workaround for Windows 10 & ancient Borland make which 
+          # A workaround for Windows 10 & ancient Borland make which
           # tend to crash there when trying to recompile already compiled
           # librun. Sigh, we have to move away from it as soon as possible!
           (touch 'stxmain.c') if (win32? and File.exist? 'stxmain.c')
@@ -278,18 +255,6 @@
     directory STX_TOP_DIR / 'lib' / 'bc'
     task :librun => STX_TOP_DIR / 'lib' / 'bc'
   end
-
-  task :'libraries'
-
-  if unix?
-    vogl_dir = STX_TOP_DIR / 'support' / 'VGL' / 'vogl'
-
-    task 'libraries' => [vogl_dir / 'src' / 'libvogl.a']
-
-    file vogl_dir / 'src' / 'libvogl.a' => [vogl_dir / 'makefile'] do
-      chdir STX_TOP_DIR / 'support' / 'VGL' / 'vogl' do; make; end
-    end
-  end
 end
 
 
@@ -297,9 +262,9 @@
 
 #
 # Various compilation hacks here and there (sigh)
-# 
+#
 desc "Update the VM symbol database"
-task 'stx:librun:symbols' do | task |    
+task 'stx:librun:symbols' do | task |
   symbols_stc = BUILD_DIR / 'stx' / 'include' / 'symbols.stc'
   if unix?
     # UNIX VMs have the symbol database built into the binary as
@@ -311,7 +276,7 @@
       Rake::Task["stx:librun"].reenable()
       Rake::Task["stx:librun"].invoke()
     end
-  elsif win32? 
+  elsif win32?
     # Windows VM reads the symbol database from a file `symbols.stc` located
     # alongside the program executable so we need to copy that file
     app = project.application
@@ -326,9 +291,9 @@
 if unix?
   # A hack for Debian (and possibly other Linux distros) that does not ship
   # 32bit libodbc.a / libodbcinst.a. Link directly against .so
-  task :'stx:libdb/libodbc:pre' do     
+  task :'stx:libdb/libodbc:pre' do
 	  sed('-lodbc -lodbcinst' , '-l:libodbc.so.2 -l:libodbcinst.so.2', BUILD_DIR / 'stx' / 'libdb' / 'libodbc' / 'Make.proto', true)
-  end  
+  end
 end
 
 
--- a/rakelib/dist-jv.rake	Fri May 11 12:10:26 2018 +0100
+++ b/rakelib/dist-jv.rake	Tue Jun 12 21:27:24 2018 +0000
@@ -106,10 +106,12 @@
     case
       when win32?
         install BUILD_DIR / 'stx' / 'projects' / 'smalltalk'/ 'smalltalk.bat', bin_dir / 'stx.bat'
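+        # The sed below bakes the concrete release version into the installed
+        # launcher script in place of the "default version" placeholder.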
+        sed "SET stx_version=\"!stx_version_default!\"", "SET stx_version=#{version}", bin_dir / 'stx.bat', true
         install BUILD_DIR / 'stx' / 'projects' / 'smalltalk'/ 'smalltalk.ps1', bin_dir / 'smalltalk.ps1'
         install BUILD_DIR / 'stx' / 'projects' / 'smalltalk'/ 'smalltalk.cfg', bin_dir / 'smalltalk.cfg'
       when unix?
         install BUILD_DIR / 'stx' / 'projects' / 'smalltalk'/ 'smalltalk', bin_dir / 'stx'
+        sed 'STX_VERSION=$STX_VERSION_DEFAULT', "STX_VERSION=#{version}", bin_dir / 'stx', true
       else
         error_unsupported_platform
     end
--- a/rakelib/extensions.rb	Fri May 11 12:10:26 2018 +0100
+++ b/rakelib/extensions.rb	Tue Jun 12 21:27:24 2018 +0000
@@ -5,8 +5,12 @@
 
 class String
   rake_extension('/') do
-    def / (arg)
+    def / (arg)            
       r = File.join(File.expand_path(self), arg.to_s)
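+      # Windows limits plain paths to 260 characters (MAX_PATH); switch to the
+      # "\\?\" long-path form for anything longer so deeply nested build trees
+      # still work.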
+      if win32? && r.size >= 260 then
+        r = "\\\\?\\#{r}" if not r.start_with?('\\\\?\\')
+      end
       if arg.to_s.include? ?* or arg.to_s.include? ??
         r = Dir.glob(r)
         r.reject! {|f| (f =~ /\^.svn|^CVS|^\.hg|^\.git/i) != nil}
@@ -181,8 +185,15 @@
   end
 
   # Cross-platform way of finding an executable in the $PATH.
+  # Return the full path to `cmd`, or `nil` if the given command
+  # is not found in the PATH.
+  #
+  # Examples
   #
   #   which('ruby') #=> /usr/bin/ruby  
+  #
+  #   which('boguscommand') #=> nil
+  #
   def which(cmd)
     exts = ENV['PATHEXT'] ? ENV['PATHEXT'].split(';') : ['']
     ENV['PATH'].split(File::PATH_SEPARATOR).each do |path|
@@ -207,6 +218,52 @@
     end
   end
 
+  # Like FileUtils::rm_r but does NOT remove files explicitly listed
+  # in `exceptions` (i.e., these files are kept). Exceptions are
+  # interpreted as paths relative to the directory (or directories)
+  # given in `list`.
+  #
+  # Example:
+  #
+  #   Completely wipe out the `build` directory except for
+  #   `build/stx-config.rake` and `build/stx-config.make`:
+  #
+  #     rm_r_ex 'build', exceptions: %w(stx-config.rake stx-config.make)
+  #
+  def rm_r_ex(list, force: nil, noop: nil, verbose: nil, secure: nil, exceptions: [])        
+    if exceptions.empty? 
+      rm_r list, force: force, noop: noop, verbose: verbose, secure: secure
+    else
+      list = fu_list(list)
+      list.each do | path |
+        if File.directory? path        
+          Dir.foreach(path) do | entry |            
+            if (entry != '.') and (entry != '..') and !(exceptions.include? entry)                        
+              entry_exceptions = exceptions.select { | each_exception | each_exception.start_with? "#{entry}/" }                          
+              if entry_exceptions.empty? 
+                rm_r(File.join(path , entry), force: force, noop: noop, verbose: verbose, secure: secure)
+              else
+                entry_exceptions = entry_exceptions.collect { | each_exception | each_exception.slice(entry.size+1..-1) }
+                rm_r_ex(File.join(path , entry), force: force, noop: noop, verbose: verbose, secure: secure, exceptions: entry_exceptions)                                          
+              end              
+            end          
+          end        
+          if Dir.entries(path).size <= 2
+            rmdir path
+          end
+        else 
+          rm_r path, force: force, noop: noop, verbose: verbose, secure: secure
+        end
+      end
+    end
+  end
+
+  # Like FileUtils::rm_rf but does NOT remove files explicitly listed 
+  # in `exceptions`. See `rm_r_ex` for details
+  def rm_rf_ex(list, noop: nil, verbose: nil, secure: nil, exceptions: [])
+    rm_r_ex list, force: true, noop: noop, verbose: verbose, secure: secure, exceptions: exceptions
+  end
+
   # Pretty much like sed. Replaces all occurences of `pattern` by `replacement` in given `file`.
   # If `inplace` is `true`, then the modified contents is written back to the file. Otherwise it
   # printed on `STDOUT`.
@@ -231,20 +288,25 @@
     end
   end
 
-  # Create a compressed archive of `source`. Under Windows it creates
-  # `.zip` archive, otherwise (on Linux) it creates `tar.bz2`. 
+  # Create a compressed archive of `directory`. 
   # 
-  # The archive is created in the same directory as the source and
+  # The archive is created in the same directory as `directory` and
   # has the same name unless explicitly specified by the `archive:` option.
-  # If `remove: true` option is set to true, the original (`source`) directory is
+  # If the `remove: true` option is given, the `directory` being archived is
   # removed after adding the archive. 
   #
-  # As a side-effect, it generates a SHA256 checksum in file .sha256 unles
+  # If `include` is given (i.e., not `nil`), then only the listed files
+  # (relative to `directory`) are added to the archive.
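+  # For example (mirroring how rakelib/install.rake packages the pre-built stc):
+  #
+  #     zip BUILD_DIR / 'stx' / 'stc', archive: ARTIFACTS_DIR / 'prebuilt-stc.zip', include: STC_BINARY_FILES
+  #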
+  # 
+  # The type of the archive is automatically derived from `archive` name (if 
+  # provided) or defaults to `.tar.bz2` on UNIXes and to `.zip` on Windows. 
+  #
+  # As a side-effect, it generates a SHA256 checksum in file .sha256 unless
   # option `sha256: false` is given.
   # 
   # Examples: 
   #
-  # Create `somedir.bar.bz2` on `/tmp` containg contants of `/tmp/somedir`:
+  # Create `somedir.tar.bz2` in `/tmp` containing the contents of `/tmp/somedir`:
   #
   #     zip '/tmp/somedir'
   #
@@ -253,23 +315,32 @@
   # 
   #     zip '/tmp/build_dir', archive: 'smalltalkx', remove: true
   #  
-  def zip(source, options = {})
-    remove = options[:remove] || false
-    archive = options[:archive] || nil
-    sha256 = options[:sha256] || true
-    suffix = win32? ? '.zip' : '.tar.bz2'
-    if !archive
-      archive = "#{source}#{suffix}"
-    else
-      archive = "#{archive}#{suffix}" unless archive.end_with? suffix
+  def zip(directory, remove: false, archive: nil, sha256: true, include: nil, exclude: [])        
+    archive = directory unless archive
+    if !(archive.end_with? '.zip') && !(archive.end_with? '.tar.gz') && !(archive.end_with? '.tar.bz2')
+      archive = "#{directory}#{win32? ? '.zip' : '.tar.bz2'}"
+    end      
+    archive = File.expand_path(archive)
+    source = [ "\"#{File.basename(directory)}\"" ]
+    unless include.nil?
+      source = include.collect { | each | "\"#{File.join(File.basename(directory), each)}\"" }
     end
-    archive = File.expand_path(archive)
-    chdir File.dirname(source) do
-      if win32?
-        sh "zip -q -r #{remove ? '-T -m' : ''} \"#{archive}\" \"#{File.basename(source)}\""
-      else
-        sh "tar cjf \"#{archive}\" #{remove ? '--remove-files' : ''} \"#{File.basename(source)}\""
-      end
+    chdir File.dirname(directory) do
+      case
+        when (archive.end_with? '.zip')
+          if not exclude.empty? 
+            raise Exception.new("zip(): exclude: parameter not supported for .zip archives")
+          end         
+          sh "zip -q -r #{remove ? '-T -m' : ''} \"#{archive}\" #{source.join(' ')}"
+        when (archive.end_with? '.tar.bz2')
+          ex = (exclude.collect { | e | "\"--exclude=#{e}\" "}).join(' ')
+          sh "tar cjf \"#{archive}\" #{remove ? '--remove-files' : ''} #{ex} #{source.join(' ')}"
+        when (archive.end_with? '.tar.gz')
+          ex = (exclude.collect { | e | "\"--exclude=#{e}\" "}).join(' ')
+          sh "tar czf \"#{archive}\" #{remove ? '--remove-files' : ''} #{ex} #{source.join(' ')}"
+        else
+          raise Exception.new("Unknown archive type: #{File.basename(archive)}")
+      end      
     end
     if sha256
       require 'digest'
@@ -290,9 +361,7 @@
   # If option `remove: true` is given, then the archive is removed after
   # all files are extracted. 
   # 
-  def unzip(archive, options = {})
-    directory = options[:directory] || File.dirname(archive)
-    remove = options[:remove] || false
+  def unzip(archive, directory: File.dirname(archive), remove: false)
     archive = File.expand_path archive
     sha256 = "#{archive}.sha256"
     if File.exist? sha256
@@ -342,7 +411,7 @@
   # Like FileUtils.copy_entry, but takes a filter proc that can return false to skip a file
   def copy_entryx(src, dest, filter, preserve = false, dereference_root = false, remove_destination = false)
     Entry_.new(src, nil, dereference_root).traverse do |ent|
-      if filter.call(ent.path)
+      if filter.call(ent.path())
         destent = Entry_.new(dest, ent.rel, false)
         File.unlink destent.path if remove_destination && File.file?(destent.path)
         ent.copy destent.path
@@ -485,7 +554,26 @@
       end
     end
   end
+end
+
+module FileUtils::Entry_Extensions
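+  # Same MAX_PATH workaround as in String#/ above, applied to FileUtils'
+  # internal Entry_ paths so copy_entryx and friends also cope with paths
+  # longer than 260 characters; forward slashes are converted because the
+  # "\\?\" long-path form requires backslashes.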
+  def fix_long_path(p)
+    if win32? && p.size >= 260 && !p.start_with?("\\\\?\\") then      
+      p = "\\\\?\\#{p.gsub('/','\\')}"      
+    end
+    p
+  end
+
+  def path
+    fix_long_path(super)    
+  end
+
+  def copy_file(dest)
+    super(fix_long_path(dest))
+  end
 
 end
 
-
+class FileUtils::Entry_
+  prepend FileUtils::Entry_Extensions
+end
--- a/rakelib/hglib.rb	Fri May 11 12:10:26 2018 +0100
+++ b/rakelib/hglib.rb	Tue Jun 12 21:27:24 2018 +0000
@@ -109,11 +109,11 @@
       stdout, stderr, status = Open3.capture3(*cmd)
       case block.arity
       when 1
-        STDOUT.print stdout
-        STDERR.print stderr        
+        STDOUT.print stdout if defined? RakeFileUtils and RakeFileUtils.verbose
+        STDERR.print stderr if defined? RakeFileUtils and RakeFileUtils.verbose       
         yield status
       when 2                
-        STDERR.print stderr 
+        STDERR.print stderr if defined? RakeFileUtils and RakeFileUtils.verbose
         yield status, stdout
       when 3        
         yield status, stdout, stderr
@@ -295,8 +295,12 @@
     end
 
     def initialize(directory)
-      @path = directory
-      initialize_config
+      dir = File.expand_path(directory)
+      if not File.exist?(File.join(dir, '.hg'))
+        raise Exception.new("Not a Mercurial repository (missing .hg directory): #{dir}")
+      end
+      @path = dir
+      initialize_config    
     end
 
     def initialize_config
@@ -334,18 +338,43 @@
       log = []
       hg('log', rev: revset, template: template) do |status, out|
         if status.success?
-          puts out
+          puts out if defined? RakeFileUtils and RakeFileUtils.verbose
           log = out.split("\n")
         end
       end
-      log
+      return log
+    end
+
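+    # Return the list of files tracked in revision(s) `revset` whose paths
+    # match `pattern` (a thin wrapper around `hg locate`).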
+    def locate(revset = '.', pattern: '*')
+      files = []
+      hg('locate', rev: revset, include: pattern) do |status, out|
+        if status.success?          
+          files = out.split("\n")
+        end
+      end
+      return files
+    end
+
+    # Return current branch name
+    def branch()      
+      hg('branch') do | status, out |
+        if status.success?          
+          return out.strip()
+        end
+      end
+      raise Exception.new("Failed to retrieve current branch")
     end
 
     # Return changeset IDs of all head revisions. 
     # If `branch` is given, return only heads in given
     # branch.
-    def heads(branch = nil)
-      branch ? log("head() and branch('#{branch}')") : log('head()')
+    # If `include_secret` is false, then changesets in
+    # secret phase are ignored (even if they're heads).
+    def heads(branch = nil, include_secret: true)
+      revset  = "head()"
+      revset += " and branch('#{branch}')" if branch
+      revset += " and !secret()" if not include_secret
+      return log(revset)
     end
 
     # Return name of an active bookmark or nil if no bookmark
--- a/rakelib/inifile.rb	Fri May 11 12:10:26 2018 +0100
+++ b/rakelib/inifile.rb	Tue Jun 12 21:27:24 2018 +0000
@@ -88,7 +88,7 @@
       when @filename
         read
       else
-        puts 'Error happend during initialization'
+        merge!({})
     end
   end
 
--- a/rakelib/install.rake	Fri May 11 12:10:26 2018 +0100
+++ b/rakelib/install.rake	Tue Jun 12 21:27:24 2018 +0000
@@ -16,10 +16,30 @@
                       artifacts:main
                       artifacts:post)
 
+
 namespace :'artifacts' do
   task :pre
   task :post
   task :main => :setup do
     (zip ARTIFACTS_DIR / BUILD_NAME, remove: true) if File.exist? ARTIFACTS_DIR / BUILD_NAME
   end
+
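+  # The two tasks below package the pre-built stc and librun binaries. The
+  # archive names deliberately contain "prebuilt-stc" / "prebuilt-librun" so
+  # that download_binary_component() in specs/stx-jv.rbspec can locate them on
+  # the CI server by pattern.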
+  task :'prebuilt-stc' do
+    archive = ARTIFACTS_DIR / "#{project.app_name}-#{project.app_version}_#{BUILD_TARGET}_prebuilt-stc.zip"
+    mkdir_p ARTIFACTS_DIR
+    zip BUILD_DIR / 'stx' / 'stc' , archive: archive, include: STC_BINARY_FILES
+  end
+
+  task :'prebuilt-librun' do
+    archive = ARTIFACTS_DIR / "#{project.app_name}-#{project.app_version}_#{BUILD_TARGET}_prebuilt-librun.zip"
+    mkdir_p ARTIFACTS_DIR
+    zip BUILD_DIR / 'stx' / 'librun' , archive: archive, include: LIBRUN_BINARY_FILES
+  end  
+
+  desc 'Create source archive'
+  task :'source' => :checkout do
+    archive = ARTIFACTS_DIR / "#{project.app_name}-#{project.app_version}_sources.tar.gz"
+    mkdir_p ARTIFACTS_DIR
+    zip ".", archive: archive, exclude: %w(CVS .svn .git .hg *.obj *.o *.dll *.so *.debug *.H *.STH *Init.c *-Test.xml artifacts tmp)
+  end
 end
--- a/rakelib/scm.rb	Fri May 11 12:10:26 2018 +0100
+++ b/rakelib/scm.rb	Tue Jun 12 21:27:24 2018 +0000
@@ -167,7 +167,7 @@
           if bookmarks.has_key? 'master'
             revision = 'master'
           else
-            if hg.heads(branch).size > 1
+            if hg.heads(branch, include_secret: false).size > 1
               raise CheckoutException.new("HG: Cannot checkout #{directory}: directory. The ->#{branch}<- branch has multiple heads but no bookmark named 'master'! (All other branches are ignored)")
             end
           end
--- a/rakelib/setup.rake	Fri May 11 12:10:26 2018 +0100
+++ b/rakelib/setup.rake	Tue Jun 12 21:27:24 2018 +0000
@@ -9,29 +9,27 @@
 
 unless defined? PROJECT;       PROJECT = 'stx:jv-branch'; end
 
-unless defined? ARCH
-  if ENV.has_key? 'ARCH'
-    ARCH = ENV['ARCH']
-  else
-    arch = RbConfig::CONFIG['host_cpu']
-    case arch
-      when 'i586', 'i686'
-        ARCH = 'i386'
-      else
-        ARCH = arch
-    end
-  end
-  raise Exception.new("Unsupported architecture #{ARCH}") if ARCH != 'i386' && ARCH != 'x86_64'
+
+
+# Guess the value of BUILD_TARGET for the system on which we're currently running.
+# Think of this as the `config.guess` script used in autotools.
+# Returns the BUILD_TARGET in the form of a GNU target triplet, such as
+# `x86_64-pc-linux-gnu`.
+def build_target_guess()
+  RbConfig::CONFIG['target'] # Can't believe it's so easy...
 end
 
+unless defined? BUILD_TARGET; BUILD_TARGET = ENV['BUILD_TARGET'] || build_target_guess(); end
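+# BUILD_TARGET may also be set explicitly in the environment, which is handy
+# for cross builds or 32-bit builds on a 64-bit host, for example:
+#
+#   BUILD_TARGET=i686-pc-mingw32 rake compile:all
+#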
+
+
 unless defined? TOOLCHAIN
   if ENV.has_key? 'TOOLCHAIN'
     TOOLCHAIN = ENV['TOOLCHAIN']
   else
     if unix?
       TOOLCHAIN='gcc'
-    elsif win32?
-      if ARCH == 'x86_64'
+    elsif win32?    
+      if /x86_64/ =~ BUILD_TARGET
         TOOLCHAIN='mingw64'
       else
         TOOLCHAIN='mingw32'
@@ -59,7 +57,7 @@
 #
 COMMENT
       f.puts "unless defined? PROJECT; PROJECT='#{PROJECT}'; end"
-      f.puts "unless defined? ARCH; ARCH='#{ARCH}'; end"
+      f.puts "unless defined? BUILD_TARGET; BUILD_TARGET = '#{BUILD_TARGET}'; end"
       f.puts "unless defined? REPOSITORYSET; REPOSITORYSET='#{REPOSITORYSET}'; end"
       if defined? PROJECT_DIRS
         f.puts "unless defined? PROJECT_DIRS; PROJECT_DIRS='#{PROJECT_DIRS}'; end"
@@ -139,9 +137,11 @@
   project! PROJECT.to_sym
   project.apply_imports
   app_name = project.app_name || 'smalltalkx'
-  app_version = project.app_version || '6.2.5'
+  app_version = project.app_version || '8.0.0'
 
-  unless defined? BUILD_NAME; BUILD_NAME = "#{app_name}-#{app_version}_#{ARCH}-#{win32? ? 'win32' : RbConfig::CONFIG['host_os']}" end
+  if not defined? BUILD_NAME; 
+    BUILD_NAME = "#{app_name}-#{app_version}_#{BUILD_TARGET}"    
+  end  
 end
 
 task :'setup:dependencies' => :checkout do
@@ -279,7 +279,10 @@
             # On Windows, copy the DLL application directory
             if !pkg.application? and win32?
               if !uptodate? pkg.top / project.application.directory, [ OBJ_DIR / pkg.dll_name ]
-                cp OBJ_DIR / pkg.dll_name, pkg.top /  project.application.directory
+                cp OBJ_DIR / pkg.dll_name, pkg.top /  project.application.directory                
+              end
+              if !uptodate? pkg.top / project.application.directory, [ OBJ_DIR / "#{pkg.dll_name}.debug" ]
+                cp OBJ_DIR / "#{pkg.dll_name}.debug", pkg.top /  project.application.directory                
               end
             end
           end
--- a/rakelib/support.rb	Fri May 11 12:10:26 2018 +0100
+++ b/rakelib/support.rb	Tue Jun 12 21:27:24 2018 +0000
@@ -48,15 +48,7 @@
   # Return an a Jenkins build with pre-built stc and librun. 
   # Used to download pre-build stc and librun
   def self.smalltalkx_jv_branch_build
-    plat = nil
-    if win32?
-      plat = 'Windows'
-    elsif linux?
-      plat = 'Linux'
-    else
-      error_unsupported_platform
-    end
-    return Jenkins::Build.new(%Q{https://swing.fit.cvut.cz/jenkins/job/stx_jv/ARCH=#{ARCH},PLATFORM=#{plat}N/lastSuccessfulBuild})
+    return Jenkins::Build.new(%Q{https://swing.fit.cvut.cz/jenkins/job/stx_jv/lastStableBuild})
   end
 
   class Artifact
--- a/rakelib/test.rake	Fri May 11 12:10:26 2018 +0100
+++ b/rakelib/test.rake	Tue Jun 12 21:27:24 2018 +0000
@@ -17,7 +17,7 @@
     (@failed > 0 or @errors > 0) ? 'FAILED' : 'PASSED'
   end
 
-  # Creates a new summary for given package and report file. 
+  # Creates a new summary for given package and report file.
   def initialize(pkg_name, report_file)
     raise Exception.new("Report file does not exist! #{report_file}") unless File.exist? report_file
     @pkg = pkg_name
@@ -50,12 +50,12 @@
   report_dir = File.expand_path(REPORT_DIR)
   tmp_dir = File.expand_path(TMP_DIR)
 
-  # Set STX_TMPDIR environment to make sure all temporary files created by 
-  # Smalltalk/X goes to a local tmp directory (which should be discarded 
-  # regularly). This helps to avoid garbage to heap up on Windows slaves 
+  # Set the STX_TMPDIR environment variable to make sure all temporary files
+  # created by Smalltalk/X go to a local tmp directory (which should be discarded
+  # regularly). This helps to avoid garbage piling up on Windows slaves,
   # assuming workspaces are thrown away often.
   ENV['STX_TMPDIR'] = tmp_dir
-  
+
   if app
     exe_dir = BUILD_DIR / app.directory
     win32? ? (exe = "#{app.executable}.com") : (exe = "./#{app.executable}")
@@ -69,7 +69,7 @@
     packages_args = ''
     packages.each {|p| packages_args += " -p #{p}"}
 
-    File.exist?(coveragerportformat_dot_st) ? (runner_opts = "-abortOnSEGV -I --execute #{run_report_st}") : (runner_opts = "-I -f #{run_report_st}")
+    File.exist?(coveragerportformat_dot_st) ? (runner_opts = "--abortOnSEGV --abortOnInternalError -I --execute #{run_report_st}") : (runner_opts = "-I -f #{run_report_st}")
     sh "#{exe} #{runner_opts} #{global_opts} -i \"#{BUILD_ID}\" -D \"#{report_dir}\" -r #{report} #{report_opts} #{packages_args}"
   end
   # No, do not remove tmp_dir here. If a test fails,
@@ -86,27 +86,34 @@
   app = project.application
   project.packages.each do |pkg|
     if pkg.test
-      task "test:package:#{pkg.name}" => ['stx:goodies/builder/reports', REPORT_DIR] do
+      task "test:package:#{pkg.name}:pre"
+      task "test:package:#{pkg.name}:post"
+      task "test:package:#{pkg.name}:main" => ['stx:goodies/builder/reports', REPORT_DIR] do
         report_file = File.expand_path(REPORT_DIR) / "#{pkg.name_components.join('_')}-#{BUILD_ID}-Test.xml"
-        # Sigh, sigh. On CI server, sometimes it happen that tests are simply not run. 
+        # Sigh, sigh. On the CI server, it sometimes happens that tests are simply not run.
         # I was not able to debug the issue anyhow, any attempt to attach a debugger,
         # add a debug print or run it manually failed as the problem did not manifest.
-        # 
+        #
         # This is a feeble and horrible attempt to just cover up the problem by
         # trying multiple times. I don't want to spend more time chasing issues
-        # like this. Sorry. 
+        # like this. Sorry.
         #
-        # Let's see if it helps. 
+        # Let's see if it helps.
         5.times do #for i in 1..5 do
           pkg.coverage ? run_report(app, [pkg.name], TESTREPORT_CLASS, '', '--coverage') : run_report(app, [pkg.name], TESTREPORT_CLASS)
           # Extract summary from XML report and keep it. Yeah, parsing XML
-          # using regexps is a bad thing, but it's quick and lot less code!          
+          # using regexps is a bad thing, but it's quick and lot less code!
           break if File.exist?(report_file)
         end
         report_summary = TestReportSummary.new(pkg.name, report_file)
         TestReportSummary::SUMMARIES << report_summary
       end
+
+      task "test:package:#{pkg.name}" => [ "test:package:#{pkg.name}:pre",
+                                           "test:package:#{pkg.name}:main",
+                                           "test:package:#{pkg.name}:post" ]
       task :'test:packages' => "test:package:#{pkg.name}"
+      task "#{pkg.name}:test" => "test:package:#{pkg.name}"
     end
 
     if pkg.lint
--- a/specs/baseline.rbspec	Fri May 11 12:10:26 2018 +0100
+++ b/specs/baseline.rbspec	Tue Jun 12 21:27:24 2018 +0000
@@ -50,8 +50,7 @@
 				                           ]
 
     file BUILD_DIR / 'stx' / 'rules'  => BUILD_DIR do | t |
-      checkout :'exept:public', 'stx/rules'
-      top = "#{File.absolute_path(BUILD_DIR).gsub("/", "\\")}\\stx"
+      checkout :'exept:public', 'stx/rules'      
       chdir BUILD_DIR / 'stx' / 'rules'  do          
         rm_f 'stmkmf'
         rm_f 'stdHeader'
@@ -59,36 +58,6 @@
         rm_f 'stdRules'
         rm_f 'stdHeader_bc'
         rm_f 'stdRules_bc'
-        if unix? then                
-          # Argh, cannot use ln_s as it cannot make symlinks with relative
-          # paths. Sigh, 
-          sh "ln -s \'../../../bin/stmkmf.rb\' stmkmf"
-        end
-      
-        if win32? then
-          File.open('stdHeader_bc', "w") do | f |
-            f.puts <<-CONTENTS      
-# Do not edit! Automatically generated by rake (specs/#{File.basename(__FILE__)})
-#
-!IF !defined(TOP) 
-TOP=#{top}
-!ENDIF
-MAKELIB = $(TOP)\\..\\..\\makelib
-!INCLUDE $(MAKELIB)\\definitions-w.make
-CONTENTS
-          end
-          File.open('stdRules_bc', "w") do | f |
-            f.puts <<-CONTENTS      
-# Do not edit! Automatically generated by rake (specs/#{File.basename(__FILE__)})
-#
-!IF !defined(TOP) 
-TOP=#{top}
-!ENDIF
-MAKELIB = $(TOP)\\..\\..\\makelib
-!INCLUDE $(MAKELIB)\\rules-w.make
-CONTENTS
-          end
-        end
       end
     end
 
--- a/specs/reports-misc.rbspec	Fri May 11 12:10:26 2018 +0100
+++ b/specs/reports-misc.rbspec	Tue Jun 12 21:27:24 2018 +0000
@@ -4,7 +4,7 @@
   import :'stx:jv-branch:for-reports-only'
 
 
-  repository :'bitbucket:janvrany'
+  repository :'jv-branch:public'
 
   package 'stx:libscm/common', :test => true, :lint => true, :link => false
   package 'stx:libscm/git', :test => true, :lint => true, :link => false
@@ -14,7 +14,7 @@
 project :'stx:libscm/mercurial:reports' do
   import :'stx:jv-branch:for-reports-only'
 
-  repository :'bitbucket:janvrany'
+  repository :'jv-branch:public'
   package 'stx:libscm'
   package 'stx:libscm/common',    :link => false, :lint => true
   package 'stx:libscm/mercurial', :link => false, :lint => true
@@ -47,7 +47,7 @@
   import :'stx:repositories'
   import :'stx:6.2.2'
 
-  repository :'bitbucket:janvrany'
+  repository :'jv-branch:public'
   package 'stx:libscm'
   package 'stx:libscm/common',    :link => false#, :test => true, :lint => true, :coverage => true # Coverage not suypported in 6.2.2
   package 'stx:libscm/mercurial', :link => false, :test => true, :lint => true#, :coverage => true
@@ -73,7 +73,7 @@
 project :'stx:regression' do
   import :'stx:jv-branch:for-reports-only'
 
-  package 'stx:goodies/regression', :repository => :'bitbucket:janvrany', :branch => 'jv', :test => true
+  package 'stx:goodies/regression', :repository => :'jv-branch:public', :branch => 'jv', :test => true
   package 'stx:libview/tests', :test => true
   
 end
@@ -125,7 +125,7 @@
 project :'jv:calipel:s:reports' do
   import :'stx:jv-branch:for-reports-only'
 
-  package 'jv:calipel', :repository => :'bitbucket:janvrany'
+  package 'jv:calipel', :repository => :'jv-branch:public'
   package "jv:calipel/s", :link => false
   package "jv:calipel/s/stx", :link => false
   package "jv:calipel/s/tests", :test => true, :lint => true, :link => false
--- a/specs/repositories.rbspec	Fri May 11 12:10:26 2018 +0100
+++ b/specs/repositories.rbspec	Tue Jun 12 21:27:24 2018 +0000
@@ -1,93 +1,54 @@
-# This file defines a set of repositories to fetch source code from. 
-#
-#
-# Following repository sets are defined here:
-#
-#  1.*default*: fetches source from public repositories. Code forked
-#    by Smalltalk/X jv-branch is loaded from Jan Vrany's BitBucket
-#    account (https://bitbucket.org/janvrany/), the rest is loaded
-#    from eXept's public CVS repository (:pserver:cvs@cvs.smalltalk-x.de:/cvs/stx)
-#    Sources of stc and librun (Smalltalk/X VM) are not checked out 
-#    (since they're not freely available) but pre-build binaries are
-#    downloaded from SWING CI server ( https://swing.fit.cvut.cz/jenkins/job/stx_jv/ )
+# This file defines a set of repositories to fetch source code from.
 #
-#    This is the default if no REPOSITORYSET value is set. 
-#
-#
-#  2.*ci-swing*: for use on SWING CI [1]. Uses local staging repositories hosted
-#    on https://swing.fit.cvut.cz/hg in addition to canonical on hosted on 
-#    BitBucket.
-#
-#  3."ci-jv": for use on Jan Vrany's private CI. Uses (his) local staging repositories 
-#    and repositories hosted at SWING as upstream repos. 
-#    
-#
-case REPOSITORYSET 
-when 'default'
+
+begin
+  #
+  # All packages that are not (yet) forked are taken from
+  # eXept's public CVS repository
+  #
   repository :'exept:public' do
-    type :cvs    
+    type :cvs
     canonical ":pserver:cvs@cvs.smalltalk-x.de:/cvs/stx"
   end
 
-  repository :'bitbucket:janvrany' do
-    type :hg
-    canonical "https://bitbucket.org/janvrany"
-    separator '-'
-  end
+  #
+  # Auto-configure the jv-branch:public and jv-branch:private
+  # repositories based on the URL the rakefiles were cloned from.
+  #  
+  origin_forest_url = HG::Repository.new(BUILD_DIR / '..').paths['default'].slice(0.. - 'stx-goodies-builder-rake'.size - 2)
+  cloned_from_bitbucket = origin_forest_url =~ /bitbucket\.org/
+  cloned_from_swing     = origin_forest_url =~ /swing\.fit\.cvut\.cz/
+  cloned_from_devsrepo  = !cloned_from_bitbucket && !cloned_from_swing
+
 
-  if core_developer?
-    repository :'swing:private:hg' do
-      type :hg
-      canonical "ssh://192.168.12.2//hg"
-      separator '.'
+  repository :'jv-branch:public' do
+    type :hg
+    separator '-'
+    canonical "https://bitbucket.org/janvrany"
+
+    case
+    when cloned_from_swing
+      staging   "ssh://hg@swing.fit.cvut.cz//var/local/hg"
+    when cloned_from_devsrepo
+      upstream  "ssh://hg@swing.fit.cvut.cz//var/local/hg"
+      staging   origin_forest_url
     end
   end
 
-when 'ci-swing'
-  repository :'exept:public' do
-    type :cvs    
-    canonical ":ext:#{ENV['USER'] || ENV['USERNAME']}@swing.fit.cvut.cz/var/local/cvs"
-  end
-
-  repository :'bitbucket:janvrany' do
-    type :hg
-    canonical "https://bitbucket.org/janvrany"
-    # Use local build slave network address to allow for faster
-    # checkouts
-    staging "ssh://#{ENV['USER'] || ENV['USERNAME']}@192.168.12.1//var/local/hg"
-    separator '-'
-  end
-
-  repository :'swing:private:hg' do
-    type :hg
-    canonical "ssh://#{ENV['USER'] || ENV['USERNAME']}@192.168.12.2//hg"
-    separator '.'
+  # Only core developers and/or Jenkins have access to
+  # stx:stc and stx:librun...
+  if (core_developer? or jenkins?)
+    repository :'jv-branch:private' do
+      type :hg
+      separator '-'
+      case
+      when cloned_from_devsrepo
+        canonical origin_forest_url
+      else
+        canonical "ssh://192.168.12.2//hg"
+      end
+    end
   end
-
-
-when 'ci-jv'
-  repository :'exept:public' do
-    type :cvs    
-    canonical ":pserver:cvs@cvs.smalltalk-x.de:/cvs/stx"
-  end
-
-  repository :'bitbucket:janvrany' do
-    type :hg
-    canonical "https://bitbucket.org/janvrany"
-    upstream  "ssh://hg@swing.fit.cvut.cz//var/local/hg"
-    staging   "ssh://hg@192.168.27.250"
-    separator '-'
-  end
-
-  repository :'swing:private:hg' do
-    type :hg
-    # No, don't do this - CI has no access to those
-    # SWING repositories (no VPN, no keys set up)
-    #
-    # canonical "ssh://192.168.12.2//hg"
-    # staging   "ssh://hg@192.168.27.250"
-    
-    canonical "ssh://hg@192.168.27.250"
-    separator '-'
-  end
+rescue Exception
+  info "Building from source archive"
 end
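
The expression `HG::Repository.new(BUILD_DIR / '..').paths['default']` above returns the URL
the builder checkout was cloned from, and the `.slice(0.. - 'stx-goodies-builder-rake'.size - 2)`
call strips the trailing "/stx-goodies-builder-rake" to obtain the forest root that the
repository classification works on. A minimal sketch of that logic, with a made-up clone URL
standing in for the HG::Repository call:

  suffix = 'stx-goodies-builder-rake'
  origin_clone_url  = "https://bitbucket.org/janvrany/stx-goodies-builder-rake"  # hypothetical example
  # Drop "/stx-goodies-builder-rake" from the end to get the forest root.
  origin_forest_url = origin_clone_url.slice(0 .. -(suffix.size + 2))
  # => "https://bitbucket.org/janvrany"
  cloned_from_bitbucket = !!(origin_forest_url =~ /bitbucket\.org/)        # => true
  cloned_from_swing     = !!(origin_forest_url =~ /swing\.fit\.cvut\.cz/)  # => false
  cloned_from_devsrepo  = !cloned_from_bitbucket && !cloned_from_swing     # => false
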
--- a/specs/stx-jv.rbspec	Fri May 11 12:10:26 2018 +0100
+++ b/specs/stx-jv.rbspec	Tue Jun 12 21:27:24 2018 +0000
@@ -1,22 +1,22 @@
-def build_platform_id()
-  return "#{ARCH}-#{win32? ? 'win32' : RbConfig::CONFIG['host_os']}"
-end
-
 # A helper function to download and unpack pre-built stc and librun
 # for those who are not lucky enough to have an access to sources
-def download_blob_matching(pattern, directory)
+def download_binary_component(component, directory)
   plat = nil
   blob = nil
   sha256 = nil
-  if win32? then
-      plat = 'Windows'
-  elsif linux?
-    plat = 'Linux'
-  else
-    error_unsupported_platform()
+  pattern = /prebuilt-#{Regexp.quote(component)}/
+
+  build_url = %Q{https://swing.fit.cvut.cz/jenkins/job/stx_jv/lastStableBuild}
+
+  origin_forest_url = HG::Repository.new(BUILD_DIR / '..').paths['default'].slice(0.. - 'stx-goodies-builder-rake'.size - 2) #/
+  # When building using Jan's staging repositories, use pre-built stc and librun
+  # from there too. 
+  if (origin_forest_url =~ /192.168.27.250/) then
+    build_url = %Q{http://192.168.27.253:8080/job/stx_jv/lastStableBuild/}
   end
-  build = Jenkins::Build.new(%Q{https://swing.fit.cvut.cz/jenkins/job/stx_jv/lastStableBuild})
-  platform = build_platform_id()
+
+  build = Jenkins::Build.new(build_url)
+  platform = BUILD_TARGET
   build.artifacts.each do | artifact |
     if (pattern =~ artifact.name) and (artifact.name.include? platform) then
       if artifact.name.end_with? '.sha256' then
@@ -27,19 +27,34 @@
     end
   end
   if not blob then
-    error "No artifact matching given pattern found"
+    error "No binary component \"#{component}\" found: #{build_url}/artifact/artifacts/"
   end
-  puts "Downloading binary component #{blob.name}"
+  if sha256 then
+    info "Downloading binary component SHA256 checksum #{sha256.name}"
+    sha256.download_to(directory)
+    if File.exist? directory / "#{component}" and File.exist? directory / ".#{component}.sha256" then
+      # Compare the "current" checksum with the (possibly) "new" checksum.
+      # If they are the same, skip the download (assuming that version has
+      # already been downloaded).
+      sha256_current = File.read(directory / ".#{component}.sha256")
+      sha256_new     = File.read(directory / sha256.name)
+      if sha256_new == sha256_current then
+        return
+      end
+    end
+  end
+  info "Downloading binary component #{blob.name}"
   blob.download_to(directory)
-  if sha256 then
-    sha256.download_to(directory)
+  info "Extracting binary component #{blob.name}"
+  if File.exist? directory / component then
+    rm_rf directory / component
   end
   unzip directory / blob.name, remove: true
-  rm_f directory / sha256.name
+  if sha256 then
+    mv directory / sha256.name, directory / ".#{component}.sha256"
+  end
 end
 
-
-
 project :'stx:jv-branch-core' do
   # Core Smalltalk/X - does contain only standard libraries,
   # and development tools. Does not contain any other 'features'
@@ -48,58 +63,58 @@
   import :'stx:baseline'
 
   # FORKED STC and librun
-  package "stx:stc", :repository => :'swing:private:hg', :branch => 'jv',
+  package "stx:stc", :repository => :'jv-branch:private', :branch => 'jv',
     :checkout => (Proc.new do | pkg |
-    # Download pre-compiled binary if user has no access to source code
-    if Rake::Stx::Configuration::Repository::find(pkg.repository) then
-          checkout pkg.repository, 'stx/stc', :branch => pkg.branch, :revision => pkg.revision
-    else
-        download_blob_matching(/prebuilt-stc/, BUILD_DIR / 'stx')
+      # Download pre-compiled binary if user has no access to source code
+      if Rake::Stx::Configuration::Repository::find(pkg.repository) then
+        checkout pkg.repository, 'stx/stc', :branch => pkg.branch, :revision => pkg.revision
+      else
+        download_binary_component('stc', BUILD_DIR / 'stx')
       end
     end),
     :update => (Proc.new do | pkg |
       if (File.exists? BUILD_DIR / 'stx' / 'stc' / '.hg' / 'hgrc') then
         update pkg.repository, 'stx/stc', :branch => pkg.branch, :revision => pkg.revision
       else
-        warn "Not updating #{pkg.name} as no HG repository found in #{BUILD_DIR / 'stx' / 'stc'}"
+        download_binary_component('stc', BUILD_DIR / 'stx')
     end
   end)
 
-  package "stx:librun", :repository => :'swing:private:hg', :branch => 'jv',
+  package "stx:librun", :repository => :'jv-branch:private', :branch => 'jv',
     :checkout => (Proc.new do | pkg |
-    # Download pre-compiled binary if user has no access to source code
-    if Rake::Stx::Configuration::Repository::find(pkg.repository) then
+      # Download pre-compiled binary if user has no access to source code
+      if Rake::Stx::Configuration::Repository::find(pkg.repository) then
           checkout pkg.repository, 'stx/librun', :branch => pkg.branch, :revision => pkg.revision
-    else
-        download_blob_matching(/prebuilt-librun/, BUILD_DIR / 'stx')
+      else
+        download_binary_component('librun', BUILD_DIR / 'stx')
       end
     end),
     :update => (Proc.new do | pkg |
       if (File.exists? BUILD_DIR / 'stx' / 'librun' / '.hg' / 'hgrc') then
          update pkg.repository, 'stx/librun', :branch => pkg.branch, :revision => pkg.revision
       else
-        warn "Not updating #{pkg.name} as no HG repository found in #{BUILD_DIR / 'stx' / 'librun'}"
+        download_binary_component('librun', BUILD_DIR / 'stx')
     end
   end)
 
   # FORKED libraries
   # ======================================================================
-  package "stx:libbasic", :repository => :'bitbucket:janvrany', :branch => 'jv'
-  package "stx:libbasic2", :repository => :'bitbucket:janvrany', :branch => 'jv'
-  package "stx:libbasic3", :repository => :'bitbucket:janvrany', :branch => 'jv'
-  package "stx:libcomp", :repository => :'bitbucket:janvrany', :branch => 'jv'
-  package "stx:libtool", :repository => :'bitbucket:janvrany', :branch => 'jv'
-  package "stx:libtool2",:repository => :'bitbucket:janvrany', :branch => 'jv'
+  package "stx:libbasic", :repository => :'jv-branch:public', :branch => 'jv'
+  package "stx:libbasic2", :repository => :'jv-branch:public', :branch => 'jv'
+  package "stx:libbasic3", :repository => :'jv-branch:public', :branch => 'jv'
+  package "stx:libcomp", :repository => :'jv-branch:public', :branch => 'jv'
+  package "stx:libtool", :repository => :'jv-branch:public', :branch => 'jv'
+  package "stx:libtool2",:repository => :'jv-branch:public', :branch => 'jv'
 
-  package "stx:libview", :repository => :'bitbucket:janvrany', :branch => 'jv'
-  package "stx:libview2", :repository => :'bitbucket:janvrany', :branch => 'jv'
-  package "stx:libwidg", :repository => :'bitbucket:janvrany', :branch => 'jv'
-  package "stx:libwidg2", :repository => :'bitbucket:janvrany', :branch => 'jv'
+  package "stx:libview", :repository => :'jv-branch:public', :branch => 'jv'
+  package "stx:libview2", :repository => :'jv-branch:public', :branch => 'jv'
+  package "stx:libwidg", :repository => :'jv-branch:public', :branch => 'jv'
+  package "stx:libwidg2", :repository => :'jv-branch:public', :branch => 'jv'
 
-  package "stx:goodies/sunit", :repository => :'bitbucket:janvrany', :branch => 'jv'
-  package "stx:goodies/monticello", :repository => :'bitbucket:janvrany', :branch => 'jv'
-  package "stx:goodies/refactoryBrowser", :repository => :'bitbucket:janvrany', :branch => 'jv'
-  package "stx:libsvn", :repository => :'bitbucket:janvrany', :branch => 'jv'
+  package "stx:goodies/sunit", :repository => :'jv-branch:public', :branch => 'jv'
+  package "stx:goodies/monticello", :repository => :'jv-branch:public', :branch => 'jv'
+  package "stx:goodies/refactoryBrowser", :repository => :'jv-branch:public', :branch => 'jv'
+  package "stx:libsvn", :repository => :'jv-branch:public', :branch => 'jv'
 
   # Some more development tools to build and preload
   # ======================================================================
@@ -108,7 +123,7 @@
 
   # SmallSense
   package "stx:goodies/regex", :repository => :'exept:public'
-  package "stx:goodies/smallsense", :repository => :'bitbucket:janvrany'
+  package "stx:goodies/smallsense", :repository => :'jv-branch:public'
   package "stx:goodies/smallsense/refactoring_custom"
 
 
@@ -128,49 +143,9 @@
       if PROJECT == 'stx:jv-branch' then
         chdir BUILD_DIR / 'stx' / 'projects' / 'smalltalk' do
           if win32?
-            cmd = "smalltalk.bat --abortOnSEGV -I --quick --eval \"Stdout nextPutLine:'OKay, VM runs'. Smalltalk exit: 0\""
+            sh "smalltalk.bat --abortOnSEGV -I --quick --eval \"Stdout nextPutLine:'OKay, VM runs'. Smalltalk exit: 0\""
           else
-            cmd = "./smalltalk --abortOnSEGV -I --quick --eval \"Stdout nextPutLine:'OKay, VM runs'. Smalltalk exit: 0\""
-          end
-          if not system cmd
-            raise Exception.new("Cannot run smalltalk!")
-          end
-        end
-      end
-    end
-
-    if win32_wine?
-      task :'stx:projects/smalltalk:pre' do
-        chdir (BUILD_DIR / 'stx' / 'projects' / 'smalltalk') do
-          make "buildDate.h"
-        end
-      end
-
-    end
-
-    # Hack for badly-named files in libtool
-    task :'stx:libtool:post' do
-      chdir (BUILD_DIR / 'stx' / 'libtool') do
-        # Windows does not support symlinks, copy the file
-        if win32?
-          if not File.exist? 'Tools_BrowserList.STH'
-            cp 'Tools__BrowserList.STH', 'Tools_BrowserList.STH'
-          end
-          if not File.exist? 'Tools_NavigatorModel.STH'
-            cp 'Tools__NavigatorModel.STH', 'Tools_NavigatorModel.STH'
-          end
-          if not File.exist? 'Tools_NavigationState.STH'
-            cp 'Tools__NavigationState.STH', 'Tools_NavigationState.STH'
-          end
-        else
-          if not File.exist? 'Tools_BrowserList.H'
-            ln_s 'Tools__BrowserList.H', 'Tools_BrowserList.H'
-          end
-          if not File.exist? 'Tools_NavigatorModel.H'
-            ln_s 'Tools__NavigatorModel.H', 'Tools_NavigatorModel.H'
-          end
-          if not File.exist? 'Tools_NavigationState.H'
-            ln_s 'Tools__NavigationState.H', 'Tools_NavigationState.H'
+            sh "./smalltalk --abortOnSEGV -I --quick --eval \"Stdout nextPutLine:'OKay, VM runs'. Smalltalk exit: 0\""
           end
         end
       end
@@ -188,10 +163,10 @@
 
 
   app_name "smalltalkx-jv-branch"
-  app_version "6.2.6_#{BUILD_ID}"
+  app_version "8.0.0_#{BUILD_ID}"
 
   # Forked PetitParser
-  package "stx:goodies/petitparser", :repository => :'bitbucket:janvrany', :link => false
+  package "stx:goodies/petitparser", :repository => :'jv-branch:public', :link => false
   package "stx:goodies/petitparser/tests", :link => false
   package "stx:goodies/petitparser/analyzer",  :link => false
   package "stx:goodies/petitparser/analyzer/tests",  :link => false
@@ -201,35 +176,46 @@
   package "stx:goodies/communication", :repository => :'exept:public'
 
   # Java support
-  package "stx:libjava", :repository => :'bitbucket:janvrany'
+  package "stx:libjava", :repository => :'jv-branch:public'
   package "stx:libjava/tools"
   package "stx:libjava/experiments"
   #package "stx:libjava/examples" :link => false
 
+  # VDB
+  package 'stx:goodies/magritte', :repository => :'exept:public', :link => false
+  package 'stx:goodies/announcements', :repository => :'exept:public'
+  package 'jv:libgdbs', :repository => :'jv-branch:public', :link => false
+  package 'jv:libgdbs/tests', :repository => :'jv-branch:public', :link => false
+  package 'jv:vdb', :repository => :'jv-branch:public', :link => false
+
+
   # Smalltalk/X IDE
-  application 'stx:projects/smalltalk', :repository => :'bitbucket:janvrany', :branch => 'jv'
+  application 'stx:projects/smalltalk', :repository => :'jv-branch:public', :branch => 'jv'
 
-  package 'stx:libscm', :repository => :'bitbucket:janvrany'
-  package 'stx:libscm/common', :repository => :'bitbucket:janvrany'
-  package 'stx:libscm/mercurial', :repository => :'bitbucket:janvrany'
-  package 'stx:libscm/mercurial/monticello', :repository => :'bitbucket:janvrany'
-  package 'stx:goodies/ring', :repository => :'bitbucket:janvrany', :link => false
-  package 'stx:goodies/libcairo', :repository => :'bitbucket:janvrany', :link => false
-  package 'stx:goodies/cypress', :repository => :'bitbucket:janvrany'
-  package "stx:goodies/xmlsuite", :repository => :'bitbucket:janvrany'
+  package 'stx:libscm', :repository => :'jv-branch:public'
+  package 'stx:libscm/common', :repository => :'jv-branch:public'
+  package 'stx:libscm/mercurial', :repository => :'jv-branch:public'
+  package 'stx:libscm/mercurial/monticello', :repository => :'jv-branch:public'
+  package 'stx:goodies/ring', :repository => :'jv-branch:public', :link => false
+  package 'stx:goodies/libcairo', :repository => :'jv-branch:public', :link => false
+  package 'stx:goodies/cypress', :repository => :'jv-branch:public'
+  package "stx:goodies/xmlsuite", :repository => :'jv-branch:public'
   package "stx:goodies/loggia", :repository => :'exept:public', :link => false
-  package 'stx:goodies/smaCC',:repository => :'bitbucket:janvrany'
-  package 'stx:goodies/regression',:repository => :'bitbucket:janvrany', :branch => 'jv', :link => false
-  package "stx:goodies/builder", :repository => :'bitbucket:janvrany',:branch => 'jv', :link => false
+  package 'stx:goodies/smaCC',:repository => :'jv-branch:public'
+  package 'stx:goodies/regression',:repository => :'jv-branch:public', :branch => 'jv', :link => false
+  package "stx:goodies/builder", :repository => :'jv-branch:public',:branch => 'jv', :link => false
   package "stx:goodies/builder/reports"
 
   # Define a "default" test suite, i.e., a set of tests that are run when
   # user does `rake test`.
   package 'stx:libview/tests', :test => true;
+  package 'stx:libwidg/tests', :test => true;
+  package 'stx:libtool/tests', :test => true;
   package 'stx:libjava', :test => true;
   package 'stx:libjava/tools', :test => true;
   package 'stx:goodies/regression', :test => true;
   package 'stx:libscm/mercurial', :test => true;
+  package 'jv:libgdbs/tests', :test => true;
 
   tasks do
     # We need to checkout CharacterEncoderCodeGenerator from stx:goodies
@@ -274,8 +260,15 @@
       end
     end
 
-
-
+    # Normally the test package is not compiled, but we have to compile
+    # the programs used in the tests before running the tests themselves.
+    # Sigh...
+    task "test:package:jv:libgdbs/tests:pre" do
+      chdir BUILD_DIR / 'jv' / 'libgdbs' / 'tests' do
+        sh "make -f Makefile.init mf" if unix?
+        make "testprograms"
+      end
+    end
 
     task :'install:main' => :'dist:jv-branch'
 
@@ -291,7 +284,7 @@
 
 
     	cairo_dlls = nil
-    	if ARCH == 'i386'
+    	if /i686/ =~ BUILD_TARGET
 	      cairo_dlls =  [
 	      	'libcairo-2.dll' ,
 					'libfontconfig-1.dll' ,
@@ -319,7 +312,7 @@
     		task :'dist:jv:post' => [ bin_dir / dll ]
 				file bin_dir / dll do
 					mkdir_p bin_dir
-					cp BUILD_DIR / 'stx' / 'goodies' / 'libcairo' / 'support' / 'win32' / (ARCH == 'i386' ? 'i586' : ARCH) / dll , bin_dir / dll
+					cp BUILD_DIR / 'stx' / 'goodies' / 'libcairo' / 'support' / 'win32' / (/i686/ =~ BUILD_TARGET ? 'i586' : 'x86_64') / dll , bin_dir / dll
 				end
     	end
     end
@@ -329,25 +322,13 @@
 
     task :'install:main' => [:'stx:jv-branch:extractver']
     task :'artifacts:main' => [:'stx:jv-branch:extractver']
-    task :'artifacts:post' => [:'stx:jv-branch:extractver'] do
-      # Just to be sure, refuse to create stc and librun archives
-      # if the directory contain sources. This is to prevent accidental
-      # leak of non-disclosed sources. Better safe than sorry!
-      if core_developer? || (File.exist? BUILD_DIR / 'stx' / 'stc' / '.hg') || (File.exist? BUILD_DIR / 'stx' / 'stc' / 'CVS') then
-        puts "WARNING: NOT CREATING stc archive since directory contains sources!!!"
-      else
-        # Be paranoid, do cleanup here. Should have been done, but who knows...
-        cleanup_stc()
-    	  zip BUILD_DIR / 'stx' / 'stc' , archive: ARTIFACTS_DIR / "#{project.app_name}-#{project.app_version}_#{build_platform_id()}_prebuilt-stc"
-      end
-      if core_developer? || (File.exist? BUILD_DIR / 'stx' / 'librun' / '.hg') || (File.exist? BUILD_DIR / 'stx' / 'librun' / 'CVS') then
-        puts  "WARNING: NOT CREATING librun archive since directory contains sources!!!"
-      else
-        # Be paranoid, do cleanup here. Should have been done, but who knows...
-        cleanup_librun()
-    	  zip BUILD_DIR / 'stx' / 'librun' , archive: ARTIFACTS_DIR / "#{project.app_name}-#{project.app_version}_#{build_platform_id()}_prebuilt-librun"
-      end
-    end
+    task :'artifacts:post' => [:'artifacts:prebuilt-stc', :'artifacts:prebuilt-librun' ]
+
+    # Make the stc and librun archive tasks depend on :'stx:jv-branch:extractver'
+    # so that they create archives with an up-to-date version.
+    task :'artifacts:prebuilt-stc' => :'stx:jv-branch:extractver'
+    task :'artifacts:prebuilt-librun' => :'stx:jv-branch:extractver'
+
   end
 end
 
@@ -367,7 +348,7 @@
   ver = (ver.split(".")[0..2]).join(".")
   project.app_version "#{ver}_#{BUILD_ID}"
   # This is really ugly. We need to clean that up...
-  BUILD_NAME.replace "#{project.app_name}-#{project.app_version}_#{build_platform_id()}"
+  BUILD_NAME.replace "#{project.app_name}-#{project.app_version}_#{BUILD_TARGET}"
 end
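
Throughout specs/stx-jv.rbspec the old ARCH / build_platform_id() pair is replaced by
BUILD_TARGET, which the artifact matching and the cairo DLL selection treat as a
target-triplet-like string (32-bit builds are recognised by matching /i686/). A small
sketch of how those checks behave; the concrete value below is an assumption, not taken
from this repository:

  BUILD_TARGET = 'i686-pc-mingw32'   # hypothetical example value

  # 32-bit builds pick the i586 cairo support DLLs, everything else x86_64:
  cairo_dll_dir = (/i686/ =~ BUILD_TARGET ? 'i586' : 'x86_64')   # => "i586"

  # Artifact and build names now embed BUILD_TARGET instead of the old
  # "#{ARCH}-#{host_os}" platform id, e.g. "..._i686-pc-mingw32_prebuilt-stc".
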
 
 
--- a/tests/dsl_tests.rb	Fri May 11 12:10:26 2018 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,45 +0,0 @@
-require File.join(File.dirname(__FILE__), '..' , 'rakelib' , 'dsl')
-require 'test/unit'
-
-module Rake::StX::DSL
-  class ObjectTests < Test::Unit::TestCase
-
-    class A < Object
-      property :bar
-      property :baz, :default => false, :values => [true, false]
-      property :qux, :default => (Proc.new do 
-                                   :qux
-                                 end)      
-    end
-    
-    def test_01           
-      f = A.new()
-      assert f.bar == nil
-      f.bar "baz"
-      assert f.bar == "baz"
-      f.bar = "qux"
-      assert f.bar == "qux"
-    end
-
-    def test_02
-      f = A.new()     
-      assert f.baz == false
-      f.baz true
-      assert f.baz == true
-      gote = false
-      begin
-        f.baz "String"
-      rescue Exception 
-        gote = true
-      end
-      assert gote
-      
-    end
-    
-    def test_03
-      f = A.new()
-      assert f.qux.kind_of?  Proc        
-      assert f.qux.call() == :qux      
-    end
-  end
-end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/tests_dsl.rb	Tue Jun 12 21:27:24 2018 +0000
@@ -0,0 +1,50 @@
+require File.join(File.dirname(__FILE__), '..' , 'rakelib' , 'dsl')
+require 'test/unit'
+
+module Rake::StX::DSL
+  class ObjectTests < Test::Unit::TestCase
+
+    class A < Object
+      property :bar
+      property :baz, :default => false, :values => [true, false]
+      property :qux, :default => (Proc.new do 
+                                   :qux
+                                 end)      
+    end
+    
+    def test_01           
+      f = A.new()
+      assert f.bar == nil
+      f.bar "baz"
+      assert f.bar == "baz"
+      f.bar = "qux"
+      assert f.bar == "qux"
+    end
+
+    def test_02
+      f = A.new()     
+      assert f.baz == false
+      f.baz true
+      assert f.baz == true
+      gote = false
+      begin
+        f.baz "String"
+      rescue Exception 
+        gote = true
+      end
+      assert gote
+      
+    end
+    
+    def test_03
+      f = A.new()
+      assert f.qux.kind_of?  Proc        
+      assert f.qux.call() == :qux      
+    end
+  end
+end
+
+if __FILE__ == $0
+  require 'test/unit/ui/console/testrunner'
+  Test::Unit::UI::Console::TestRunner.run(Rake::StX::DSL::ObjectTests)
+end
\ No newline at end of file
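
The renamed tests_dsl.rb exercises the `property` class macro from rakelib/dsl.rb: reading
a property with no argument returns its value (or its default, which may be a Proc returned
as-is), calling it with an argument or using `name=` assigns it, and an optional :values
list restricts what may be assigned. The macro itself is not part of this changeset; the
following is a hypothetical, self-contained sketch that is merely consistent with these
tests, not the actual rakelib/dsl.rb code:

  module PropertyDSL
    # property :name, :default => ..., :values => [...]
    def property(name, options = {})
      define_method(name) do |*args|
        if args.empty?
          # Reader: return the stored value, or the (unevaluated) default.
          if instance_variable_defined?("@#{name}")
            instance_variable_get("@#{name}")
          else
            options[:default]
          end
        else
          # DSL-style writer: `obj.name value`
          send("#{name}=", args.first)
        end
      end

      define_method("#{name}=") do |value|
        if options[:values] && !options[:values].include?(value)
          raise ArgumentError, "#{value.inspect} is not a valid value for #{name}"
        end
        instance_variable_set("@#{name}", value)
      end
    end
  end

  class Widget
    extend PropertyDSL
    property :bar
    property :baz, :default => false, :values => [true, false]
  end

  w = Widget.new
  w.bar            # => nil
  w.bar "hello"    # DSL-style setter
  w.bar            # => "hello"
  w.baz = true     # plain setter works as well
  # w.baz "oops"   # would raise ArgumentError - "oops" is not in :values
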
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/tests_extensions.rb	Tue Jun 12 21:27:24 2018 +0000
@@ -0,0 +1,153 @@
+require File.join(File.dirname(__FILE__), '..' , 'rakelib' , 'extensions')
+require 'tmpdir'
+require 'test/unit'
+
+include RakeFileUtils
+
+class RakeFileUtilsTests < Test::Unit::TestCase
+  def test_which()    
+    assert File.executable?( which('ls') )
+    assert which('boguscommand').nil?
+  end
+
+  # A helper for `test_zip_01()`
+  def test_zip_01_fmt(fmt)
+    Dir.mktmpdir do | tmp |
+      chdir File.join(File.dirname(__FILE__), '..') do
+        archive = File.join(tmp, "rakelib#{fmt}")
+        zip "rakelib", archive: archive
+        assert File.exist? archive
+        unzip archive
+        assert File.exist? File.join(tmp, 'rakelib')
+        assert File.exist? File.join(tmp, 'rakelib', 'extensions.rb')
+        assert File.exist? File.join(tmp, 'rakelib', 'support.rb')
+      end
+    end
+  end  
+
+  def test_zip_01()    
+    test_zip_01_fmt('.zip') if which 'zip'
+    test_zip_01_fmt('.tar.bz2') if which 'tar' and which 'bzip2'
+    test_zip_01_fmt('.tar.gz') if which 'tar' and which 'gzip'
+  end
+
+  def test_zip_02()     
+    chdir File.join(File.dirname(__FILE__), '..') do        
+      fmt = win32? ? '.zip' : '.tar.bz2'
+      begin
+        zip "rakelib"        
+        assert(File.exist? "rakelib#{fmt}")
+      ensure
+        rm_f "rakelib#{fmt}" 
+      end
+    end
+  end
+
+  # A helper for `test_zip_03()`
+  def test_zip_03_fmt(fmt)
+    Dir.mktmpdir do | tmp |
+      chdir File.join(File.dirname(__FILE__), '..') do
+        archive = File.join(tmp, "rakelib#{fmt}")
+        zip "rakelib", archive: archive, include: [ 'extensions.rb' ]
+        assert File.exist? archive
+        unzip archive
+        assert File.exist? File.join(tmp, 'rakelib')
+        assert File.exist? File.join(tmp, 'rakelib', 'extensions.rb')
+        assert (not File.exist? File.join(tmp, 'rakelib', 'support.rb'))
+      end
+    end
+  end  
+
+  def test_zip_03()    
+    test_zip_03_fmt('.zip') if which 'zip'
+    test_zip_03_fmt('.tar.bz2') if which 'tar' and which 'bzip2'
+    test_zip_03_fmt('.tar.gz') if which 'tar' and which 'gzip'
+  end
+
+  def test_rm_rf_ex() 
+    def with_test_dir(&block) 
+      Dir.mktmpdir do | tmp |
+        chdir tmp do
+          mkdir_p File.join("a", "b", "c")
+          touch File.join("a", "aaa1.txt")
+          touch File.join("a", "aaa2.txt")
+          touch File.join("a", "aaa3.txt")
+          touch File.join("a", "b", "bbb1.txt")
+          touch File.join("a", "b", "bbb2.txt")
+          touch File.join("a", "b", "bbb3.txt")
+          touch File.join("a", "b", "c", "ccc1.txt")
+          touch File.join("a", "b", "c", "ccc2.txt")
+          touch File.join("a", "b", "c", "ccc3.txt")          
+          yield block
+        end
+      end
+    end    
+
+    with_test_dir do
+      rm_rf_ex "a"
+      sh 'ls -lr'
+      assert (not File.exists? "a")
+    end
+
+    with_test_dir do
+      rm_rf_ex "a", exceptions: ['aaa1.txt']
+      sh 'ls -lr'
+      assert (    File.exists? "a")
+      assert (    File.exists? (File.join("a", "aaa1.txt")))
+      assert (not File.exists? (File.join("a", "aaa2.txt")))
+      assert (not File.exists? (File.join("a", "aaa3.txt")))
+      assert (not File.exists? (File.join("a", "b")))
+    end
+
+    with_test_dir do
+      rm_rf_ex "a", exceptions: ['bogus']
+      sh 'ls -lr'
+      assert (not File.exists? "a")      
+    end
+
+    with_test_dir do
+      rm_rf_ex "a", exceptions: [File.join('b', 'bogus')]
+      sh 'ls -lr'
+      assert (not File.exists? "a")      
+    end
+
+    with_test_dir do
+      rm_rf_ex "a", exceptions: ['b']
+      sh 'ls -lr'
+      assert (    File.exists? "a")
+      assert (not File.exists? (File.join("a", "aaa1.txt")))
+      assert (not File.exists? (File.join("a", "aaa2.txt")))
+      assert (not File.exists? (File.join("a", "aaa3.txt")))
+      assert (    File.exists? (File.join("a", "b")))
+      assert (    File.exists? (File.join("a", "b", "bbb1.txt")))
+      assert (    File.exists? (File.join("a", "b", "bbb2.txt")))
+      assert (    File.exists? (File.join("a", "b", "bbb3.txt")))
+      assert (    File.exists? (File.join("a", "b", "c")))
+      assert (    File.exists? (File.join("a", "b", "c", "ccc1.txt")))
+      assert (    File.exists? (File.join("a", "b", "c", "ccc2.txt")))
+      assert (    File.exists? (File.join("a", "b", "c", "ccc3.txt")))
+    end
+
+    with_test_dir do
+      rm_rf_ex "a", exceptions: [File.join('b', 'bbb1.txt'), File.join('b', 'c', 'ccc3.txt') ]
+      sh 'ls -lr'
+      assert (    File.exists? "a")
+      assert (not File.exists? (File.join("a", "aaa1.txt")))
+      assert (not File.exists? (File.join("a", "aaa2.txt")))
+      assert (not File.exists? (File.join("a", "aaa3.txt")))
+      assert (    File.exists? (File.join("a", "b")))
+      assert (    File.exists? (File.join("a", "b", "bbb1.txt")))
+      assert (not File.exists? (File.join("a", "b", "bbb2.txt")))
+      assert (not File.exists? (File.join("a", "b", "bbb3.txt")))
+      assert (    File.exists? (File.join("a", "b", "c")))
+      assert (not File.exists? (File.join("a", "b", "c", "ccc1.txt")))
+      assert (not File.exists? (File.join("a", "b", "c", "ccc2.txt")))
+      assert (    File.exists? (File.join("a", "b", "c", "ccc3.txt")))
+    end
+  end
+end
+
+if __FILE__ == $0
+  require 'test/unit/ui/console/testrunner'
+  Test::Unit::UI::Console::TestRunner.run(RakeFileUtilsTests)
+end
\ No newline at end of file
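
tests_extensions.rb drives the `which`, `zip`/`unzip` and `rm_rf_ex` helpers from
rakelib/extensions.rb, whose implementations are not part of this changeset. The semantics
the rm_rf_ex tests rely on are: remove a directory tree, but keep every entry named in
`exceptions:` (paths relative to the removed root) together with its parent directories
and, for directories, its whole subtree; exceptions that do not exist protect nothing.
A hypothetical sketch with those semantics, for illustration only (not the
rakelib/extensions.rb code, and using POSIX-style paths for brevity):

  require 'fileutils'

  def rm_rf_ex(dir, exceptions: [])
    # Exceptions are relative to `dir`; only existing ones can protect anything.
    keep = exceptions.select { |e| File.exist?(File.join(dir, e)) }
    return FileUtils.rm_rf(dir) if keep.empty?

    Dir.glob(File.join(dir, '**', '*')).each do |path|
      rel = path.sub(/\A#{Regexp.escape(dir)}\/?/, '')
      protected_entry = keep.any? do |k|
        rel == k ||                    # the kept entry itself
          rel.start_with?("#{k}/") ||  # something inside a kept directory
          k.start_with?("#{rel}/")     # a parent directory of a kept entry
      end
      FileUtils.rm_rf(path) unless protected_entry
    end
  end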