changeset 22040:551da6052741

Merge.
author Roland Schatz <roland.schatz@oracle.com>
date Tue, 28 Jul 2015 18:33:42 +0200
parents 09531c471176 (current diff) fb6c6070b64d (diff)
children 7be57462fa84
files mx.cmd mx.sh mxtool/.project mxtool/.pydevproject mxtool/.pylintrc mxtool/CheckCopyright.java mxtool/ClasspathDump.java mxtool/URLConnectionDownload.java mxtool/copyrights/oracle.copyright.hash mxtool/copyrights/oracle.copyright.regex.hash mxtool/copyrights/oracle.copyright.regex.star mxtool/copyrights/oracle.copyright.star mxtool/copyrights/oracle.copyright.upl.regex.star mxtool/copyrights/oracle.copyright.upl.star mxtool/mx mxtool/mx.py truffle/com.oracle.truffle.api/src/com/oracle/truffle/api/vm/TruffleVM.java
diffstat 29 files changed, 142 insertions(+), 8898 deletions(-)
--- a/mx.cmd	Sat Jul 25 10:25:36 2015 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +0,0 @@
-python %~dp0/mxtool/mx.py %*
--- a/mx.sh	Sat Jul 25 10:25:36 2015 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +0,0 @@
-mxtool/mx
\ No newline at end of file
--- a/mx.truffle/.project	Sat Jul 25 10:25:36 2015 +0200
+++ b/mx.truffle/.project	Tue Jul 28 18:33:42 2015 +0200
@@ -3,7 +3,7 @@
 	<name>mx.truffle</name>
 	<comment></comment>
 	<projects>
-		<project>mxtool</project>
+		<project>mxtool2</project>
 	</projects>
 	<buildSpec>
 		<buildCommand>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mx.truffle/copyrights/oracle.copyright.upl.regex.star	Tue Jul 28 18:33:42 2015 +0200
@@ -0,0 +1,1 @@
+/\*\n \* Copyright \(c\) (?:(20[0-9][0-9]), )?(20[0-9][0-9]), Oracle and/or its affiliates\. All rights reserved\.\n \* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER\.\n \*\n \* The Universal Permissive License \(UPL\), Version 1\.0\n \* \n \* Subject to the condition set forth below, permission is hereby granted to any\n \* person obtaining a copy of this software, associated documentation and/or\n \* data \(collectively the "Software"\), free of charge and under any and all\n \* copyright rights in the Software, and any and all patent rights owned or\n \* freely licensable by each licensor hereunder covering either \(i\) the\n \* unmodified Software as contributed to or provided by such licensor, or \(ii\)\n \* the Larger Works \(as defined below\), to deal in both\n \* \n \* \(a\) the Software, and\n \* \n \* \(b\) any piece of software and/or hardware listed in the lrgrwrks\.txt file if\n \* one is included with the Software each a "Larger Work" to which the Software\n \* is contributed by such licensors\),\n \* \n \* without restriction, including without limitation the rights to copy, create\n \* derivative works of, display, perform, and distribute the Software and make,\n \* use, sell, offer for sale, import, export, have made, and have sold the\n \* Software and the Larger Work\(s\), and to sublicense the foregoing rights on\n \* either these or other terms\.\n \* \n \* This license is subject to the following condition:\n \* \n \* The above copyright notice and either this complete permission notice or at a\n \* minimum a reference to the UPL must be included in all copies or substantial\n \* portions of the Software\.\n \* \n \* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n \* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n \* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT\. IN NO EVENT SHALL THE\n \* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n \* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n \* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n \* SOFTWARE\.\n \*/\n.*
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mx.truffle/copyrights/oracle.copyright.upl.star	Tue Jul 28 18:33:42 2015 +0200
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2015, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * The Universal Permissive License (UPL), Version 1.0
+ * 
+ * Subject to the condition set forth below, permission is hereby granted to any
+ * person obtaining a copy of this software, associated documentation and/or
+ * data (collectively the "Software"), free of charge and under any and all
+ * copyright rights in the Software, and any and all patent rights owned or
+ * freely licensable by each licensor hereunder covering either (i) the
+ * unmodified Software as contributed to or provided by such licensor, or (ii)
+ * the Larger Works (as defined below), to deal in both
+ * 
+ * (a) the Software, and
+ * 
+ * (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if
+ * one is included with the Software each a "Larger Work" to which the Software
+ * is contributed by such licensors),
+ * 
+ * without restriction, including without limitation the rights to copy, create
+ * derivative works of, display, perform, and distribute the Software and make,
+ * use, sell, offer for sale, import, export, have made, and have sold the
+ * Software and the Larger Work(s), and to sublicense the foregoing rights on
+ * either these or other terms.
+ * 
+ * This license is subject to the following condition:
+ * 
+ * The above copyright notice and either this complete permission notice or at a
+ * minimum a reference to the UPL must be included in all copies or substantial
+ * portions of the Software.
+ * 
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
--- a/mx.truffle/mx_truffle.py	Sat Jul 25 10:25:36 2015 +0200
+++ b/mx.truffle/mx_truffle.py	Tue Jul 28 18:33:42 2015 +0200
@@ -26,1184 +26,19 @@
 #
 # ----------------------------------------------------------------------------------------------------
 
-import os, stat, errno, sys, shutil, zipfile, tarfile, tempfile, re, time, datetime, platform, subprocess, socket
-from os.path import join, exists, dirname, basename
-from argparse import ArgumentParser, RawDescriptionHelpFormatter, REMAINDER
 import mx
-import xml.dom.minidom
-import itertools
-import json, textwrap
-import fnmatch
-
-# This works because when mx loads this file, it makes sure __file__ gets an absolute path
-_graal_home = dirname(dirname(__file__))
-
-""" The VM that will be run by the 'vm' command and built by default by the 'build' command.
-    This can be set via the global '--vm' option or the DEFAULT_VM environment variable.
-    It can also be temporarily set by using of a VM context manager object in a 'with' statement. """
-_vm = None
-
-_make_eclipse_launch = False
-
-_minVersion = mx.VersionSpec('1.7')
-
-# max version (first _unsupported_ version)
-_untilVersion = None
-
-class JDKDeployedDist:
-    def __init__(self, name, isExtension=False, usesJVMCIClassLoader=False, partOfHotSpot=False):
-        self.name = name
-        self.isExtension = isExtension
-        self.usesJVMCIClassLoader = usesJVMCIClassLoader
-        self.partOfHotSpot = partOfHotSpot # true when this distribution is delivered with HotSpot
-
-_jdkDeployedDists = [
-    JDKDeployedDist('TRUFFLE'),
-]
-
-JDK_UNIX_PERMISSIONS_DIR = 0755
-JDK_UNIX_PERMISSIONS_FILE = 0644
-JDK_UNIX_PERMISSIONS_EXEC = 0755
-
-def isVMSupported(vm):
-    if 'client' == vm and len(platform.mac_ver()[0]) != 0:
-        # Client VM not supported: java launcher on Mac OS X translates '-client' to '-server'
-        return False
-    return True
-
-def _get_vm():
-    """
-    Gets the configured VM, presenting a dialogue if there is no currently configured VM.
-    """
-    global _vm
-    if _vm:
-        return _vm
-    vm = mx.get_env('DEFAULT_VM')
-    if vm is None:
-        extras = mx.get_env('EXTRA_JAVA_HOMES')
-        if not extras is None:
-            for e in extras.split(':'):
-                vm = e
-                break
-    envPath = join(_graal_home, 'mx', 'env')
-    if vm and 'graal' in vm:
-        if exists(envPath):
-            with open(envPath) as fp:
-                if 'DEFAULT_VM=' + vm in fp.read():
-                    mx.log('Please update the DEFAULT_VM value in ' + envPath + ' to replace "graal" with "jvmci"')
-        vm = vm.replace('graal', 'jvmci')
-    if vm is None:
-        mx.abort('Need to specify VM with --vm option or DEFAULT_VM environment variable')
-    _vm = vm
-    return vm
-
-def chmodRecursive(dirname, chmodFlagsDir):
-    if mx.get_os() == 'windows':
-        return
-
-    def _chmodDir(chmodFlags, dirname, fnames):
-        os.chmod(dirname, chmodFlagsDir)
-
-    os.path.walk(dirname, _chmodDir, chmodFlagsDir)
-
-def clean(args):
-    """clean the source tree"""
-    opts = mx.clean(args, parser=ArgumentParser(prog='mx clean'))
-
-    if opts.native:
-        def handleRemoveReadonly(func, path, exc):
-            excvalue = exc[1]
-            if mx.get_os() == 'windows' and func in (os.rmdir, os.remove) and excvalue.errno == errno.EACCES:
-                os.chmod(path, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)  # 0777
-                func(path)
-            else:
-                raise
-
-        def rmIfExists(name):
-            if os.path.isdir(name):
-                shutil.rmtree(name, ignore_errors=False, onerror=handleRemoveReadonly)
-            elif os.path.isfile(name):
-                os.unlink(name)
-
-        rmIfExists(join(_graal_home, 'build'))
-        rmIfExists(join(_graal_home, 'build-nojvmci'))
-
-def export(args):
-    """create archives of builds split by vmbuild and vm"""
-
-    parser = ArgumentParser(prog='mx export')
-    args = parser.parse_args(args)
-
-    # collect data about export
-    infos = dict()
-    infos['timestamp'] = time.time()
-
-    hgcfg = mx.HgConfig()
-    hgcfg.check()
-    infos['revision'] = hgcfg.tip('.') + ('+' if hgcfg.isDirty('.') else '')
-    # TODO: infos['repository']
-
-    infos['jdkversion'] = str(mx.java().version)
-
-    infos['architecture'] = mx.get_arch()
-    infos['platform'] = mx.get_os()
-
-    if mx.get_os != 'windows':
-        pass
-        # infos['ccompiler']
-        # infos['linker']
-
-    infos['hostname'] = socket.gethostname()
-
-    def _writeJson(suffix, properties):
-        d = infos.copy()
-        for k, v in properties.iteritems():
-            assert not d.has_key(k)
-            d[k] = v
-
-        jsonFileName = 'export-' + suffix + '.json'
-        with open(jsonFileName, 'w') as f:
-            print >> f, json.dumps(d)
-        return jsonFileName
-
-
-    def _genFileName(archivtype, middle):
-        idPrefix = infos['revision'] + '_'
-        idSuffix = '.tar.gz'
-        return join(_graal_home, "graalvm_" + archivtype + "_" + idPrefix + middle + idSuffix)
-
-    # graal directory
-    graalDirTarName = _genFileName('classfiles', 'javac')
-    mx.logv("creating graal " + graalDirTarName)
-    with tarfile.open(graalDirTarName, 'w:gz') as tar:
-        for root, _, files in os.walk("graal"):
-            for f in [f for f in files if not f.endswith('.java')]:
-                name = join(root, f)
-                # print name
-                tar.add(name, name)
-
-        n = _writeJson("graal", {'javacompiler' : 'javac'})
-        tar.add(n, n)
-
-
-def _run_benchmark(args, availableBenchmarks, runBenchmark):
-
-    vmOpts, benchmarksAndOptions = _extract_VM_args(args, useDoubleDash=availableBenchmarks is None)
-
-    if availableBenchmarks is None:
-        harnessArgs = benchmarksAndOptions
-        return runBenchmark(None, harnessArgs, vmOpts)
-
-    if len(benchmarksAndOptions) == 0:
-        mx.abort('at least one benchmark name or "all" must be specified')
-    benchmarks = list(itertools.takewhile(lambda x: not x.startswith('-'), benchmarksAndOptions))
-    harnessArgs = benchmarksAndOptions[len(benchmarks):]
-
-    if 'all' in benchmarks:
-        benchmarks = availableBenchmarks
-    else:
-        for bm in benchmarks:
-            if bm not in availableBenchmarks:
-                mx.abort('unknown benchmark: ' + bm + '\nselect one of: ' + str(availableBenchmarks))
-
-    failed = []
-    for bm in benchmarks:
-        if not runBenchmark(bm, harnessArgs, vmOpts):
-            failed.append(bm)
-
-    if len(failed) != 0:
-        mx.abort('Benchmark failures: ' + str(failed))
-
-def _vmLibDirInJdk(jdk):
-    """
-    Get the directory within a JDK where the server and client
-    subdirectories are located.
-    """
-    mxos = mx.get_os()
-    if mxos == 'darwin':
-        return join(jdk, 'jre', 'lib')
-    if mxos == 'windows' or mxos == 'cygwin':
-        return join(jdk, 'jre', 'bin')
-    return join(jdk, 'jre', 'lib', mx.get_arch())
-
-def _vmJliLibDirs(jdk):
-    """
-    Get the directories within a JDK where the jli library designates to.
-    """
-    mxos = mx.get_os()
-    if mxos == 'darwin':
-        return [join(jdk, 'jre', 'lib', 'jli')]
-    if mxos == 'windows' or mxos == 'cygwin':
-        return [join(jdk, 'jre', 'bin'), join(jdk, 'bin')]
-    return [join(jdk, 'jre', 'lib', mx.get_arch(), 'jli'), join(jdk, 'lib', mx.get_arch(), 'jli')]
-
-def _vmCfgInJdk(jdk, jvmCfgFile='jvm.cfg'):
-    """
-    Get the jvm.cfg file.
-    """
-    mxos = mx.get_os()
-    if mxos == "windows" or mxos == "cygwin":
-        return join(jdk, 'jre', 'lib', mx.get_arch(), jvmCfgFile)
-    return join(_vmLibDirInJdk(jdk), jvmCfgFile)
-
-def _jdksDir():
-    return os.path.abspath(join(_graal_home, 'jdk' + str(mx.java().version)))
-
-def _handle_missing_VM(bld, vm=None):
-    if not vm:
-        vm = _get_vm()
-    mx.log('The ' + bld + ' ' + vm + ' VM has not been created')
-    mx.abort('You need to run "mx --javahome ' + vm + ' use the selected VM')
-
-def _jdk(build=None, vmToCheck=None, create=False, installJars=True):
-    """
-    Get the JDK into which Graal is installed, creating it first if necessary.
-    """
-    jdk = join(_jdksDir(), build)
-    if create:
-        srcJdk = mx.java().jdk
-        if not exists(jdk):
-            mx.log('Creating ' + jdk + ' from ' + srcJdk)
-            shutil.copytree(srcJdk, jdk)
-
-            # Make a copy of the default VM so that this JDK can be
-            # reliably used as the bootstrap for a HotSpot build.
-            jvmCfg = _vmCfgInJdk(jdk)
-            if not exists(jvmCfg):
-                mx.abort(jvmCfg + ' does not exist')
-
-            defaultVM = None
-            jvmCfgLines = []
-            with open(jvmCfg) as f:
-                for line in f:
-                    if line.startswith('-') and defaultVM is None:
-                        parts = line.split()
-                        if len(parts) == 2:
-                            assert parts[1] == 'KNOWN', parts[1]
-                            defaultVM = parts[0][1:]
-                            jvmCfgLines += ['# default VM is a copy of the unmodified ' + defaultVM + ' VM\n']
-                            jvmCfgLines += ['-original KNOWN\n']
-                        else:
-                            # skip lines which we cannot parse (e.g. '-hotspot ALIASED_TO -client')
-                            mx.log("WARNING: skipping not parsable line \"" + line + "\"")
-                    else:
-                        jvmCfgLines += [line]
-
-            assert defaultVM is not None, 'Could not find default VM in ' + jvmCfg
-            chmodRecursive(jdk, JDK_UNIX_PERMISSIONS_DIR)
-            shutil.move(join(_vmLibDirInJdk(jdk), defaultVM), join(_vmLibDirInJdk(jdk), 'original'))
-
-            if mx.get_os() != 'windows':
-                os.chmod(jvmCfg, JDK_UNIX_PERMISSIONS_FILE)
-            with open(jvmCfg, 'w') as fp:
-                for line in jvmCfgLines:
-                    fp.write(line)
-
-            # patch 'release' file (append graalvm revision)
-            releaseFile = join(jdk, 'release')
-            if exists(releaseFile):
-                releaseFileLines = []
-                with open(releaseFile) as f:
-                    for line in f:
-                        releaseFileLines.append(line)
-
-                if mx.get_os() != 'windows':
-                    os.chmod(releaseFile, JDK_UNIX_PERMISSIONS_FILE)
-                with open(releaseFile, 'w') as fp:
-                    for line in releaseFileLines:
-                        if line.startswith("SOURCE="):
-                            try:
-                                sourceLine = line[0:-2]  # remove last char
-                                hgcfg = mx.HgConfig()
-                                hgcfg.check()
-                                revision = hgcfg.tip('.')[:12]  # take first 12 chars
-                                fp.write(sourceLine + ' graal:' + revision + '\"\n')
-                            except:
-                                fp.write(line)
-                        else:
-                            fp.write(line)
-
-            # Install a copy of the disassembler library
-            try:
-                hsdis([], copyToDir=_vmLibDirInJdk(jdk))
-            except SystemExit:
-                pass
-    else:
-        if not exists(jdk):
-            _handle_missing_VM(build, vmToCheck)
-
-    if installJars:
-        for jdkDist in _jdkDeployedDists:
-            dist = mx.distribution(jdkDist.name)
-            if exists(dist.path) and jdkDist.partOfHotSpot:
-                _installDistInJdks(jdkDist)
-
-    return jdk
 
-def _updateInstalledJVMCIOptionsFile(jdk):
-    jvmciOptions = join(_graal_home, 'jvmci.options')
-    jreLibDir = join(jdk, 'jre', 'lib')
-    if exists(jvmciOptions):
-        shutil.copy(jvmciOptions, join(jreLibDir, 'jvmci.options'))
-    else:
-        toDelete = join(jreLibDir, 'jvmci.options')
-        if exists(toDelete):
-            os.unlink(toDelete)
-
-def _makeHotspotGeneratedSourcesDir():
-    """
-    Gets the directory containing all the HotSpot sources generated from
-    JVMCI Java sources. This directory will be created if it doesn't yet exist.
-    """
-    hsSrcGenDir = join(mx.project('com.oracle.jvmci.hotspot').source_gen_dir(), 'hotspot')
-    if not exists(hsSrcGenDir):
-        os.makedirs(hsSrcGenDir)
-    return hsSrcGenDir
-
-def _copyToJdk(src, dst, permissions=JDK_UNIX_PERMISSIONS_FILE):
-    name = os.path.basename(src)
-    dstLib = join(dst, name)
-    if mx.get_env('SYMLINK_GRAAL_JAR', None) == 'true':
-        # Using symlinks is much faster than copying but may
-        # cause issues if the lib is being updated while
-        # the VM is running.
-        if not os.path.islink(dstLib) or not os.path.realpath(dstLib) == src:
-            if exists(dstLib):
-                os.remove(dstLib)
-            os.symlink(src, dstLib)
-    else:
-        # do a copy and then a move to get atomic updating (on Unix)
-        fd, tmp = tempfile.mkstemp(suffix='', prefix=name, dir=dst)
-        shutil.copyfile(src, tmp)
-        os.close(fd)
-        shutil.move(tmp, dstLib)
-        os.chmod(dstLib, permissions)
-
-def _filterJVMCIServices(servicesMap, classpath):
-    """
-    Filters and returns the names in 'serviceImplNames' that denote
-    types available in 'classpath' implementing or extending
-    com.oracle.jvmci.service.Service.
-    """
-    _, binDir = mx._compile_mx_class('FilterTypes', os.pathsep.join(classpath), myDir=dirname(__file__))
-    serialized = [k + '=' + ','.join(v) for k, v in servicesMap.iteritems()]
-    cmd = [mx.java().java, '-cp', mx._cygpathU2W(os.pathsep.join([binDir] + classpath)), 'FilterTypes', 'com.oracle.jvmci.service.Service'] + serialized
-    serialized = subprocess.check_output(cmd)
-    if len(serialized) == 0:
-        return {}
-    servicesMap = {}
-    for e in serialized.split(' '):
-        k, v = e.split('=')
-        impls = v.split(',')
-        servicesMap[k] = impls
-    return servicesMap
-
-def _extractJVMCIFiles(jdkJars, jvmciJars, servicesDir, optionsDir, cleanDestination=True):
-    if cleanDestination:
-        if exists(servicesDir):
-            shutil.rmtree(servicesDir)
-        if exists(optionsDir):
-            shutil.rmtree(optionsDir)
-    if not exists(servicesDir):
-        os.makedirs(servicesDir)
-    if not exists(optionsDir):
-        os.makedirs(optionsDir)
-    servicesMap = {}
-    optionsFiles = []
-    for jar in jvmciJars:
-        if os.path.isfile(jar):
-            with zipfile.ZipFile(jar) as zf:
-                for member in zf.namelist():
-                    if member.startswith('META-INF/services') and member:
-                        serviceName = basename(member)
-                        if serviceName == "":
-                            continue # Zip files may contain empty entries for directories (jar -cf ... creates such)
-                        # we don't handle directories
-                        assert serviceName and member == 'META-INF/services/' + serviceName
-                        with zf.open(member) as serviceFile:
-                            serviceImpls = servicesMap.setdefault(serviceName, [])
-                            for line in serviceFile.readlines():
-                                line = line.strip()
-                                if line:
-                                    serviceImpls.append(line)
-                    elif member.startswith('META-INF/options'):
-                        filename = basename(member)
-                        if filename == "":
-                            continue # Zip files may contain empty entries for directories (jar -cf ... creates such)
-                        # we don't handle directories
-                        assert filename and member == 'META-INF/options/' + filename
-                        targetpath = join(optionsDir, filename)
-                        optionsFiles.append(filename)
-                        with zf.open(member) as optionsFile, \
-                             file(targetpath, "wb") as target:
-                            shutil.copyfileobj(optionsFile, target)
-    servicesMap = _filterJVMCIServices(servicesMap, jdkJars)
-    for serviceName, serviceImpls in servicesMap.iteritems():
-        fd, tmp = tempfile.mkstemp(prefix=serviceName)
-        f = os.fdopen(fd, 'w+')
-        for serviceImpl in serviceImpls:
-            f.write(serviceImpl + os.linesep)
-        target = join(servicesDir, serviceName)
-        f.close()
-        shutil.move(tmp, target)
-        if mx.get_os() != 'windows':
-            os.chmod(target, JDK_UNIX_PERMISSIONS_FILE)
-
-def _updateJVMCIFiles(jdkDir):
-    jreJVMCIDir = join(jdkDir, 'jre', 'lib', 'jvmci')
-    jvmciJars = [join(jreJVMCIDir, e) for e in os.listdir(jreJVMCIDir) if e.endswith('.jar')]
-    jreJVMCIServicesDir = join(jreJVMCIDir, 'services')
-    jreJVMCIOptionsDir = join(jreJVMCIDir, 'options')
-    _extractJVMCIFiles(_getJdkDeployedJars(jdkDir), jvmciJars, jreJVMCIServicesDir, jreJVMCIOptionsDir)
-
-def _patchGraalVersionConstant(dist):
-    """
-    Patches the constant "@@@@@@@@@@@@@@@@graal.version@@@@@@@@@@@@@@@@" in the constant pool of Graal.class
-    with the computed Graal version string.
-    """
-    zf = zipfile.ZipFile(dist.path, 'r')
-    graalClassfilePath = 'com/oracle/graal/api/runtime/Graal.class'
-    try:
-        graalClassfile = zf.read(graalClassfilePath)
-    except KeyError:
-        mx.log(graalClassfilePath + ' is not present in ' + dist.path)
-        return
-    placeholder = '@@@@@@@@@@@@@@@@graal.version@@@@@@@@@@@@@@@@'
-    placeholderLen = len(placeholder)
-    versionSpec = '{:' + str(placeholderLen) + '}'
-    versionStr = versionSpec.format(graal_version())
-
-    if len(versionStr) > placeholderLen:
-        # Truncate the version string if necessary
-        assert versionStr.startswith('unknown'), versionStr
-        versionStr = versionStr[:placeholderLen]
-    if placeholder not in graalClassfile:
-        assert versionStr in graalClassfile, 'could not find "' + placeholder + '" or "' + versionStr + '" constant in ' + dist.path + '!' + graalClassfilePath
-        zf.close()
-        return False
-
-    zfOutFd, zfOutPath = tempfile.mkstemp(suffix='', prefix=basename(dist.path) + '.', dir=dirname(dist.path))
-    zfOut = zipfile.ZipFile(zfOutPath, 'w')
-    for zi in zf.infolist():
-        if zi.filename == graalClassfilePath:
-            data = graalClassfile.replace(placeholder, versionStr)
-        else:
-            data = zf.read(zi)
-        zfOut.writestr(zi, data)
-    zfOut.close()
-    os.close(zfOutFd)
-    zf.close()
-    shutil.move(zfOutPath, dist.path)
+from mx_unittest import unittest
+from mx_gate import Task
+import mx_gate
 
-def _installDistInJdks(deployableDist):
-    """
-    Installs the jar(s) for a given Distribution into all existing JVMCI JDKs
-    """
-    if True:
-        return
-    dist = mx.distribution(deployableDist.name)
-    if dist.name == 'GRAAL':
-        _patchGraalVersionConstant(dist)
-
-    jdks = _jdksDir()
-    if exists(jdks):
-        for e in os.listdir(jdks):
-            jdkDir = join(jdks, e)
-            jreLibDir = join(jdkDir, 'jre', 'lib')
-            if exists(jreLibDir):
-                if deployableDist.isExtension:
-                    targetDir = join(jreLibDir, 'ext')
-                elif deployableDist.usesJVMCIClassLoader:
-                    targetDir = join(jreLibDir, 'jvmci')
-                else:
-                    targetDir = jreLibDir
-                if not exists(targetDir):
-                    os.makedirs(targetDir)
-                _copyToJdk(dist.path, targetDir)
-                if dist.sourcesPath:
-                    _copyToJdk(dist.sourcesPath, jdkDir)
-                if deployableDist.usesJVMCIClassLoader:
-                    # deploy service files
-                    _updateJVMCIFiles(jdkDir)
-
-def _getJdkDeployedJars(jdkDir):
-    """
-    Gets jar paths for all deployed distributions in the context of
-    a given JDK directory.
-    """
-    jreLibDir = join(jdkDir, 'jre', 'lib')
-    jars = []
-    for dist in _jdkDeployedDists:
-        jar = basename(mx.distribution(dist.name).path)
-        if dist.isExtension:
-            jars.append(join(jreLibDir, 'ext', jar))
-        elif dist.usesJVMCIClassLoader:
-            jars.append(join(jreLibDir, 'jvmci', jar))
-        else:
-            jars.append(join(jreLibDir, jar))
-    return jars
-
-
-# run a command in the windows SDK Debug Shell
-def _runInDebugShell(cmd, workingDir, logFile=None, findInOutput=None, respondTo=None):
-    if respondTo is None:
-        respondTo = {}
-    newLine = os.linesep
-    startToken = 'RUNINDEBUGSHELL_STARTSEQUENCE'
-    endToken = 'RUNINDEBUGSHELL_ENDSEQUENCE'
-
-    winSDK = mx.get_env('WIN_SDK', 'C:\\Program Files\\Microsoft SDKs\\Windows\\v7.1\\')
-
-    if not exists(mx._cygpathW2U(winSDK)):
-        mx.abort("Could not find Windows SDK : '" + winSDK + "' does not exist")
-
-    winSDKSetEnv = mx._cygpathW2U(join(winSDK, 'Bin', 'SetEnv.cmd'))
-    if not exists(winSDKSetEnv):
-        mx.abort("Invalid Windows SDK path (" + winSDK + ") : could not find Bin/SetEnv.cmd (you can use the WIN_SDK environment variable to specify an other path)")
-
-    wincmd = 'cmd.exe /E:ON /V:ON /K "' + mx._cygpathU2W(winSDKSetEnv) + '"'
-    p = subprocess.Popen(wincmd, shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-    stdout = p.stdout
-    stdin = p.stdin
-    if logFile:
-        log = open(logFile, 'w')
-    ret = False
-
-    def _writeProcess(s):
-        stdin.write(s + newLine)
-
-    _writeProcess("echo " + startToken)
-    while True:
-        # encoding may be None on windows plattforms
-        if sys.stdout.encoding is None:
-            encoding = 'utf-8'
-        else:
-            encoding = sys.stdout.encoding
-
-        line = stdout.readline().decode(encoding)
-        if logFile:
-            log.write(line.encode('utf-8'))
-        line = line.strip()
-        mx.log(line)
-        if line == startToken:
-            _writeProcess('cd /D ' + workingDir + ' & ' + cmd + ' & echo ' + endToken)
-        for regex in respondTo.keys():
-            match = regex.search(line)
-            if match:
-                _writeProcess(respondTo[regex])
-        if findInOutput:
-            match = findInOutput.search(line)
-            if match:
-                ret = True
-        if line == endToken:
-            if not findInOutput:
-                _writeProcess('echo ERRXXX%errorlevel%')
-            else:
-                break
-        if line.startswith('ERRXXX'):
-            if line == 'ERRXXX0':
-                ret = True
-            break
-    _writeProcess("exit")
-    if logFile:
-        log.close()
-    return ret
-
-def jdkhome(vm=None):
-    """return the JDK directory selected for the 'vm' command"""
-    return _jdk(installJars=False)
-
-def print_jdkhome(args, vm=None):
-    """print the JDK directory selected for the 'vm' command"""
-    print jdkhome(vm)
-
-def buildvars(args):
-    """describe the variables that can be set by the -D option to the 'mx build' commmand"""
-
-    buildVars = {
-        'ALT_BOOTDIR' : 'The location of the bootstrap JDK installation (default: ' + mx.java().jdk + ')',
-        'ALT_OUTPUTDIR' : 'Build directory',
-        'HOTSPOT_BUILD_JOBS' : 'Number of CPUs used by make (default: ' + str(mx.cpu_count()) + ')',
-        'INSTALL' : 'Install the built VM into the JDK? (default: y)',
-        'ZIP_DEBUGINFO_FILES' : 'Install zipped debug symbols file? (default: 0)',
-    }
-
-    mx.log('HotSpot build variables that can be set by the -D option to "mx build":')
-    mx.log('')
-    for n in sorted(buildVars.iterkeys()):
-        mx.log(n)
-        mx.log(textwrap.fill(buildVars[n], initial_indent='    ', subsequent_indent='    ', width=200))
-
-    mx.log('')
-    mx.log('Note that these variables can be given persistent values in the file ' + join(_graal_home, 'mx', 'env') + ' (see \'mx about\').')
-
-cached_graal_version = None
-
-def graal_version(dev_suffix='dev'):
-    global cached_graal_version
-
-    if not cached_graal_version:
-        # extract latest release tag for graal
-        try:
-            tags = [x.split() for x in subprocess.check_output(['hg', '-R', _graal_home, 'tags']).split('\n') if x.startswith("graal-")]
-            current_id = subprocess.check_output(['hg', '-R', _graal_home, 'log', '--template', '{rev}\n', '--rev', 'tip']).strip()
-        except:
-            # not a mercurial repository or hg commands are not available.
-            tags = None
-
-        if tags and current_id:
-            sorted_tags = sorted(tags, key=lambda e: [int(x) for x in e[0][len("graal-"):].split('.')], reverse=True)
-            most_recent_tag_name, most_recent_tag_revision = sorted_tags[0]
-            most_recent_tag_id = most_recent_tag_revision[:most_recent_tag_revision.index(":")]
-            most_recent_tag_version = most_recent_tag_name[len("graal-"):]
-
-            # tagged commit is one-off with commit that tags it
-            if int(current_id) - int(most_recent_tag_id) <= 1:
-                cached_graal_version = most_recent_tag_version
-            else:
-                major, minor = map(int, most_recent_tag_version.split('.'))
-                cached_graal_version = str(major) + '.' + str(minor + 1) + '-' + dev_suffix
-        else:
-            cached_graal_version = 'unknown-{0}'.format(platform.node())
-
-    return cached_graal_version
+_suite = mx.suite('truffle')
 
 def build(args, vm=None):
-    """build the VM binary
-
-    The global '--vm' and '--vmbuild' options select which VM type and build target to build."""
-
-    # Override to fail quickly if extra arguments are given
-    # at the end of the command line. This allows for a more
-    # helpful error message.
-    class AP(ArgumentParser):
-        def __init__(self):
-            ArgumentParser.__init__(self, prog='mx build')
-        def parse_args(self, args):
-            result = ArgumentParser.parse_args(self, args)
-            if len(result.remainder) != 0:
-                firstBuildTarget = result.remainder[0]
-                mx.abort('To specify the ' + firstBuildTarget + ' VM build target, you need to use the global "--vmbuild" option. For example:\n' +
-                         '    mx --vmbuild ' + firstBuildTarget + ' build')
-            return result
-
-    # Call mx.build to compile the Java sources
-    parser = AP()
-    parser.add_argument('-D', action='append', help='set a HotSpot build variable (run \'mx buildvars\' to list variables)', metavar='name=value')
-
-    opts2 = mx.build(['--source', '1.7'] + args, parser=parser)
+    """build the Java sources"""
+    opts2 = mx.build(['--source', '1.7'] + args)
     assert len(opts2.remainder) == 0
 
-def vmg(args):
-    """run the debug build of VM selected by the '--vm' option"""
-    return vm(args, vmbuild='debug')
-
-def vmfg(args):
-    """run the fastdebug build of VM selected by the '--vm' option"""
-    return vm(args, vmbuild='fastdebug')
-
-def _parseVmArgs(args, vm=None, cwd=None, vmbuild=None):
-    """run the VM selected by the '--vm' option"""
-
-    jdk = mx.java().jdk
-    mx.expand_project_in_args(args)
-    exe = join(jdk, 'bin', mx.exe_suffix('java'))
-    pfx = []
-
-    if '-version' in args:
-        ignoredArgs = args[args.index('-version') + 1:]
-        if len(ignoredArgs) > 0:
-            mx.log("Warning: The following options will be ignored by the vm because they come after the '-version' argument: " + ' '.join(ignoredArgs))
-
-    # Unconditionally prepend Truffle to the boot class path.
-    # This used to be done by the VM itself but was removed to
-    # separate the VM from Truffle.
-    truffle_jar = mx.archive(['@TRUFFLE'])[0]
-    args = ['-Xbootclasspath/p:' + truffle_jar] + args
-
-    args = mx.java().processArgs(args)
-    return (pfx, exe, vm, args, cwd)
-
-def vm(args, vm=None, nonZeroIsFatal=True, out=None, err=None, cwd=None, timeout=None, vmbuild=None):
-    (pfx_, exe_, _, args_, cwd) = _parseVmArgs(args, vm, cwd, vmbuild)
-    return mx.run(pfx_ + [exe_] + args_, nonZeroIsFatal=nonZeroIsFatal, out=out, err=err, cwd=cwd)
-
-def _find_classes_with_annotations(p, pkgRoot, annotations, includeInnerClasses=False):
-    """
-    Scan the sources of project 'p' for Java source files containing a line starting with 'annotation'
-    (ignoring preceding whitespace) and return the fully qualified class name for each Java
-    source file matched in a list.
-    """
-
-    matches = lambda line: len([a for a in annotations if line == a or line.startswith(a + '(')]) != 0
-    return p.find_classes_with_matching_source_line(pkgRoot, matches, includeInnerClasses)
-
-def _extract_VM_args(args, allowClasspath=False, useDoubleDash=False, defaultAllVMArgs=True):
-    """
-    Partitions a command line into a leading sequence of HotSpot VM options and the rest.
-    """
-    for i in range(0, len(args)):
-        if useDoubleDash:
-            if args[i] == '--':
-                vmArgs = args[:i]
-                remainder = args[i + 1:]
-                return vmArgs, remainder
-        else:
-            if not args[i].startswith('-'):
-                if i != 0 and (args[i - 1] == '-cp' or args[i - 1] == '-classpath'):
-                    if not allowClasspath:
-                        mx.abort('Cannot supply explicit class path option')
-                    else:
-                        continue
-                vmArgs = args[:i]
-                remainder = args[i:]
-                return vmArgs, remainder
-
-    if defaultAllVMArgs:
-        return args, []
-    else:
-        return [], args
-
-def _run_tests(args, harness, annotations, testfile, blacklist, whitelist, regex):
-
-
-    vmArgs, tests = _extract_VM_args(args)
-    for t in tests:
-        if t.startswith('-'):
-            mx.abort('VM option ' + t + ' must precede ' + tests[0])
-
-    candidates = {}
-    for p in mx.projects_opt_limit_to_suites():
-        if mx.java().javaCompliance < p.javaCompliance:
-            continue
-        for c in _find_classes_with_annotations(p, None, annotations).keys():
-            candidates[c] = p
-
-    classes = []
-    if len(tests) == 0:
-        classes = candidates.keys()
-        projectsCp = mx.classpath([pcp.name for pcp in mx.projects_opt_limit_to_suites() if pcp.javaCompliance <= mx.java().javaCompliance])
-    else:
-        projs = set()
-        found = False
-        if len(tests) == 1 and '#' in tests[0]:
-            words = tests[0].split('#')
-            if len(words) != 2:
-                mx.abort("Method specification is class#method: " + tests[0])
-            t, method = words
-
-            for c, p in candidates.iteritems():
-                # prefer exact matches first
-                if t == c:
-                    found = True
-                    classes.append(c)
-                    projs.add(p.name)
-            if not found:
-                for c, p in candidates.iteritems():
-                    if t in c:
-                        found = True
-                        classes.append(c)
-                        projs.add(p.name)
-            if not found:
-                mx.log('warning: no tests matched by substring "' + t)
-            elif len(classes) != 1:
-                mx.abort('More than one test matches substring {0} {1}'.format(t, classes))
-
-            classes = [c + "#" + method for c in classes]
-        else:
-            for t in tests:
-                if '#' in t:
-                    mx.abort('Method specifications can only be used in a single test: ' + t)
-                for c, p in candidates.iteritems():
-                    if t in c:
-                        found = True
-                        classes.append(c)
-                        projs.add(p.name)
-                if not found:
-                    mx.log('warning: no tests matched by substring "' + t)
-        projectsCp = mx.classpath(projs)
-
-    if blacklist:
-        classes = [c for c in classes if not any((glob.match(c) for glob in blacklist))]
-
-    if whitelist:
-        classes = [c for c in classes if any((glob.match(c) for glob in whitelist))]
-
-    if regex:
-        classes = [c for c in classes if re.search(regex, c)]
-
-    if len(classes) != 0:
-        f_testfile = open(testfile, 'w')
-        for c in classes:
-            f_testfile.write(c + '\n')
-        f_testfile.close()
-        harness(projectsCp, vmArgs)
-
-def _unittest(args, annotations, prefixCp="", blacklist=None, whitelist=None, verbose=False, fail_fast=False, enable_timing=False, regex=None, color=False, eager_stacktrace=False, gc_after_test=False):
-    testfile = os.environ.get('MX_TESTFILE', None)
-    if testfile is None:
-        (_, testfile) = tempfile.mkstemp(".testclasses", "graal")
-        os.close(_)
-
-    coreCp = mx.classpath(['com.oracle.truffle.tck', 'HCFDIS'])
-
-    coreArgs = []
-    if verbose:
-        coreArgs.append('-JUnitVerbose')
-    if fail_fast:
-        coreArgs.append('-JUnitFailFast')
-    if enable_timing:
-        coreArgs.append('-JUnitEnableTiming')
-    if color:
-        coreArgs.append('-JUnitColor')
-    if eager_stacktrace:
-        coreArgs.append('-JUnitEagerStackTrace')
-    if gc_after_test:
-        coreArgs.append('-JUnitGCAfterTest')
-
-
-    def harness(projectsCp, vmArgs):
-        if _get_vm() != 'jvmci':
-            prefixArgs = ['-esa', '-ea']
-        else:
-            prefixArgs = ['-XX:-BootstrapJVMCI', '-esa', '-ea']
-        if gc_after_test:
-            prefixArgs.append('-XX:-DisableExplicitGC')
-        with open(testfile) as fp:
-            testclasses = [l.rstrip() for l in fp.readlines()]
-
-        # Remove entries from class path that are in graal.jar and
-        # run the VM in a mode where application/test classes can
-        # access core Graal classes.
-        cp = prefixCp + coreCp + os.pathsep + projectsCp
-        if isJVMCIEnabled(_get_vm()):
-            excluded = set()
-            for jdkDist in _jdkDeployedDists:
-                dist = mx.distribution(jdkDist.name)
-                excluded.update([d.output_dir() for d in dist.sorted_deps()])
-            cp = os.pathsep.join([e for e in cp.split(os.pathsep) if e not in excluded])
-
-        # suppress menubar and dock when running on Mac
-        vmArgs = ['-Djava.awt.headless=true'] + vmArgs
-
-        if len(testclasses) == 1:
-            # Execute Junit directly when one test is being run. This simplifies
-            # replaying the VM execution in a native debugger (e.g., gdb).
-            vm(prefixArgs + vmArgs + ['-cp', mx._separatedCygpathU2W(cp), 'com.oracle.truffle.tck.TruffleJUnitCore'] + coreArgs + testclasses)
-        else:
-            vm(prefixArgs + vmArgs + ['-cp', mx._separatedCygpathU2W(cp), 'com.oracle.truffle.tck.TruffleJUnitCore'] + coreArgs + ['@' + mx._cygpathU2W(testfile)])
-
-    try:
-        _run_tests(args, harness, annotations, testfile, blacklist, whitelist, regex)
-    finally:
-        if os.environ.get('MX_TESTFILE') is None:
-            os.remove(testfile)
-
-_unittestHelpSuffix = """
-    Unittest options:
-
-      --blacklist <file>     run all testcases not specified in the blacklist
-      --whitelist <file>     run only testcases which are included
-                             in the given whitelist
-      --verbose              enable verbose JUnit output
-      --fail-fast            stop after first JUnit test class that has a failure
-      --enable-timing        enable JUnit test timing
-      --regex <regex>        run only testcases matching a regular expression
-      --color                enable colors output
-      --eager-stacktrace     print stacktrace eagerly
-      --gc-after-test        force a GC after each test
-
-    To avoid conflicts with VM options '--' can be used as delimiter.
-
-    If filters are supplied, only tests whose fully qualified name
-    includes a filter as a substring are run.
-
-    For example, this command line:
-
-       mx unittest -G:Dump= -G:MethodFilter=BC_aload.* -G:+PrintCFG BC_aload
-
-    will run all JUnit test classes that contain 'BC_aload' in their
-    fully qualified name and will pass these options to the VM:
-
-        -G:Dump= -G:MethodFilter=BC_aload.* -G:+PrintCFG
-
-    To get around command line length limitations on some OSes, the
-    JUnit class names to be executed are written to a file that a
-    custom JUnit wrapper reads and passes onto JUnit proper. The
-    MX_TESTFILE environment variable can be set to specify a
-    file which will not be deleted once the unittests are done
-    (unlike the temporary file otherwise used).
-
-    As with all other commands, using the global '-v' before 'unittest'
-    command will cause mx to show the complete command line
-    it uses to run the VM.
-"""
-
-def unittest(args):
-    """run the JUnit tests (all testcases){0}"""
-
-    parser = ArgumentParser(prog='mx unittest',
-          description='run the JUnit tests',
-          add_help=False,
-          formatter_class=RawDescriptionHelpFormatter,
-          epilog=_unittestHelpSuffix,
-        )
-    parser.add_argument('--blacklist', help='run all testcases not specified in the blacklist', metavar='<path>')
-    parser.add_argument('--whitelist', help='run testcases specified in whitelist only', metavar='<path>')
-    parser.add_argument('--verbose', help='enable verbose JUnit output', action='store_true')
-    parser.add_argument('--fail-fast', help='stop after first JUnit test class that has a failure', action='store_true')
-    parser.add_argument('--enable-timing', help='enable JUnit test timing', action='store_true')
-    parser.add_argument('--regex', help='run only testcases matching a regular expression', metavar='<regex>')
-    parser.add_argument('--color', help='enable color output', action='store_true')
-    parser.add_argument('--eager-stacktrace', help='print stacktrace eagerly', action='store_true')
-    parser.add_argument('--gc-after-test', help='force a GC after each test', action='store_true')
-
-    ut_args = []
-    delimiter = False
-    # check for delimiter
-    while len(args) > 0:
-        arg = args.pop(0)
-        if arg == '--':
-            delimiter = True
-            break
-        ut_args.append(arg)
-
-    if delimiter:
-        # all arguments before '--' must be recognized
-        parsed_args = parser.parse_args(ut_args)
-    else:
-        # parse all know arguments
-        parsed_args, args = parser.parse_known_args(ut_args)
-
-    if parsed_args.whitelist:
-        try:
-            with open(join(_graal_home, parsed_args.whitelist)) as fp:
-                parsed_args.whitelist = [re.compile(fnmatch.translate(l.rstrip())) for l in fp.readlines() if not l.startswith('#')]
-        except IOError:
-            mx.log('warning: could not read whitelist: ' + parsed_args.whitelist)
-    if parsed_args.blacklist:
-        try:
-            with open(join(_graal_home, parsed_args.blacklist)) as fp:
-                parsed_args.blacklist = [re.compile(fnmatch.translate(l.rstrip())) for l in fp.readlines() if not l.startswith('#')]
-        except IOError:
-            mx.log('warning: could not read blacklist: ' + parsed_args.blacklist)
-
-    _unittest(args, ['@Test', '@Parameters'], **parsed_args.__dict__)
-
-def shortunittest(args):
-    """alias for 'unittest --whitelist test/whitelist_shortunittest.txt'{0}"""
-
-    args = ['--whitelist', 'test/whitelist_shortunittest.txt'] + args
-    unittest(args)
-
-class Task:
-    # None or a list of strings. If not None, only tasks whose title
-    # matches at least one of the substrings in this list will return
-    # a non-None value from __enter__. The body of a 'with Task(...) as t'
-    # statement should check 't' and exit immediately if it is None.
-    filters = None
-    filtersExclude = False
-
-    def __init__(self, title, tasks=None):
-        self.tasks = tasks
-        self.title = title
-        if tasks is not None and Task.filters is not None:
-            if Task.filtersExclude:
-                self.skipped = any([f in title for f in Task.filters])
-            else:
-                self.skipped = not any([f in title for f in Task.filters])
-        else:
-            self.skipped = False
-        if not self.skipped:
-            self.start = time.time()
-            self.end = None
-            self.duration = None
-            mx.log(time.strftime('gate: %d %b %Y %H:%M:%S: BEGIN: ') + title)
-    def __enter__(self):
-        assert self.tasks is not None, "using Task with 'with' statement requires to pass the tasks list in the constructor"
-        if self.skipped:
-            return None
-        return self
-    def __exit__(self, exc_type, exc_value, traceback):
-        if not self.skipped:
-            self.tasks.append(self.stop())
-    def stop(self):
-        self.end = time.time()
-        self.duration = datetime.timedelta(seconds=self.end - self.start)
-        mx.log(time.strftime('gate: %d %b %Y %H:%M:%S: END:   ') + self.title + ' [' + str(self.duration) + ']')
-        return self
-    def abort(self, codeOrMessage):
-        self.end = time.time()
-        self.duration = datetime.timedelta(seconds=self.end - self.start)
-        mx.log(time.strftime('gate: %d %b %Y %H:%M:%S: ABORT: ') + self.title + ' [' + str(self.duration) + ']')
-        mx.abort(codeOrMessage)
-        return self
-
-def _basic_gate_body(args, tasks):
-    # Run unit tests on server-hosted-jvmci
-    with Task('UnitTests:hosted-product', tasks) as t:
-        if t: unittest(['--enable-timing', '--verbose', '--fail-fast'])
-
-
-def gate(args, gate_body=_basic_gate_body):
-    """run the tests used to validate a push
-
-    If this command exits with a 0 exit code, then the source code is in
-    a state that would be accepted for integration into the main repository."""
-
-    parser = ArgumentParser(prog='mx gate')
-    parser.add_argument('-j', '--omit-java-clean', action='store_false', dest='cleanJava', help='omit cleaning Java native code')
-    parser.add_argument('-n', '--omit-native-clean', action='store_false', dest='cleanNative', help='omit cleaning and building native code')
-    parser.add_argument('-i', '--omit-ide-clean', action='store_false', dest='cleanIde', help='omit cleaning the ide project files')
-    parser.add_argument('-g', '--only-build-jvmci', action='store_false', dest='buildNonJVMCI', help='only build the JVMCI VM')
-    parser.add_argument('-t', '--task-filter', help='comma separated list of substrings to select subset of tasks to be run')
-    parser.add_argument('-x', action='store_true', help='makes --task-filter an exclusion instead of inclusion filter')
-
-    args = parser.parse_args(args)
-
-    if args.task_filter:
-        Task.filters = args.task_filter.split(',')
-        Task.filtersExclude = args.x
-    elif args.x:
-        mx.abort('-x option cannot be used without --task-filter option')
-
-    # Force
-    if not mx._opts.strict_compliance:
-        mx.log("[gate] forcing strict compliance")
-        mx._opts.strict_compliance = True
-
-    tasks = []
-    total = Task('Gate')
-    try:
-        with Task('Pylint', tasks) as t:
-            if t: mx.pylint([])
-
-        def _clean(name='Clean'):
-            with Task(name, tasks) as t:
-                if t:
-                    cleanArgs = []
-                    if not args.cleanNative:
-                        cleanArgs.append('--no-native')
-                    if not args.cleanJava:
-                        cleanArgs.append('--no-java')
-                    clean(cleanArgs)
-        _clean()
-
-        with Task('IDEConfigCheck', tasks) as t:
-            if t:
-                if args.cleanIde:
-                    mx.ideclean([])
-                    mx.ideinit([])
-
-        eclipse_exe = mx.get_env('ECLIPSE_EXE')
-        if eclipse_exe is not None:
-            with Task('CodeFormatCheck', tasks) as t:
-                if t and mx.eclipseformat(['-e', eclipse_exe]) != 0:
-                    t.abort('Formatter modified files - run "mx eclipseformat", check in changes and repush')
-
-        with Task('Canonicalization Check', tasks) as t:
-            if t:
-                mx.log(time.strftime('%d %b %Y %H:%M:%S - Ensuring mx/projects files are canonicalized...'))
-                if mx.canonicalizeprojects([]) != 0:
-                    t.abort('Rerun "mx canonicalizeprojects" and check-in the modified mx/projects files.')
-
-        if mx.get_env('JDT'):
-            with Task('BuildJavaWithEcj', tasks):
-                if t: build(['-p', '--no-native', '--jdt-warning-as-error'])
-            _clean('CleanAfterEcjBuild')
-
-        with Task('BuildJavaWithJavac', tasks):
-            if t: build(['-p', '--no-native', '--force-javac'])
-
-        with Task('Checkstyle', tasks) as t:
-            if t and mx.checkstyle([]) != 0:
-                t.abort('Checkstyle warnings were found')
-
-        with Task('Checkheaders', tasks) as t:
-            if t and checkheaders([]) != 0:
-                t.abort('Checkheaders warnings were found')
-
-        with Task('FindBugs', tasks) as t:
-            if t and findbugs([]) != 0:
-                t.abort('FindBugs warnings were found')
-
-        gate_body(args, tasks)
-
-    except KeyboardInterrupt:
-        total.abort(1)
-
-    except BaseException as e:
-        import traceback
-        traceback.print_exc()
-        total.abort(str(e))
-
-    total.stop()
-
-    mx.log('Gate task times:')
-    for t in tasks:
-        mx.log('  ' + str(t.duration) + '\t' + t.title)
-    mx.log('  =======')
-    mx.log('  ' + str(total.duration))
-
-    if args.task_filter:
-        Task.filters = None
-
-def _igvJdk():
-    v8u20 = mx.VersionSpec("1.8.0_20")
-    v8u40 = mx.VersionSpec("1.8.0_40")
-    v8 = mx.VersionSpec("1.8")
-    def _igvJdkVersionCheck(version):
-        return version >= v8 and (version < v8u20 or version >= v8u40)
-    return mx.java_version(_igvJdkVersionCheck, versionDescription='>= 1.8 and < 1.8.0u20 or >= 1.8.0u40').jdk
-
-def _igvBuildEnv():
-        # When the http_proxy environment variable is set, convert it to the proxy settings that ant needs
-    env = dict(os.environ)
-    proxy = os.environ.get('http_proxy')
-    if not (proxy is None) and len(proxy) > 0:
-        if '://' in proxy:
-            # Remove the http:// prefix (or any other protocol prefix)
-            proxy = proxy.split('://', 1)[1]
-        # Separate proxy server name and port number
-        proxyName, proxyPort = proxy.split(':', 1)
-        proxyEnv = '-DproxyHost="' + proxyName + '" -DproxyPort=' + proxyPort
-        env['ANT_OPTS'] = proxyEnv
-
-    env['JAVA_HOME'] = _igvJdk()
-    return env
-
-def igv(args):
-    """run the Ideal Graph Visualizer"""
-    logFile = '.ideal_graph_visualizer.log'
-    with open(join(_graal_home, logFile), 'w') as fp:
-        mx.logv('[Ideal Graph Visualizer log is in ' + fp.name + ']')
-        nbplatform = join(_graal_home, 'src', 'share', 'tools', 'IdealGraphVisualizer', 'nbplatform')
-
-        # Remove NetBeans platform if it is earlier than the current supported version
-        if exists(nbplatform):
-            updateTrackingFile = join(nbplatform, 'platform', 'update_tracking', 'org-netbeans-core.xml')
-            if not exists(updateTrackingFile):
-                mx.log('Could not find \'' + updateTrackingFile + '\', removing NetBeans platform')
-                shutil.rmtree(nbplatform)
-            else:
-                dom = xml.dom.minidom.parse(updateTrackingFile)
-                currentVersion = mx.VersionSpec(dom.getElementsByTagName('module_version')[0].getAttribute('specification_version'))
-                supportedVersion = mx.VersionSpec('3.43.1')
-                if currentVersion < supportedVersion:
-                    mx.log('Replacing NetBeans platform version ' + str(currentVersion) + ' with version ' + str(supportedVersion))
-                    shutil.rmtree(nbplatform)
-                elif supportedVersion < currentVersion:
-                    mx.log('Supported NetBeans version in igv command should be updated to ' + str(currentVersion))
-
-        if not exists(nbplatform):
-            mx.logv('[This execution may take a while as the NetBeans platform needs to be downloaded]')
-
-        env = _igvBuildEnv()
-        # make the jar for Batik 1.7 available.
-        env['IGV_BATIK_JAR'] = mx.library('BATIK').get_path(True)
-        if mx.run(['ant', '-f', mx._cygpathU2W(join(_graal_home, 'src', 'share', 'tools', 'IdealGraphVisualizer', 'build.xml')), '-l', mx._cygpathU2W(fp.name), 'run'], env=env, nonZeroIsFatal=False):
-            mx.abort("IGV ant build & launch failed. Check '" + logFile + "'. You can also try to delete 'src/share/tools/IdealGraphVisualizer/nbplatform'.")
-
 def maven_install_truffle(args):
     """install Truffle into your local Maven repository"""
     for name in mx._dists:
@@ -1217,251 +52,26 @@
         if dot <= slash:
             mx.abort('Dot should be after / in ' + path)
         artifactId = path[slash + 1: dot]
-        mx.run(['mvn', 'install:install-file', '-DgroupId=com.oracle.' + dist.suite.name, '-DartifactId=' + artifactId, '-Dversion=' + graal_version('SNAPSHOT'), '-Dpackaging=jar', '-Dfile=' + path])
-
-def c1visualizer(args):
-    """run the Cl Compiler Visualizer"""
-    libpath = join(_graal_home, 'lib')
-    if mx.get_os() == 'windows':
-        executable = join(libpath, 'c1visualizer', 'bin', 'c1visualizer.exe')
-    else:
-        executable = join(libpath, 'c1visualizer', 'bin', 'c1visualizer')
-
-    # Check whether the current C1Visualizer installation is the up-to-date
-    if exists(executable) and not exists(mx.library('C1VISUALIZER_DIST').get_path(resolve=False)):
-        mx.log('Updating C1Visualizer')
-        shutil.rmtree(join(libpath, 'c1visualizer'))
-
-    archive = mx.library('C1VISUALIZER_DIST').get_path(resolve=True)
-
-    if not exists(executable):
-        zf = zipfile.ZipFile(archive, 'r')
-        zf.extractall(libpath)
-
-    if not exists(executable):
-        mx.abort('C1Visualizer binary does not exist: ' + executable)
-
-    if mx.get_os() != 'windows':
-        # Make sure that execution is allowed. The zip file does not always specfiy that correctly
-        os.chmod(executable, 0777)
-
-    mx.run([executable])
-
-def hsdis(args, copyToDir=None):
-    """download the hsdis library
-
-    This is needed to support HotSpot's assembly dumping features.
-    By default it downloads the Intel syntax version; use the 'att' argument to install the AT&T syntax version."""
-    flavor = 'intel'
-    if 'att' in args:
-        flavor = 'att'
-    if mx.get_arch() == "sparcv9":
-        flavor = "sparcv9"
-    lib = mx.add_lib_suffix('hsdis-' + mx.get_arch())
-    path = join(_graal_home, 'lib', lib)
-
-    sha1s = {
-        'att/hsdis-amd64.dll' : 'bcbd535a9568b5075ab41e96205e26a2bac64f72',
-        'att/hsdis-amd64.so' : '58919ba085d4ef7a513f25bae75e7e54ee73c049',
-        'intel/hsdis-amd64.dll' : '6a388372cdd5fe905c1a26ced614334e405d1f30',
-        'intel/hsdis-amd64.so' : '844ed9ffed64fe9599638f29a8450c50140e3192',
-        'intel/hsdis-amd64.dylib' : 'fdb13ef0d7d23d93dacaae9c98837bea0d4fc5a2',
-        'sparcv9/hsdis-sparcv9.so': '970640a9af0bd63641f9063c11275b371a59ee60',
-    }
-
-    flavoredLib = flavor + "/" + lib
-    if flavoredLib not in sha1s:
-        mx.logv("hsdis not supported on this plattform or architecture")
-        return
-
-    if not exists(path):
-        sha1 = sha1s[flavoredLib]
-        sha1path = path + '.sha1'
-        mx.download_file_with_sha1('hsdis', path, ['http://lafo.ssw.uni-linz.ac.at/hsdis/' + flavoredLib], sha1, sha1path, True, True, sources=False)
-    if copyToDir is not None and exists(copyToDir):
-        shutil.copy(path, copyToDir)
-
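The hsdis command above fetches a platform-specific disassembler library and verifies it against one of the hard-coded SHA-1 digests. The sketch below shows the kind of check mx.download_file_with_sha1 is expected to perform once the file is on disk; the local path is hypothetical and the digest is the Linux/Intel entry from the table above:

import hashlib

def sha1_matches(path, expected_sha1):
    # Stream the file so the library does not have to fit in memory at once.
    digest = hashlib.sha1()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(8192), b''):
            digest.update(chunk)
    return digest.hexdigest() == expected_sha1

# sha1_matches('lib/hsdis-amd64.so', '844ed9ffed64fe9599638f29a8450c50140e3192')
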
-def hcfdis(args):
-    """disassemble HexCodeFiles embedded in text files
-
-    Run a tool over the input files to convert all embedded HexCodeFiles
-    to a disassembled format."""
-
-    parser = ArgumentParser(prog='mx hcfdis')
-    parser.add_argument('-m', '--map', help='address to symbol map applied to disassembler output')
-    parser.add_argument('files', nargs=REMAINDER, metavar='files...')
-
-    args = parser.parse_args(args)
-
-    path = mx.library('HCFDIS').get_path(resolve=True)
-    mx.run_java(['-cp', path, 'com.oracle.max.hcfdis.HexCodeFileDis'] + args.files)
-
-    if args.map is not None:
-        addressRE = re.compile(r'0[xX]([A-Fa-f0-9]+)')
-        with open(args.map) as fp:
-            lines = fp.read().splitlines()
-        symbols = dict()
-        for l in lines:
-            addressAndSymbol = l.split(' ', 1)
-            if len(addressAndSymbol) == 2:
-                address, symbol = addressAndSymbol
-                if address.startswith('0x'):
-                    address = long(address, 16)
-                    symbols[address] = symbol
-        for f in args.files:
-            with open(f) as fp:
-                lines = fp.read().splitlines()
-            updated = False
-            for i in range(0, len(lines)):
-                l = lines[i]
-                for m in addressRE.finditer(l):
-                    sval = m.group(0)
-                    val = long(sval, 16)
-                    sym = symbols.get(val)
-                    if sym:
-                        l = l.replace(sval, sym)
-                        updated = True
-                        lines[i] = l
-            if updated:
-                mx.log('updating ' + f)
-                with open('new_' + f, "w") as fp:
-                    for l in lines:
-                        print >> fp, l
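The --map handling in hcfdis above expects one 'address symbol' pair per line and rewrites every hexadecimal address found in the disassembly with the mapped symbol. A minimal rendition of that substitution, using made-up addresses and symbol names:

import re

addressRE = re.compile(r'0[xX]([A-Fa-f0-9]+)')

# Hypothetical map entries: the address comes first, the symbol is everything
# after the first space (so symbols may contain spaces).
symbols = {0x7f3a10: 'StubRoutines::call_stub', 0x7f3b40: 'nmethod entry point'}

line = 'call 0x7f3a10 ; jump to 0x7f3b40'
for m in addressRE.finditer(line):
    sym = symbols.get(int(m.group(0), 16))
    if sym:
        line = line.replace(m.group(0), sym)
print(line)  # call StubRoutines::call_stub ; jump to nmethod entry point
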
+        mx.run(['mvn', 'install:install-file', '-DgroupId=com.oracle.' + dist.suite.name, '-DartifactId=' + artifactId, '-Dversion=' + mx.suite('truffle').release_version('SNAPSHOT'), '-Dpackaging=jar', '-Dfile=' + path])
 
 def sl(args):
     """run an SL program"""
-    vmArgs, slArgs = _extract_VM_args(args)
-    vm(vmArgs + ['-cp', mx.classpath(["TRUFFLE", "com.oracle.truffle.sl"]), "com.oracle.truffle.sl.SLLanguage"] + slArgs)
+    vmArgs, slArgs = mx.extract_VM_args(args)
+    mx.run_java(vmArgs + ['-cp', mx.classpath(["TRUFFLE", "com.oracle.truffle.sl"]), "com.oracle.truffle.sl.SLLanguage"] + slArgs)
 
 def sldebug(args):
     """run a simple command line debugger for the Simple Language"""
-    vmArgs, slArgs = _extract_VM_args(args, useDoubleDash=True)
-    vm(vmArgs + ['-cp', mx.classpath("com.oracle.truffle.sl.tools"), "com.oracle.truffle.sl.tools.debug.SLREPLServer"] + slArgs)
-
-def isJVMCIEnabled(vm):
-    return vm != 'original' and not vm.endswith('nojvmci')
-
-def jol(args):
-    """Java Object Layout"""
-    joljar = mx.library('JOL_INTERNALS').get_path(resolve=True)
-    candidates = mx.findclass(args, logToConsole=False, matcher=lambda s, classname: s == classname or classname.endswith('.' + s) or classname.endswith('$' + s))
-
-    if len(candidates) > 0:
-        candidates = mx.select_items(sorted(candidates))
-    else:
-        # mx.findclass can be mistaken, don't give up yet
-        candidates = args
-
-    vm(['-javaagent:' + joljar, '-cp', os.pathsep.join([mx.classpath(), joljar]), "org.openjdk.jol.MainObjectInternals"] + candidates)
-
-def site(args):
-    """create a website containing javadoc and the project dependency graph"""
+    vmArgs, slArgs = mx.extract_VM_args(args, useDoubleDash=True)
+    mx.run_java(vmArgs + ['-cp', mx.classpath("com.oracle.truffle.sl.tools"), "com.oracle.truffle.sl.tools.debug.SLREPLServer"] + slArgs)
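Both sl and sldebug split the incoming argument list into VM arguments and language arguments before launching the JVM. Purely as an illustration of the useDoubleDash idea (mx.extract_VM_args has its own rules, which are not shown in this hunk), a split on an explicit '--' separator could look like:

def split_on_double_dash(args):
    # Everything before '--' goes to the VM, everything after to the program.
    if '--' in args:
        i = args.index('--')
        return args[:i], args[i + 1:]
    return [], args

vm_args, sl_args = split_on_double_dash(['-Xmx1g', '--', 'test.sl'])
print(vm_args)  # ['-Xmx1g']
print(sl_args)  # ['test.sl']
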
 
-    return mx.site(['--name', 'Graal',
-                    '--jd', '@-tag', '--jd', '@test:X',
-                    '--jd', '@-tag', '--jd', '@run:X',
-                    '--jd', '@-tag', '--jd', '@bug:X',
-                    '--jd', '@-tag', '--jd', '@summary:X',
-                    '--jd', '@-tag', '--jd', '@vmoption:X',
-                    '--overview', join(_graal_home, 'graal', 'overview.html'),
-                    '--title', 'Graal OpenJDK Project Documentation',
-                    '--dot-output-base', 'projects'] + args)
-
-def findbugs(args):
-    '''run FindBugs against non-test Java projects'''
-    findBugsHome = mx.get_env('FINDBUGS_HOME', None)
-    if findBugsHome:
-        findbugsJar = join(findBugsHome, 'lib', 'findbugs.jar')
-    else:
-        findbugsLib = join(_graal_home, 'lib', 'findbugs-3.0.0')
-        if not exists(findbugsLib):
-            tmp = tempfile.mkdtemp(prefix='findbugs-download-tmp', dir=_graal_home)
-            try:
-                findbugsDist = mx.library('FINDBUGS_DIST').get_path(resolve=True)
-                with zipfile.ZipFile(findbugsDist) as zf:
-                    candidates = [e for e in zf.namelist() if e.endswith('/lib/findbugs.jar')]
-                    assert len(candidates) == 1, candidates
-                    libDirInZip = os.path.dirname(candidates[0])
-                    zf.extractall(tmp)
-                shutil.copytree(join(tmp, libDirInZip), findbugsLib)
-            finally:
-                shutil.rmtree(tmp)
-        findbugsJar = join(findbugsLib, 'findbugs.jar')
-    assert exists(findbugsJar)
-    nonTestProjects = [p for p in mx.projects() if not p.name.endswith('.test') and not p.name.endswith('.jtt')]
-    outputDirs = map(mx._cygpathU2W, [p.output_dir() for p in nonTestProjects])
-    javaCompliance = max([p.javaCompliance for p in nonTestProjects])
-    findbugsResults = join(_graal_home, 'findbugs.results')
+def _truffle_gate_runner(args, tasks):
+    with Task('Truffle UnitTests', tasks) as t:
+        if t: unittest(['--suite', 'truffle', '--enable-timing', '--verbose', '--fail-fast'])
 
-    cmd = ['-jar', mx._cygpathU2W(findbugsJar), '-textui', '-low', '-maxRank', '15']
-    if mx.is_interactive():
-        cmd.append('-progress')
-    cmd = cmd + ['-auxclasspath', mx._separatedCygpathU2W(mx.classpath([d.name for d in _jdkDeployedDists] + [p.name for p in nonTestProjects])), '-output', mx._cygpathU2W(findbugsResults), '-exitcode'] + args + outputDirs
-    exitcode = mx.run_java(cmd, nonZeroIsFatal=False, javaConfig=mx.java(javaCompliance))
-    if exitcode != 0:
-        with open(findbugsResults) as fp:
-            mx.log(fp.read())
-    os.unlink(findbugsResults)
-    return exitcode
-
-def checkheaders(args):
-    """check Java source headers against any required pattern"""
-    failures = {}
-    for p in mx.projects():
-        if p.native:
-            continue
+mx_gate.add_gate_runner(_suite, _truffle_gate_runner)
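The gate runner registered above relies on mx_gate.Task acting as a context manager that yields a truthy object only when the task should actually run, which is why the body is guarded with 'if t:'. A simplified stand-in illustrating that pattern (an assumption, not mx's real implementation):

class Task(object):
    def __init__(self, title, tasks, enabled=True):
        # Record the task; 'enabled' stands in for whatever filtering mx applies.
        self.title = title
        self.enabled = enabled
        tasks.append(self)

    def __enter__(self):
        # Yield None when filtered out so the 'if t:' body is skipped.
        return self if self.enabled else None

    def __exit__(self, exc_type, exc_value, traceback):
        return False  # let exceptions from the task body propagate

tasks = []
with Task('Truffle UnitTests', tasks) as t:
    if t:
        print('running unit tests')
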
 
-        csConfig = join(mx.project(p.checkstyleProj).dir, '.checkstyle_checks.xml')
-        dom = xml.dom.minidom.parse(csConfig)
-        for module in dom.getElementsByTagName('module'):
-            if module.getAttribute('name') == 'RegexpHeader':
-                for prop in module.getElementsByTagName('property'):
-                    if prop.getAttribute('name') == 'header':
-                        value = prop.getAttribute('value')
-                        matcher = re.compile(value, re.MULTILINE)
-                        for sourceDir in p.source_dirs():
-                            for root, _, files in os.walk(sourceDir):
-                                for name in files:
-                                    if name.endswith('.java') and name != 'package-info.java':
-                                        f = join(root, name)
-                                        with open(f) as fp:
-                                            content = fp.read()
-                                        if not matcher.match(content):
-                                            failures[f] = csConfig
-    for n, v in failures.iteritems():
-        mx.log('{0}: header does not match RegexpHeader defined in {1}'.format(n, v))
-    return len(failures)
-
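checkheaders above pulls the RegexpHeader pattern out of each project's Checkstyle configuration and requires it to match at the very start of every Java source file (re.match anchors at position 0). A small self-contained illustration with a made-up header pattern:

import re

# Hypothetical RegexpHeader value, as it might appear in .checkstyle_checks.xml.
header_pattern = r'/\*\n \* Copyright \(c\) 20[0-9][0-9], Oracle.*'
matcher = re.compile(header_pattern, re.MULTILINE)

good = '/*\n * Copyright (c) 2015, Oracle and/or its affiliates.\n */\nclass A {}'
bad = '// no header\nclass B {}'

print(bool(matcher.match(good)))  # True  -> header accepted
print(bool(matcher.match(bad)))   # False -> reported as a failure
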
-def mx_init(suite):
-    commands = {
-        'checkheaders': [checkheaders, ''],
-        'clean': [clean, ''],
-        'findbugs': [findbugs, ''],
-        'maven-install-truffle' : [maven_install_truffle, ''],
-        'jdkhome': [print_jdkhome, ''],
-        'gate' : [gate, '[-options]'],
-        'unittest' : [unittest, '[unittest options] [--] [VM options] [filters...]', _unittestHelpSuffix],
-        'shortunittest' : [shortunittest, '[unittest options] [--] [VM options] [filters...]', _unittestHelpSuffix],
-        'site' : [site, '[-options]'],
-        'sl' : [sl, '[SL args|@VM options]'],
-        'sldebug' : [sldebug, '[SL args|@VM options]'],
-        'jol' : [jol, ''],
-    }
-
-    mx.update_commands(suite, commands)
-
-def mx_post_parse_cmd_line(opts):
-    # TODO _minVersion check could probably be part of a Suite in mx?
-    if mx.java().version < _minVersion:
-        mx.abort('Requires Java version ' + str(_minVersion) + ' or greater for JAVA_HOME, got version ' + str(mx.java().version))
-    if _untilVersion and mx.java().version >= _untilVersion:
-        mx.abort('Requires Java version strictly before ' + str(_untilVersion) + ' for JAVA_HOME, got version ' + str(mx.java().version))
-
-    for jdkDist in _jdkDeployedDists:
-        def _close(jdkDeployable):
-            def _install(dist):
-                assert dist.name == jdkDeployable.name, dist.name + "!=" + jdkDeployable.name
-                if not jdkDist.partOfHotSpot:
-                    _installDistInJdks(jdkDeployable)
-            return _install
-        mx.distribution(jdkDist.name).add_update_listener(_close(jdkDist))
+mx.update_commands(_suite, {
+    'maven-install-truffle' : [maven_install_truffle, ''],
+    'sl' : [sl, '[SL args|@VM options]'],
+    'sldebug' : [sldebug, '[SL args|@VM options]'],
+})
--- a/mx.truffle/suite.py	Sat Jul 25 10:25:36 2015 +0200
+++ b/mx.truffle/suite.py	Tue Jul 28 18:33:42 2015 +0200
@@ -1,11 +1,11 @@
 suite = {
-  "mxversion" : "1.0",
+  "mxversion" : "5.0",
   "name" : "truffle",
   "libraries" : {
 
     # ------------- Libraries -------------
 
-      "JLINE" : {
+    "JLINE" : {
       "path" : "lib/jline-2.11.jar",
       "urls" : [
         "http://lafo.ssw.uni-linz.ac.at/graal-external-deps/jline-2.11.jar",
@@ -13,131 +13,6 @@
       ],
       "sha1" : "9504d5e2da5d78237239c5226e8200ec21182040",
     },
-
-    "JUNIT" : {
-      "path" : "lib/junit-4.11.jar",
-      "urls" : [
-        "http://lafo.ssw.uni-linz.ac.at/graal-external-deps/junit-4.11.jar",
-        "https://search.maven.org/remotecontent?filepath=junit/junit/4.11/junit-4.11.jar",
-      ],
-      "sha1" : "4e031bb61df09069aeb2bffb4019e7a5034a4ee0",
-      "eclipse.container" : "org.eclipse.jdt.junit.JUNIT_CONTAINER/4",
-      "sourcePath" : "lib/junit-4.11-sources.jar",
-      "sourceUrls" : [
-        "http://lafo.ssw.uni-linz.ac.at/graal-external-deps/junit-4.11-sources.jar",
-        "https://search.maven.org/remotecontent?filepath=junit/junit/4.11/junit-4.11-sources.jar",
-      ],
-      "sourceSha1" : "28e0ad201304e4a4abf999ca0570b7cffc352c3c",
-      "dependencies" : ["HAMCREST"],
-    },
-
-    "CHECKSTYLE" : {
-      "path" : "lib/checkstyle-6.0-all.jar",
-      "urls" : [
-        "http://lafo.ssw.uni-linz.ac.at/graal-external-deps/checkstyle-6.0-all.jar",
-        "jar:http://sourceforge.net/projects/checkstyle/files/checkstyle/6.0/checkstyle-6.0-bin.zip/download!/checkstyle-6.0/checkstyle-6.0-all.jar",
-      ],
-      "sha1" : "2bedc7feded58b5fd65595323bfaf7b9bb6a3c7a",
-    },
-
-    "HAMCREST" : {
-      "path" : "lib/hamcrest-core-1.3.jar",
-      "urls" : [
-        "http://lafo.ssw.uni-linz.ac.at/graal-external-deps/hamcrest-core-1.3.jar",
-        "https://search.maven.org/remotecontent?filepath=org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar",
-      ],
-      "sha1" : "42a25dc3219429f0e5d060061f71acb49bf010a0",
-      "sourcePath" : "lib/hamcrest-core-1.3-sources.jar",
-      "sourceUrls" : [
-        "http://lafo.ssw.uni-linz.ac.at/graal-external-deps/hamcrest-core-1.3-sources.jar",
-        "https://search.maven.org/remotecontent?filepath=org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3-sources.jar",
-      ],
-      "sourceSha1" : "1dc37250fbc78e23a65a67fbbaf71d2e9cbc3c0b",
-    },
-
-    "HCFDIS" : {
-      "path" : "lib/hcfdis-2.jar",
-      "urls" : ["http://lafo.ssw.uni-linz.ac.at/hcfdis-2.jar"],
-      "sha1" : "bc8b2253436485e9dbaf81771c259ccfa1a24c80",
-    },
-
-    "FINDBUGS_DIST" : {
-      "path" : "lib/findbugs-dist-3.0.0.zip",
-      "urls" : [
-        "http://lafo.ssw.uni-linz.ac.at/graal-external-deps/findbugs-3.0.0.zip",
-        "http://sourceforge.net/projects/findbugs/files/findbugs/3.0.0/findbugs-3.0.0.zip/download",
-      ],
-      "sha1" : "6e56d67f238dbcd60acb88a81655749aa6419c5b",
-    },
-
-    "C1VISUALIZER_DIST" : {
-      "path" : "lib/c1visualizer_2014-04-22.zip",
-      "urls" : ["https://java.net/downloads/c1visualizer/c1visualizer_2014-04-22.zip"],
-      "sha1" : "220488d87affb569b893c7201f8ce5d2b0e03141",
-    },
-
-    "JOL_INTERNALS" : {
-      "path" : "lib/jol-internals.jar",
-      "urls" : ["http://lafo.ssw.uni-linz.ac.at/truffle/jol/jol-internals.jar"],
-      "sha1" : "508bcd26a4d7c4c44048990c6ea789a3b11a62dc",
-    },
-
-    "JACOCOAGENT" : {
-      "path" : "lib/jacocoagent.jar",
-      "urls" : ["http://lafo.ssw.uni-linz.ac.at/jacoco/jacocoagent-0.7.1-1.jar"],
-      "sha1" : "2f73a645b02e39290e577ce555f00b02004650b0",
-    },
-
-    "JACOCOREPORT" : {
-      "path" : "lib/jacocoreport.jar",
-      "urls" : ["http://lafo.ssw.uni-linz.ac.at/jacoco/jacocoreport-0.7.1-2.jar"],
-      "sha1" : "a630436391832d697a12c8f7daef8655d7a1efd2",
-    },
-
-    "DACAPO_SCALA" : {
-      "path" : "lib/dacapo-scala-0.1.0-20120216.jar",
-      "urls" : [
-        "http://lafo.ssw.uni-linz.ac.at/graal-external-deps/dacapo-scala-0.1.0-20120216.jar",
-        "http://repo.scalabench.org/snapshots/org/scalabench/benchmarks/scala-benchmark-suite/0.1.0-SNAPSHOT/scala-benchmark-suite-0.1.0-20120216.103539-3.jar",
-      ],
-      "sha1" : "59b64c974662b5cf9dbd3cf9045d293853dd7a51",
-    },
-
-    "JAVA_ALLOCATION_INSTRUMENTER" : {
-      "path" : "lib/java-allocation-instrumenter.jar",
-      "sourcePath" : "lib/java-allocation-instrumenter.jar",
-      "urls" : ["http://lafo.ssw.uni-linz.ac.at/java-allocation-instrumenter/java-allocation-instrumenter-8f0db117e64e.jar"],
-      "sha1" : "476d9a44cd19d6b55f81571077dfa972a4f8a083",
-      "bootClassPathAgent" : "true",
-    },
-
-    "VECMATH" : {
-      "path" : "lib/vecmath-1.3.1.jar",
-      "urls" : [
-        "http://lafo.ssw.uni-linz.ac.at/graal-external-deps/vecmath-1.3.1.jar",
-        "https://search.maven.org/remotecontent?filepath=java3d/vecmath/1.3.1/vecmath-1.3.1.jar",
-      ],
-      "sha1" : "a0ae4f51da409fa0c20fa0ca59e6bbc9413ae71d",
-    },
-
-    "JMH" : {
-      "path" : "lib/jmh-runner-1.4.2.jar",
-      "sha1" : "f44bffaf237305512002303a306fc5ce3fa63f76",
-      "urls" : ["http://lafo.ssw.uni-linz.ac.at/jmh/jmh-runner-1.4.2.jar"],
-      "annotationProcessor" : "true"
-    },
-
-    "BATIK" : {
-      "path" : "lib/batik-all-1.7.jar",
-      "sha1" : "122b87ca88e41a415cf8b523fd3d03b4325134a3",
-      "urls" : ["http://lafo.ssw.uni-linz.ac.at/graal-external-deps/batik-all-1.7.jar"],
-    }
-},
-
-  "jrelibraries" : {
-    "JFR" : {
-      "jar" : "jfr.jar",
-    }
   },
 
   "projects" : {
@@ -157,11 +32,11 @@
       "sourceDirs" : ["src"],
       "dependencies" : [
         "com.oracle.truffle.api.interop",
-        "JUNIT",
+        "mx:JUNIT",
       ],
       "checkstyle" : "com.oracle.truffle.dsl.processor",
       "javaCompliance" : "1.7",
-      "annotationProcessors" : ["com.oracle.truffle.dsl.processor"],
+      "annotationProcessors" : ["TRUFFLE_DSL_PROCESSOR"],
       "workingSets" : "API,Truffle,Test",
       "jacoco" : "exclude",
     },
@@ -180,11 +55,11 @@
       "sourceDirs" : ["src"],
       "dependencies" : [
         "com.oracle.truffle.dsl.processor",
-        "JUNIT",
+        "mx:JUNIT",
       ],
       "checkstyle" : "com.oracle.truffle.dsl.processor",
       "javaCompliance" : "1.7",
-      "annotationProcessors" : ["com.oracle.truffle.dsl.processor"],
+      "annotationProcessors" : ["TRUFFLE_DSL_PROCESSOR"],
       "workingSets" : "API,Truffle,Codegen,Test",
       "jacoco" : "exclude",
     },
@@ -240,11 +115,11 @@
       "dependencies" : [
         "com.oracle.truffle.api.dsl",
         "com.oracle.truffle.api.interop",
-        "JUNIT"
+        "mx:JUNIT"
       ],
+      "annotationProcessors" : ["TRUFFLE_DSL_PROCESSOR"],
       "checkstyle" : "com.oracle.truffle.api",
       "javaCompliance" : "1.7",
-      "annotationProcessors" : ["com.oracle.truffle.dsl.processor"],
       "workingSets" : "Truffle,Tools",
     },
 
@@ -262,7 +137,7 @@
       "sourceDirs" : ["src"],
       "dependencies" : [
           "com.oracle.truffle.tools",
-          "JUNIT"
+          "mx:JUNIT"
           ],
       "checkstyle" : "com.oracle.truffle.api",
       "javaCompliance" : "1.7",
@@ -288,7 +163,7 @@
         "com.oracle.truffle.tools",
       ],
       "javaCompliance" : "1.7",
-      "annotationProcessors" : ["com.oracle.truffle.dsl.processor"],
+      "annotationProcessors" : ["TRUFFLE_DSL_PROCESSOR"],
       "workingSets" : "Truffle,SimpleLanguage",
     },
 
--- a/mxtool/.project	Sat Jul 25 10:25:36 2015 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,17 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>mxtool</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.python.pydev.PyDevBuilder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.python.pydev.pythonNature</nature>
-	</natures>
-</projectDescription>
--- a/mxtool/.pydevproject	Sat Jul 25 10:25:36 2015 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,10 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<?eclipse-pydev version="1.0"?>
-
-<pydev_project>
-<pydev_pathproperty name="org.python.pydev.PROJECT_SOURCE_PATH">
-<path>/mxtool</path>
-</pydev_pathproperty>
-<pydev_property name="org.python.pydev.PYTHON_PROJECT_VERSION">python 2.7</pydev_property>
-<pydev_property name="org.python.pydev.PYTHON_PROJECT_INTERPRETER">Default</pydev_property>
-</pydev_project>
--- a/mxtool/.pylintrc	Sat Jul 25 10:25:36 2015 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,281 +0,0 @@
-[MASTER]
-
-# Specify a configuration file.
-#rcfile=
-
-# Python code to execute, usually for sys.path manipulation such as
-# pygtk.require().
-#init-hook=
-
-# Profiled execution.
-profile=no
-
-# Add files or directories to the blacklist. They should be base names, not
-# paths.
-ignore=CVS
-
-# Pickle collected data for later comparisons.
-persistent=no
-
-# List of plugins (as comma separated values of python modules names) to load,
-# usually to register additional checkers.
-load-plugins=
-
-
-[MESSAGES CONTROL]
-
-# Enable the message, report, category or checker with the given id(s). You can
-# either give multiple identifiers separated by comma (,) or put this option
-# multiple times. See also the "--disable" option for examples.
-#enable=
-
-# Disable the message, report, category or checker with the given id(s). You
-# can either give multiple identifiers separated by comma (,) or put this
-# option multiple times (only on the command line, not in the configuration
-# file where it should appear only once). You can also use "--disable=all" to
-# disable everything first and then reenable specific checks. For example, if
-# you want to run only the similarities checker, you can use "--disable=all
-# --enable=similarities". If you want to run only the classes checker, but have
-# no Warning level messages displayed, use "--disable=all --enable=classes
-# --disable=W"
-disable=attribute-defined-outside-init,arguments-differ,
-        bare-except,global-statement,protected-access,redefined-outer-name,
-        unused-argument,star-args,pointless-string-statement,old-style-class,
-        too-many-lines,missing-docstring,no-init,no-self-use,too-many-statements,
-        too-many-locals,too-few-public-methods,too-many-instance-attributes,
-        too-many-arguments,too-many-branches,too-many-public-methods,
-        multiple-statements,abstract-method,F0401,no-member,non-parent-init-called,
-        maybe-no-member
-
-# F0401: http://www.logilab.org/ticket/9386
-
-[REPORTS]
-
-# Set the output format. Available formats are text, parseable, colorized, msvs
-# (visual studio) and html. You can also give a reporter class, eg
-# mypackage.mymodule.MyReporterClass.
-output-format=text
-
-# Put messages in a separate file for each module / package specified on the
-# command line instead of printing them on stdout. Reports (if any) will be
-# written in a file named "pylint_global.[txt|html]".
-files-output=no
-
-# Tells whether to display a full report or only the messages
-reports=no
-
-# Python expression which should return a note less than 10 (10 is the highest
-# note). You have access to the variables error, warning and statement, which
-# respectively contain the number of errors / warnings messages and the total
-# number of statements analyzed. This is used by the global evaluation report
-# (RP0004).
-evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
-
-# Add a comment according to your evaluation note. This is used by the global
-# evaluation report (RP0004).
-comment=no
-
-# Template used to display messages. This is a python new-style format string
-# used to format the message information. See doc for all details
-#msg-template=
-
-
-[BASIC]
-
-# Required attributes for module, separated by a comma
-required-attributes=
-
-# List of builtins function names that should not be used, separated by a comma
-bad-functions=filter,apply,input
-
-# Regular expression which should only match correct module names
-module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
-
-# Regular expression which should only match correct module level names
-const-rgx=[a-zA-Z0-9_]{2,30}$
-
-# Regular expression which should only match correct class names
-class-rgx=[A-Z_][a-zA-Z0-9]+$
-
-# Regular expression which should only match correct function names
-function-rgx=[a-z_][a-zA-Z0-9_]{1,40}$
-
-# Regular expression which should only match correct method names
-method-rgx=[a-z_][a-zA-Z0-9_]{2,40}$
-
-# Regular expression which should only match correct instance attribute names
-attr-rgx=[a-z_][a-zA-Z0-9_]{1,30}$
-
-# Regular expression which should only match correct argument names
-argument-rgx=[a-z_][a-zA-Z0-9_]{0,30}$
-
-# Regular expression which should only match correct variable names
-variable-rgx=[a-z_][a-zA-Z0-9_]{0,30}$
-
-# Regular expression which should only match correct attribute names in class
-# bodies
-class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
-
-# Regular expression which should only match correct list comprehension /
-# generator expression variable names
-inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
-
-# Good variable names which should always be accepted, separated by a comma
-good-names=i,j,k,ex,Run,_
-
-# Bad variable names which should always be refused, separated by a comma
-bad-names=foo,bar,baz,toto,tutu,tata
-
-# Regular expression which should only match function or class names that do
-# not require a docstring.
-no-docstring-rgx=.*
-
-# Minimum line length for functions/classes that require docstrings, shorter
-# ones are exempt.
-docstring-min-length=-1
-
-
-[FORMAT]
-
-# Maximum number of characters on a single line.
-max-line-length=300
-
-# Regexp for a line that is allowed to be longer than the limit.
-ignore-long-lines=^\s*(# )?<?https?://\S+>?$
-
-# Maximum number of lines in a module
-max-module-lines=1000
-
-# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
-# tab).
-indent-string='    '
-
-
-[MISCELLANEOUS]
-
-# List of note tags to take in consideration, separated by a comma.
-notes=FIXME
-
-
-[SIMILARITIES]
-
-# Minimum lines number of a similarity.
-min-similarity-lines=4
-
-# Ignore comments when computing similarities.
-ignore-comments=yes
-
-# Ignore docstrings when computing similarities.
-ignore-docstrings=yes
-
-# Ignore imports when computing similarities.
-ignore-imports=no
-
-
-[TYPECHECK]
-
-# Tells whether missing members accessed in mixin class should be ignored. A
-# mixin class is detected if its name ends with "mixin" (case insensitive).
-ignore-mixin-members=yes
-
-# List of classes names for which member attributes should not be checked
-# (useful for classes with attributes dynamically set).
-ignored-classes=SQLObject
-
-# When zope mode is activated, add a predefined set of Zope acquired attributes
-# to generated-members.
-zope=no
-
-# List of members which are set dynamically and missed by pylint inference
-# system, and so shouldn't trigger E0201 when accessed. Python regular
-# expressions are accepted.
-generated-members=REQUEST,acl_users,aq_parent
-
-
-[VARIABLES]
-
-# Tells whether we should check for unused import in __init__ files.
-init-import=no
-
-# A regular expression matching the beginning of the name of dummy variables
-# (i.e. not used).
-dummy-variables-rgx=_$|dummy
-
-# List of additional names supposed to be defined in builtins. Remember that
-# you should avoid to define new builtins when possible.
-additional-builtins=
-
-
-[CLASSES]
-
-# List of interface methods to ignore, separated by a comma. This is used for
-# instance to not check methods defined in Zope's Interface base class.
-ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by
-
-# List of method names used to declare (i.e. assign) instance attributes.
-defining-attr-methods=__init__,__new__,setUp
-
-# List of valid names for the first argument in a class method.
-valid-classmethod-first-arg=cls
-
-# List of valid names for the first argument in a metaclass class method.
-valid-metaclass-classmethod-first-arg=mcs
-
-
-[DESIGN]
-
-# Maximum number of arguments for function / method
-max-args=5
-
-# Argument names that match this expression will be ignored. Default to name
-# with leading underscore
-ignored-argument-names=_.*
-
-# Maximum number of locals for function / method body
-max-locals=15
-
-# Maximum number of return / yield for function / method body
-max-returns=6
-
-# Maximum number of branch for function / method body
-max-branches=12
-
-# Maximum number of statements in function / method body
-max-statements=50
-
-# Maximum number of parents for a class (see R0901).
-max-parents=7
-
-# Maximum number of attributes for a class (see R0902).
-max-attributes=7
-
-# Minimum number of public methods for a class (see R0903).
-min-public-methods=2
-
-# Maximum number of public methods for a class (see R0904).
-max-public-methods=20
-
-
-[IMPORTS]
-
-# Deprecated modules which should not be used, separated by a comma
-deprecated-modules=regsub,TERMIOS,Bastion,rexec
-
-# Create a graph of every (i.e. internal and external) dependencies in the
-# given file (report RP0402 must not be disabled)
-import-graph=
-
-# Create a graph of external dependencies in the given file (report RP0402 must
-# not be disabled)
-ext-import-graph=
-
-# Create a graph of internal dependencies in the given file (report RP0402 must
-# not be disabled)
-int-import-graph=
-
-
-[EXCEPTIONS]
-
-# Exceptions that will emit a warning when being caught. Defaults to
-# "Exception"
-overgeneral-exceptions=Exception
--- a/mxtool/CheckCopyright.java	Sat Jul 25 10:25:36 2015 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,918 +0,0 @@
-/*
- * Copyright (c) 2011, 2015, Oracle and/or its affiliates. All rights reserved.
- * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
- *
- * This code is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License version 2 only, as
- * published by the Free Software Foundation.
- *
- * This code is distributed in the hope that it will be useful, but WITHOUT
- * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
- * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
- * version 2 for more details (a copy is included in the LICENSE file that
- * accompanied this code).
- *
- * You should have received a copy of the GNU General Public License version
- * 2 along with this work; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
- *
- * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
- * or visit www.oracle.com if you need additional information or have any
- * questions.
- */
-import java.io.*;
-import java.net.URISyntaxException;
-import java.net.URL;
-import java.util.*;
-import java.util.regex.*;
-
-
-/**
- * A program to check the existence and correctness of the copyright notice on a given set of sources.
- * Sources are defined to be those under management by Mercurial and various options are available
- * to limit the set of sources scanned.
- */
-public class CheckCopyright {
-
-    static class YearInfo {
-
-        final int firstYear;
-        final int lastYear;
-
-        YearInfo(int firstYear, int lastYear) {
-            this.firstYear = firstYear;
-            this.lastYear = lastYear;
-        }
-
-        @Override
-        public boolean equals(Object other) {
-            final YearInfo yearInfo = (YearInfo) other;
-            return yearInfo.firstYear == firstYear && yearInfo.lastYear == lastYear;
-        }
-
-        @Override
-        public int hashCode() {
-            return firstYear ^ lastYear;
-        }
-    }
-
-    static class Info extends YearInfo {
-
-        final String fileName;
-
-        Info(String fileName, int firstYear, int lastYear) {
-            super(firstYear, lastYear);
-            this.fileName = fileName;
-        }
-
-        @Override
-        public String toString() {
-            return fileName + " " + firstYear + ", " + lastYear;
-        }
-    }
-
-    private static abstract class CopyrightHandler {
-    	enum CommentType{
-    		STAR, HASH
-    	}
-
-        private static Map<String, CopyrightHandler> copyrightMap;
-        private static String copyrightFiles = ".*/makefile|.*/Makefile|.*\\.sh|.*\\.bash|.*\\.mk|.*\\.java|.*\\.c|.*\\.h|.*\\.py|.*\\.g|.*\\.r";
-        private static Pattern copyrightFilePattern;
-
-        protected final String suffix;
-        private CopyrightHandler customHandler;
-
-        CopyrightHandler(CommentType commentType) {
-            this.suffix = commentType.name().toLowerCase();
-            initCopyrightMap();
-        }
-
-        void addCustomhandler(CopyrightHandler copyrightHandler) {
-        	this.customHandler = copyrightHandler;
-        }
-
-        /**
-         * Add {@code extension} to the files handled by this {@code CopyrightHandler}.
-         */
-        protected void updateMap(String extension) {
-        	copyrightMap.put(extension, this);
-        }
-
-        static void addCopyrightFilesPattern(String pattern) {
-            copyrightFiles += "|" + pattern;
-        }
-
-        protected abstract void readCopyrights()  throws IOException;
-
-        protected abstract Matcher getMatcher(String fileName, String fileContent) throws IOException;
-
-        protected abstract String getText(String fileName) throws IOException ;
-
-        protected abstract boolean handlesFile(String fileName);
-
-        /**
-         * Checks that the Oracle copyright year info was correct.
-         * @return {@code false} if the year info was incorrect and was not fixed, otherwise {@code true}
-         * @throws IOException
-         */
-        protected abstract boolean checkYearInfo(String fileName, String fileContent, Matcher matcher, Info info) throws IOException;
-
-        static String getCopyrightText(String fileName) throws IOException {
-        	return getCopyrightHandler(fileName).getText(fileName);
-        }
-
-        private static CopyrightHandler getCopyrightHandler(String fileName) {
-        	initCopyrightMap();
-            if (!copyrightFilePattern.matcher(fileName).matches()) {
-                return null;
-            }
-            CopyrightHandler ck = getDefaultHandler(fileName);
-            if (ck.customHandler != null && ck.customHandler.handlesFile(fileName)) {
-            	return ck.customHandler;
-            } else {
-            	return ck;
-            }
-        }
-
-        private static void initCopyrightMap() {
-            if (copyrightMap == null) {
-                copyrightMap = new HashMap<String, CopyrightHandler>();
-                copyrightFilePattern = Pattern.compile(copyrightFiles);
-            }
-        }
-
-        static CopyrightHandler getDefaultHandler(String fileName) {
-            int index = fileName.lastIndexOf(File.separatorChar);
-            if (index > 0) {
-                fileName = fileName.substring(index + 1);
-            }
-            String ext = "";
-            index = fileName.lastIndexOf('.');
-            if (index > 0) {
-                ext = fileName.substring(index + 1);
-            }
-            if (fileName.equals("makefile")) {
-                ext = "mk";
-            }
-            CopyrightHandler ck = copyrightMap.get(ext);
-            assert ck != null : fileName;
-        	return ck;
-        }
-
-        protected String readCopyright(InputStream is) throws IOException {
-            byte[] b = new byte[16384];
-            int n = is.read(b);
-            is.close();
-            return new String(b, 0, n);
-    	}
-
-    }
-
-    private static class DefaultCopyrightHandler extends CopyrightHandler {
-        private static String ORACLE_COPYRIGHT = "oracle.copyright";
-        private static String ORACLE_COPYRIGHT_REGEX = "oracle.copyright.regex";
-
-        private String copyrightRegex;
-        private String copyright;
-        Pattern copyrightPattern;
-
-        DefaultCopyrightHandler(CopyrightHandler.CommentType commentType) throws IOException {
-    		super(commentType);
-    		if (commentType == CopyrightHandler.CommentType.STAR) {
-    			updateMap("java");
-    			updateMap("c");
-    			updateMap("h");
-    			updateMap("g");
-    		} else {
-    			updateMap("r");
-    			updateMap("R");
-    			updateMap("py");
-    			updateMap("sh");
-    			updateMap("mk");
-    			updateMap("bash");
-    			updateMap("");
-    		}
-    		readCopyrights();
-    	}
-
-    	private String readCopyright(String name) throws IOException {
-    		String copyRightDir = COPYRIGHT_DIR.getValue();
-    		String fileName = "copyrights/" + name + "." + suffix;
-    		String copyrightPath;
-    		if (copyRightDir != null) {
-    			copyrightPath = new File(new File(copyRightDir), fileName).getAbsolutePath();
-    		} else {
-    			URL url = CheckCopyright.class.getResource(fileName);
-    			try {
-    			copyrightPath = url.toURI().getPath();
-    			} catch (URISyntaxException ex) {
-    				throw new IOException(ex);
-    			}
-    		}
-            InputStream is = new FileInputStream(copyrightPath);
-            return readCopyright(is);
-        }
-
-    	@Override
-        protected void readCopyrights()  throws IOException {
-         	copyright = readCopyright(ORACLE_COPYRIGHT);
-         	copyrightRegex =  readCopyright(ORACLE_COPYRIGHT_REGEX);
-         	copyrightPattern = Pattern.compile(copyrightRegex, Pattern.DOTALL);
-        }
-
-    	@Override
-    	protected Matcher getMatcher(String fileName, String fileContent) {
-            return copyrightPattern.matcher(fileContent);
-    	}
-
-    	@Override
-        protected String getText(String fileName) {
-    		return copyright;
-    	}
-
-    	@Override
-    	protected boolean handlesFile(String fileName) {
-    		return true;
-    	}
-
-    	/**
-    	 * Check the year info against the copyright header.
-    	 * N.B. In the case of multiple matching groups, only the last group is checked.
-    	 * I.e., only the last line containing year info is checked/updated.
-    	 */
-    	@Override
-        protected boolean checkYearInfo(String fileName, String fileContent, Matcher matcher, Info info) throws IOException {
-            int yearInCopyright;
-            int yearInCopyrightIndex;
-            int groupCount = matcher.groupCount();
-            String yearInCopyrightString = matcher.group(groupCount);
-            yearInCopyright = Integer.parseInt(yearInCopyrightString);
-            yearInCopyrightIndex = matcher.start(groupCount);
-            if (yearInCopyright != info.lastYear) {
-                System.out.println(fileName + " copyright last modified year " + yearInCopyright + ", hg last modified year " + info.lastYear);
-                if (FIX.getValue()) {
-                    // Use currentYear as that is what it will be when it's checked in!
-                    System.out.println("updating last modified year of " + fileName + " to " + info.lastYear);
-                    // If the previous copyright only specified a single (initial) year, we convert it to the pair form
-                    String newContent = fileContent.substring(0, yearInCopyrightIndex);
-                    if (matcher.group(groupCount - 1) == null) {
-                    	// single year form
-                    	newContent += yearInCopyrightString + ", ";
-                    }
-                    newContent += info.lastYear + fileContent.substring(yearInCopyrightIndex + 4);
-                    final FileOutputStream os = new FileOutputStream(fileName);
-                    os.write(newContent.getBytes());
-                    os.close();
-                    return true;
-                } else {
-                	return false;
-                }
-            }
-            return true;
-    	}
-
-    }
-
-    private static class CustomCopyrightHandler extends CopyrightHandler {
-    	private Map<String, String> overrides = new HashMap<String, String>();
-    	private CopyrightHandler defaultHandler;
-
-    	CustomCopyrightHandler(CopyrightHandler.CommentType commentType, CopyrightHandler defaultHandler) {
-    		super(commentType);
-    		this.defaultHandler = defaultHandler;
-    	}
-
-    	void addFile(String fileName, String copyright) {
-    		overrides.put(fileName, copyright);
-    	}
-
-		@Override
-		protected void readCopyrights() throws IOException {
-		}
-
-		@Override
-		protected Matcher getMatcher(String fileName, String fileContent) throws IOException {
-			String copyright = overrides.get(fileName);
-			assert copyright != null : fileName;
-			try (InputStream fs = new FileInputStream(copyright + "." + suffix + ".regex")) {
-				return Pattern.compile(readCopyright(fs), Pattern.DOTALL).matcher(fileContent);
-			}
-		}
-
-		@Override
-		protected String getText(String fileName) throws IOException {
-			String copyright = overrides.get(fileName);
-			assert copyright != null : fileName;
-			try (InputStream fs = new FileInputStream(copyright + "." + suffix)) {
-				return readCopyright(fs);
-			}
-		}
-
-		@Override
-		protected boolean handlesFile(String fileName) {
-			return overrides.get(fileName) != null;
-		}
-
-		@Override
-        protected boolean checkYearInfo(String fileName, String fileContent, Matcher matcher, Info info) throws IOException {
-			// This is a bit tacky
-			String copyright = overrides.get(fileName);
-			if (copyright.endsWith("no.copyright")) {
-				return true;
-			}
-			return defaultHandler.checkYearInfo(fileName, fileContent, matcher, info);
-		}
-    }
-
-	private static void initCopyrightKinds() throws IOException {
-		CopyrightHandler starHandler = new DefaultCopyrightHandler(CopyrightHandler.CommentType.STAR);
-		CopyrightHandler hashHandler = new DefaultCopyrightHandler(CopyrightHandler.CommentType.HASH);
-
-		String customCopyrightDir = CUSTOM_COPYRIGHT_DIR.getValue();
-		if (customCopyrightDir != null) {
-			CustomCopyrightHandler customStarHandler = new CustomCopyrightHandler(CopyrightHandler.CommentType.STAR, starHandler);
-			CustomCopyrightHandler customHashHandler = new CustomCopyrightHandler(CopyrightHandler.CommentType.HASH, hashHandler);
-			starHandler.addCustomhandler(customStarHandler);
-			hashHandler.addCustomhandler(customHashHandler);
-
-			File overrides = new File(new File(customCopyrightDir), "overrides");
-			if (overrides.exists()) {
-				ArrayList<String> lines = new ArrayList<>();
-				boolean changed = false;
-				try (BufferedReader br = new BufferedReader(new FileReader(
-						overrides))) {
-					while (true) {
-						String line = br.readLine();
-						if (line == null) {
-							break;
-						}
-						if (line.length() == 0 || line.startsWith("#")) {
-							lines.add(line);
-							continue;
-						}
-						String[] parts = line.split(",");
-						// filename,copyright-file
-						CopyrightHandler defaultHandler = CopyrightHandler.getDefaultHandler(parts[0]);
-						if (defaultHandler == null) {
-							System.err.println("no default copyright handler for: " + parts[0]);
-							System.exit(1);
-						}
-						if (!new File(parts[0]).exists()) {
-							System.err.printf("file %s in overrides file does not exist", parts[0]);
-							if (FIX.getValue()) {
-								System.err.print(" - removing");
-								line = null;
-								changed = true;
-							}
-							System.err.println();
-						}
-						if (line != null) {
-							lines.add(line);
-						}
-						CustomCopyrightHandler customhandler = (CustomCopyrightHandler) defaultHandler.customHandler;
-						customhandler.addFile(parts[0], new File(new File(customCopyrightDir), parts[1]).getAbsolutePath());
-					}
-				}
-				if (changed) {
-					try (BufferedWriter bw = new BufferedWriter(new FileWriter(
-							overrides))) {
-						for (String line : lines) {
-							bw.write(line);
-							bw.write('\n');
-						}
-					}
-				}
-			}
-		}
-	}
-
-    private static int currentYear = Calendar.getInstance().get(Calendar.YEAR);
-    private static Options options = new Options();
-    private static Option<Boolean> help = options.newBooleanOption("help", false, "Show help message and exit.");
-    private static Option<String> COPYRIGHT_DIR = options.newStringOption("copyright-dir", null, "override default location of copyright files");
-    private static Option<List<String>> FILES_TO_CHECK = options.newStringListOption("files", null, "list of files to check");
-    private static Option<String> FILE_LIST = options.newStringOption("file-list", null, "file containing list of files to check");
-    private static Option<Boolean> DIR_WALK = options.newBooleanOption("list-dir", false, "check all files in directory tree requiring a copyright (ls -R)");
-    private static Option<Boolean> HG_ALL = options.newBooleanOption("hg-all", false, "check all hg managed files requiring a copyright (hg status --all)");
-    private static Option<Boolean> HG_MODIFIED = options.newBooleanOption("hg-modified", false, "check all modified hg managed files requiring a copyright (hg status)");
-    private static Option<Boolean> HG_OUTGOING = options.newBooleanOption("hg-outgoing", false, "check outgoing hg managed files requiring a copyright (hg outgoing)");
-    private static Option<String> HG_LOG = options.newStringOption("hg-last", "0", "check hg managed files requiring a copyright in last N changesets (hg log -l N)");
-    private static Option<List<String>> PROJECT = options.newStringListOption("projects", null, "filter files to specific projects");
-    private static Option<String> OUTGOING_REPO = options.newStringOption("hg-repo", null, "override outgoing repository");
-    private static Option<Boolean> EXHAUSTIVE = options.newBooleanOption("hg-exhaustive", false, "check all hg managed files");
-    private static Option<Boolean> FIX = options.newBooleanOption("fix", false, "fix all copyright errors");
-    private static Option<String> FILE_PATTERN = options.newStringOption("file-pattern", null, "append additional file patterns for copyright checks");
-    private static Option<Boolean> REPORT_ERRORS = options.newBooleanOption("report-errors", false, "report non-fatal errors");
-    private static Option<Boolean> HALT_ON_ERROR = options.newBooleanOption("halt-on-error", false, "continue after normally fatal error");
-    private static Option<String> HG_PATH = options.newStringOption("hg-path", "hg", "path to hg executable");
-    private static Option<Boolean> VERBOSE = options.newBooleanOption("verbose", false, "verbose output");
-    private static Option<Boolean> VERY_VERBOSE = options.newBooleanOption("very-verbose", false, "very verbose output");
-    private static Option<String> CUSTOM_COPYRIGHT_DIR = options.newStringOption("custom-copyright-dir", null, "file containing filenames with custom copyrights");
-
-    private static String CANNOT_FOLLOW_FILE = "abort: cannot follow";
-    private static String hgPath;
-    private static boolean error;
-//    private static File workSpaceDirectory;
-    private static boolean verbose;
-    private static boolean veryVerbose;
-
-    public static void main(String[] args) {
-        // parse the arguments
-        options.parseArguments(args);
-        if (help.getValue()) {
-            options.printHelp();
-            return;
-        }
-
-        verbose = VERBOSE.getValue();
-        veryVerbose = VERY_VERBOSE.getValue();
-
-        hgPath = HG_PATH.getValue();
-
-        if (FILE_PATTERN.getValue() != null) {
-            CopyrightHandler.addCopyrightFilesPattern(FILE_PATTERN.getValue());
-        }
-
-        try {
-           initCopyrightKinds();
-            List<String> filesToCheck = null;
-            if (HG_ALL.getValue()) {
-                filesToCheck = getAllFiles(true);
-            } else if (HG_OUTGOING.getValue()) {
-                filesToCheck = getOutgoingFiles();
-            } else if (HG_MODIFIED.getValue()) {
-                filesToCheck = getAllFiles(false);
-            } else if (Integer.parseInt(HG_LOG.getValue()) > 0) {
-                filesToCheck = getLastNFiles(Integer.parseInt(HG_LOG.getValue()));
-            } else if (FILE_LIST.getValue() != null) {
-                filesToCheck = readFileList(FILE_LIST.getValue());
-            } else if (DIR_WALK.getValue()) {
-            	filesToCheck = getDirWalkFiles();
-            } else if (FILES_TO_CHECK.getValue() != null) {
-                filesToCheck = FILES_TO_CHECK.getValue();
-            } else {
-            	// no option set, default to HG_ALL
-            	filesToCheck = getAllFiles(true);
-            }
-            if (filesToCheck != null && filesToCheck.size() > 0) {
-                processFiles(filesToCheck);
-            } else {
-                System.out.println("nothing to check");
-            }
-            System.exit(error ? 1 : 0);
-        } catch (Exception ex) {
-            System.err.println("processing failed: " + ex);
-            ex.printStackTrace();
-        }
-    }
-
-    private static void processFiles(List<String> fileNames) throws Exception {
-        final List<String> projects = PROJECT.getValue();
-        Calendar cal = Calendar.getInstance();
-        for (String fileName : fileNames) {
-            if (projects == null || isInProjects(fileName, projects)) {
-            	File file = new File(fileName);
-            	if (file.isDirectory()) {
-            		continue;
-            	}
-            	if (verbose) {
-            		System.out.println("checking " + fileName);
-            	}
-                try {
-                	Info info = null;
-                	if (DIR_WALK.getValue()) {
-                		info = getFromLastModified(cal, fileName);
-                	} else {
-                		final List<String> logInfo = hglog(fileName);
-                		if (logInfo.size() == 0) {
-                			// an added file, so go with last modified
-                			info = getFromLastModified(cal, fileName);
-                		} else {
-                			info = getInfo(fileName, true, logInfo);
-                		}
-                	}
-                    checkFile(fileName, info);
-                } catch (Exception e) {
-                    System.err.format("COPYRIGHT CHECK WARNING: error while processing %s: %s%n", fileName, e.getMessage());
-                }
-            }
-        }
-    }
-
-    private static Info getFromLastModified(Calendar cal, String fileName) {
-		File file = new File(fileName);
-		cal.setTimeInMillis(file.lastModified());
-		int year = cal.get(Calendar.YEAR);
-		return new Info(fileName, year, year);
-    }
-
-    private static boolean isInProjects(String fileName, List<String> projects) {
-        final int ix = fileName.indexOf(File.separatorChar);
-        if (ix < 0) {
-            return false;
-        }
-        final String fileProject = fileName.substring(0, ix);
-        for (String project : projects) {
-            if (fileProject.equals(project)) {
-                return true;
-            }
-        }
-        return false;
-    }
-
-    private static List<String> readFileList(String fileListName) throws IOException {
-        final List<String> result = new ArrayList<String>();
-        BufferedReader b = null;
-        try {
-            b = new BufferedReader(new FileReader(fileListName));
-            while (true) {
-                final String fileName = b.readLine();
-                if (fileName == null) {
-                    break;
-                }
-                if (fileName.length() == 0) {
-                    continue;
-                }
-                result.add(fileName);
-            }
-        } finally {
-            if (b != null) {
-                b.close();
-            }
-        }
-        return result;
-    }
-
-    private static Info getInfo(String fileName, boolean lastOnly, List<String> logInfo) {
-        // process sequence of changesets
-        int lastYear = 0;
-        int firstYear = 0;
-        int ix = 0;
-
-        while (ix < logInfo.size()) {
-        	Map<String, String> tagMap = new HashMap<>();
-        	ix = getChangeset(logInfo, ix, tagMap);
-        	String date = tagMap.get("date");
-            assert date != null;
-            final int csYear = getYear(date);
-            if (lastYear == 0) {
-                lastYear = csYear;
-                firstYear = lastYear;
-            } else {
-                firstYear = csYear;
-            }
-            // if we only want the last modified year, quit now
-            if (lastOnly) {
-                break;
-            }
-
-        }
-
-        if (HG_MODIFIED.getValue()) {
-            // We are only looking at modified and, therefore, uncommitted files.
-            // This means that the lastYear value will be the current year once the
-            // file is committed, so that is what we want to check against.
-            lastYear = currentYear;
-        }
-        return new Info(fileName, firstYear, lastYear);
-    }
-
-    /**
-     * Process all the changeset data, storing it in {@code outMap}.
-     * Return updated value of {@code ix}.
-     */
-    private static int getChangeset(List<String> logInfo, int ixx, Map<String, String> outMap) {
-    	int ix = ixx;
-    	String s = logInfo.get(ix++);
-    	while (s.length() > 0) {
-    		int cx = s.indexOf(':');
-    		String tag = s.substring(0, cx);
-    		String value = s.substring(cx + 1);
-    		outMap.put(tag, value);
-    		s = logInfo.get(ix++);
-    	}
-    	return ix;
-    }
-
-    private static int getYear(String dateLine) {
-        final String[] parts = dateLine.split(" ");
-        assert parts[parts.length - 2].startsWith("20");
-        return Integer.parseInt(parts[parts.length - 2]);
-    }
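getYear above takes the second-to-last whitespace-separated token of the changeset's date field, relying on the year sitting immediately before the timezone offset. A quick Python check of that assumption against a date value in hg's default log format (format assumed, value made up):

date_value = 'Tue Jul 28 18:33:42 2015 +0200'  # the text after 'date:' in hg log output
parts = date_value.split(' ')
print(parts[-2])  # 2015
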
-
-    private static void checkFile(String c, Info info) throws IOException {
-        String fileName = info.fileName;
-        File file = new File(fileName);
-        if (!file.exists()) {
-            System.err.println("COPYRIGHT CHECK WARNING: file " + file + " doesn't exist");
-            return;
-        }
-        int fileLength = (int) file.length();
-        byte[] fileContentBytes = new byte[fileLength];
-        FileInputStream is = new FileInputStream(file);
-        is.read(fileContentBytes);
-        is.close();
-        final String fileContent = new String(fileContentBytes);
-        CopyrightHandler copyrightHandler = CopyrightHandler.getCopyrightHandler(fileName);
-        if (file.getName().equals("Makefile")) {
-        	System.console();
-        }
-        if (copyrightHandler != null) {
-            Matcher copyrightMatcher = copyrightHandler.getMatcher(fileName, fileContent);
-            if (copyrightMatcher.matches()) {
-            	error = error | !copyrightHandler.checkYearInfo(fileName, fileContent, copyrightMatcher, info);
-            } else {
-            	// If copyright is missing, insert it, otherwise user has to manually fix existing copyright.
-				if (!fileContent.contains("Copyright")) {
-					System.out.print("file " + fileName + " has missing copyright");
-					if (FIX.getValue()) {
-						final FileOutputStream os = new FileOutputStream(file);
-						os.write(CopyrightHandler.getCopyrightText(fileName)
-								.getBytes());
-						os.write(fileContentBytes);
-						os.close();
-						System.out.println("...fixed");
-					} else {
-						System.out.println();
-						error = true;
-					}
-				} else {
-					System.out.println("file " + fileName + " has malformed copyright" + (FIX.getValue() ? " not fixing" : ""));
-					error = true;
-				}
-            }
-        } else if (EXHAUSTIVE.getValue()) {
-            System.out.println("ERROR: file " + fileName + " has no copyright");
-            error = true;
-        }
-    }
-
-
-    private static List<String> hglog(String fileName) throws Exception {
-        final String[] cmd = new String[] {hgPath, "log", "-f", fileName};
-        return exec(null, cmd, true);
-    }
-
-    private static List<String> getLastNFiles(int n) throws Exception {
-        final String[] cmd = new String[] {hgPath, "log", "-v", "-l", Integer.toString(n)};
-        return getFilesFiles(exec(null, cmd, false));
-    }
-
-    private static List<String> getAllFiles(boolean all) throws Exception {
-        final String[] cmd;
-        if (HG_MODIFIED.getValue()) {
-            cmd = new String[] {hgPath,  "status"};
-        } else {
-            cmd = new String[] {hgPath,  "status",  "--all"};
-        }
-        List<String> output = exec(null, cmd, true);
-        final List<String> result = new ArrayList<String>(output.size());
-        for (String s : output) {
-            final char ch = s.charAt(0);
-            if (!(ch == 'R' || ch == 'I' || ch == '?' ||  ch == '!')) {
-                result.add(s.substring(2));
-            }
-        }
-        return result;
-    }
-
-    private static List<String> getOutgoingFiles() throws Exception {
-        final String[] cmd;
-        if (OUTGOING_REPO.getValue() == null) {
-            cmd = new String[] {hgPath,  "-v", "outgoing"};
-        } else {
-            cmd = new String[] {hgPath,  "-v", "outgoing", OUTGOING_REPO.getValue()};
-        }
-
-        final List<String> output = exec(null, cmd, false); // no outgoing exits with result 1
-        return getFilesFiles(output);
-    }
-
-    private static List<String> getFilesFiles(List<String> output) {
-        // there may be multiple changesets, so merge their "files:" lines
-        final Map<String, String> outSet = new TreeMap<String, String>();
-        for (String s : output) {
-            if (s.startsWith("files:")) {
-                int ix = s.indexOf(' ');
-                while (ix < s.length() && s.charAt(ix) == ' ') {
-                    ix++;
-                }
-                final String[] files = s.substring(ix).split(" ");
-                for (String file : files) {
-                    outSet.put(file, file);
-                }
-            }
-        }
-        return new ArrayList<String>(outSet.values());
-    }
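getFilesFiles() above merges the space-separated "files:" lines that verbose 'hg log'/'hg outgoing' output prints per changeset into a single de-duplicated, sorted list. A minimal sketch of the same parsing in Python, for illustration only (the helper name and the sample output are not part of the tool):

    # Merge the "files:" lines of 'hg log -v' style output into one sorted,
    # de-duplicated file list, mirroring getFilesFiles() above.
    def merge_files_lines(log_lines):
        seen = set()
        for line in log_lines:
            if line.startswith("files:"):
                # everything after the leading "files:" token is a space-separated file list
                seen.update(line.split()[1:])
        return sorted(seen)

    sample = [
        "changeset:   100:abcdef012345",
        "files:       mx.py mxtool/mx",
        "changeset:   99:0123456789ab",
        "files:       mx.py README",
    ]
    print(merge_files_lines(sample))  # ['README', 'mx.py', 'mxtool/mx']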
-
-    private static List<String> getDirWalkFiles() {
-        File cwd = new File(System.getProperty("user.dir"));
-        ArrayList<String> result = new ArrayList<String>();
-        getDirWalkFiles(cwd, result);
-        // remove the "user.dir" prefix so that file names are relative, as hg reports them
-        String cwdPath = cwd.getAbsolutePath() + '/';
-        for (int i = 0; i < result.size(); i++) {
-            String path = result.get(i);
-            result.set(i, path.replace(cwdPath, ""));
-        }
-        return result;
-    }
-
-    private static void getDirWalkFiles(File dir, ArrayList<String> list) {
-        File[] files = dir.listFiles();
-        for (File file : files) {
-            if (ignoreFile(file.getName())) {
-                continue;
-            }
-            if (file.isDirectory()) {
-                getDirWalkFiles(file, list);
-            } else {
-                list.add(file.getAbsolutePath());
-            }
-        }
-    }
-
-    private static final String IGNORE_LIST = "\\.hg|.*\\.class|bin|src_gen";
-    private static final Pattern ignorePattern = Pattern.compile(IGNORE_LIST);
-
-    private static boolean ignoreFile(String name) {
-        return ignorePattern.matcher(name).matches();
-    }
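ignoreFile() relies on Matcher.matches(), so each alternative in IGNORE_LIST has to cover the entire file or directory name: "bin" is ignored, but "binary" is not. A small Python sketch of that whole-name behaviour (re.fullmatch is the closest equivalent); the pattern is copied from IGNORE_LIST, the rest is illustrative:

    import re

    # Whole-name filtering: an entry is ignored only if one alternative
    # matches the complete name, exactly as Matcher.matches() requires.
    IGNORE = re.compile(r"\.hg|.*\.class|bin|src_gen")

    def ignore_file(name):
        return IGNORE.fullmatch(name) is not None

    assert ignore_file(".hg") and ignore_file("Foo.class") and ignore_file("bin")
    assert not ignore_file("binary") and not ignore_file("mx.py")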
-
-    private static List<String> exec(File workingDir, String[] command, boolean failOnError) throws IOException, InterruptedException {
-        List<String> result = new ArrayList<String>();
-        if (veryVerbose) {
-            System.out.println("Executing process in directory: " + workingDir);
-            for (String c : command) {
-                System.out.println("  " + c);
-            }
-        }
-        final Process process = Runtime.getRuntime().exec(command, null, workingDir);
-        try {
-            result = readOutput(process.getInputStream());
-            final int exitValue = process.waitFor();
-            if (exitValue != 0) {
-                final List<String> errorResult = readOutput(process.getErrorStream());
-                if (REPORT_ERRORS.getValue()) {
-                    System.err.print("execution of command: ");
-                    for (String c : command) {
-                        System.err.print(c);
-                        System.err.print(' ');
-                    }
-                    System.err.println("failed with result " + exitValue);
-                    for (String e : errorResult) {
-                        System.err.println(e);
-                    }
-                }
-                if (failOnError && HALT_ON_ERROR.getValue()) {
-                    if (!cannotFollowNonExistentFile(errorResult)) {
-                        throw new Error("terminating");
-                    }
-                }
-            }
-        } finally {
-            process.destroy();
-        }
-        return result;
-    }
-
-    private static boolean cannotFollowNonExistentFile(List<String> errorResult) {
-        return errorResult.size() == 1 && errorResult.get(0).startsWith(CANNOT_FOLLOW_FILE);
-    }
-
-    private static List<String> readOutput(InputStream is) throws IOException {
-        final List<String> result = new ArrayList<String>();
-        BufferedReader bs = null;
-        try {
-            bs = new BufferedReader(new InputStreamReader(is));
-            while (true) {
-                final String line = bs.readLine();
-                if (line == null) {
-                    break;
-                }
-                result.add(line);
-            }
-        } finally {
-            if (bs != null) {
-                bs.close();
-            }
-        }
-        return result;
-    }
-
-    private static class Options {
-    	private static Map<String, Option<?>> optionMap  = new TreeMap<>();
-
-    	private Option<Boolean> newBooleanOption(String name, boolean defaultValue, String help) {
-    		Option<Boolean> option = new Option<Boolean>(name, help, defaultValue, false, false);
-    		optionMap.put(key(name), option);
-    		return option;
-    	}
-
-    	private Option<String> newStringOption(String name, String defaultValue, String help) {
-    		Option<String> option = new Option<String>(name, help, defaultValue);
-    		optionMap.put(key(name), option);
-    		return option;
-    	}
-
-    	private Option<List<String>> newStringListOption(String name, List<String> defaultValue, String help) {
-    		Option<List<String>> option = new Option<List<String>>(name, help, defaultValue, true, true);
-    		optionMap.put(key(name), option);
-    		return option;
-    	}
-
-    	private static String key(String name) {
-    		return "--" + name;
-    	}
-
-    	void parseArguments(String[] args) {
-            for (int i = 0; i < args.length; i++) {
-            	final String arg = args[i];
-            	if (arg.startsWith("--")) {
-            		Option<?> option = optionMap.get(arg);
-            		if (option == null || (option.consumesNext() && i == args.length - 1)) {
-            			System.out.println("usage:");
-            			printHelp();
-            			System.exit(1);
-            		}
-            		if (option.consumesNext()) {
-            			i++;
-            			option.setValue(args[i]);
-            		} else {
-            			option.setValue(true);
-            		}
-            	}
-            }
-    	}
-
-        void printHelp() {
-        	int maxKeyLen = 0;
-        	for (Map.Entry<String, Option<?>> entrySet : optionMap.entrySet()) {
-        		int l = entrySet.getKey().length();
-        		if (l > maxKeyLen) {
-        			maxKeyLen = l;
-        		}
-        	}
-        	for (Map.Entry<String, Option<?>> entrySet : optionMap.entrySet()) {
-        		String key = entrySet.getKey();
-        		System.out.printf("  %s", key);
-        		for (int i = 0; i < maxKeyLen - key.length(); i++) {
-        			System.out.print(' ');
-        		}
-        		System.out.printf("   %s%n", entrySet.getValue().help);
-        	}
-        }
-    }
-
-    private static class Option<T> {
-    	private final String name;
-    	private final String help;
-    	private final boolean consumesNext;
-    	private final boolean isList;
-    	private T value;
-
-    	Option(String name, String help, T defaultValue, boolean consumesNext, boolean isList) {
-    		this.name = name;
-    		this.help = help;
-    		this.value = defaultValue;
-    		this.consumesNext = consumesNext;
-    		this.isList = isList;
-
-    	}
-
-    	Option(String name, String help, T defaultValue) {
-    		this(name, help, defaultValue, true, false);
-    	}
-
-        T getValue() {
-    		return value;
-    	}
-
-        boolean consumesNext() {
-        	return consumesNext;
-        }
-
-        @SuppressWarnings("unchecked")
-        void setValue(boolean value) {
-            this.value = (T) Boolean.valueOf(value);
-        }
-
-        @SuppressWarnings("unchecked")
-        void setValue(String value) {
-            if (isList) {
-                String[] parts = value.split(",");
-                this.value = (T) Arrays.asList(parts);
-            } else {
-                this.value = (T) value;
-            }
-        }
-
-        @SuppressWarnings("unused")
-        String getName() {
-            return name;
-        }
-    }
-
-}
--- a/mxtool/ClasspathDump.java	Sat Jul 25 10:25:36 2015 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,9 +0,0 @@
-public class ClasspathDump {
-    public static void main(String[] args) {
-        System.out.print(System.getProperty("sun.boot.class.path"));
-        System.out.print("|");
-        System.out.print(System.getProperty("java.ext.dirs"));
-        System.out.print("|");
-        System.out.print(System.getProperty("java.endorsed.dirs"));
-    }
-}
\ No newline at end of file
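ClasspathDump prints the three bootstrap-related system properties on one line, separated by '|', so a caller can split the output back into its parts. A hedged sketch, in Python, of how such output could be consumed; the function name and sample values are illustrative only:

    # Split ClasspathDump's single output line back into its three fields.
    # Any field may be empty if the corresponding system property is unset.
    def parse_classpath_dump(output):
        boot_class_path, ext_dirs, endorsed_dirs = (output.split("|") + ["", "", ""])[:3]
        return {
            "sun.boot.class.path": boot_class_path,
            "java.ext.dirs": ext_dirs,
            "java.endorsed.dirs": endorsed_dirs,
        }

    print(parse_classpath_dump("/jdk/lib/rt.jar|/jdk/lib/ext|"))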
--- a/mxtool/URLConnectionDownload.java	Sat Jul 25 10:25:36 2015 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,163 +0,0 @@
-/*
- * Copyright (c) 2011, Oracle and/or its affiliates. All rights reserved.
- * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
- *
- * This code is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License version 2 only, as
- * published by the Free Software Foundation.
- *
- * This code is distributed in the hope that it will be useful, but WITHOUT
- * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
- * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
- * version 2 for more details (a copy is included in the LICENSE file that
- * accompanied this code).
- *
- * You should have received a copy of the GNU General Public License version
- * 2 along with this work; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
- *
- * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
- * or visit www.oracle.com if you need additional information or have any
- * questions.
- */
-import java.io.*;
-import java.net.*;
-import java.util.*;
-import java.util.regex.*;
-
-/**
- * Downloads content from a given URL to a given file.
- * The destination path and the candidate URLs are passed on the command line; see {@code main}.
- */
-public class URLConnectionDownload {
-    /**
-     * Iterates over a list of environment variables to find one that specifies a proxy.
-     *
-     * @param proxyEnvVariableNames the environment variable names to try, in order
-     * @param propPrefix indicates which proxy properties to set (i.e., http or https)
-     * @return a string specifying the proxy URL, or an empty string if none was found
-     */
-    private static String setProxy(String[] proxyEnvVariableNames, String propPrefix) {
-        String proxy = null;
-        String proxyEnvVar = "";
-        for (String envvar : proxyEnvVariableNames) {
-            proxy = System.getenv(envvar);
-            if (proxy != null) {
-                proxyEnvVar = envvar;
-                break;
-            }
-        }
-        if (proxy != null) {
-            Pattern p = Pattern.compile("(?:http://)?([^:]+)(:\\d+)?");
-            Matcher m = p.matcher(proxy);
-            if (m.matches()) {
-                String host = m.group(1);
-                String port = m.group(2);
-                System.setProperty(propPrefix + ".proxyHost", host);
-                if (port != null) {
-                    port = port.substring(1); // strip ':'
-                    System.setProperty(propPrefix + ".proxyPort", port);
-                }
-                return proxy;
-            } else {
-                System.err.println("Value of " + proxyEnvVar + " is not valid:  " + proxy);
-            }
-        } else {
-            System.err.println("** If behind a firewall without direct internet access, use the " + proxyEnvVariableNames[0] + "  environment variable (e.g. 'env " + proxyEnvVariableNames[0] +
-                            "=proxy.company.com:80 max ...') or download manually with a web browser.");
-        }
-        return "";
-    }
-
-    /**
-     * Downloads content from a given URL to a given file.
-     *
-     * @param args args[0] is the path to write the content to; an optional "-v" flag may follow;
-     *            the remaining arguments are the URLs to try, stopping after the first successful one
-     */
-    public static void main(String[] args) {
-        File path = new File(args[0]);
-        boolean verbose = args[1].equals("-v");
-        int offset = verbose ? 2 : 1;
-        String[] urls = new String[args.length - offset];
-        System.arraycopy(args, offset, urls, 0, urls.length);
-
-        File parent = path.getParentFile();
-        makeDirectory(parent);
-
-        // Enable use of system proxies
-        System.setProperty("java.net.useSystemProxies", "true");
-
-        // Set standard proxy if any
-        String proxy = setProxy(new String[]{"HTTP_PROXY", "http_proxy"}, "http");
-        // Set the proxy for secure http if explicitly set, defaulting to the http proxy otherwise
-        String secureProxy = setProxy(new String[]{"HTTPS_PROXY", "https_proxy", "HTTP_PROXY", "http_proxy"}, "https");
-        String proxyMsg = "";
-        if (secureProxy.length() > 0 && proxy.length() > 0 && !secureProxy.equals(proxy)) {
-            proxyMsg = " via " + proxy + " / " + secureProxy;
-        } else if (proxy.length() > 0) {
-            proxyMsg = " via " + proxy;
-        } else if (secureProxy.length() > 0) {
-            proxyMsg = " via " + secureProxy;
-        }
-
-        for (String s : urls) {
-            try {
-                while (true) {
-                    System.err.println("Downloading " + s + " to  " + path + proxyMsg);
-                    URL url = new URL(s);
-                    URLConnection conn = url.openConnection();
-                    // 10 second timeout to establish connection
-                    conn.setConnectTimeout(10000);
-
-                    if (conn instanceof HttpURLConnection) {
-                        // HttpURLConnection follows redirects by default,
-                        // but not if the redirect changes the protocol (e.g. http ->
-                        // https). While this is a sane default, in our
-                        // situation it is okay to follow a protocol transition.
-                        HttpURLConnection httpconn = (HttpURLConnection) conn;
-                        switch (httpconn.getResponseCode()) {
-                            case HttpURLConnection.HTTP_MOVED_PERM:
-                            case HttpURLConnection.HTTP_MOVED_TEMP:
-                                System.err.println("follow redirect...");
-                                s = httpconn.getHeaderField("Location");
-                                continue;
-                        }
-                    }
-                    InputStream in = conn.getInputStream();
-                    int size = conn.getContentLength();
-                    FileOutputStream out = new FileOutputStream(path);
-                    int read = 0;
-                    byte[] buf = new byte[8192];
-                    int n = 0;
-                    while ((read = in.read(buf)) != -1) {
-                        n += read;
-                        if (verbose) {
-                            long percent = ((long) n * 100 / size);
-                            System.err.print("\r " + n + " bytes " + (size == -1 ? "" : " (" + percent + "%)"));
-                        }
-                        out.write(buf, 0, read);
-                    }
-                    System.err.println();
-                    out.close();
-                    in.close();
-                    return;
-                }
-            } catch (MalformedURLException e) {
-                throw new Error("Error in URL " + s, e);
-            } catch (IOException e) {
-                System.err.println("Error reading from  " + s + ":  " + e);
-                path.delete();
-            }
-        }
-        throw new Error("Could not download content to  " + path + " from  " + Arrays.toString(urls));
-    }
-
-    private static void makeDirectory(File directory) {
-        if (!directory.exists() && !directory.mkdirs()) {
-            throw new Error("Could not make directory " + directory);
-        }
-    }
-}
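setProxy() above accepts proxy specifications of the form host, host:port or http://host:port and separates the optional port before setting the http/https proxy system properties. The same parsing, transliterated to Python for illustration (none of these names belong to the tool):

    import re

    # Accepts 'host', 'host:port' and 'http://host:port'; the optional port
    # keeps its leading ':' in the capture group and is stripped afterwards,
    # as in setProxy() above.
    PROXY_RE = re.compile(r"(?:http://)?([^:]+)(:\d+)?")

    def split_proxy(value):
        m = PROXY_RE.fullmatch(value)
        if m is None:
            return None
        host, port = m.group(1), m.group(2)
        return host, (port[1:] if port else None)

    print(split_proxy("http://proxy.company.com:80"))  # ('proxy.company.com', '80')
    print(split_proxy("proxy.company.com"))            # ('proxy.company.com', None)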
--- a/mxtool/copyrights/oracle.copyright.hash	Sat Jul 25 10:25:36 2015 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,22 +0,0 @@
-#
-# Copyright (c) 2014, Oracle and/or its affiliates. All rights reserved.
-# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
-#
-# This code is free software; you can redistribute it and/or modify it
-# under the terms of the GNU General Public License version 2 only, as
-# published by the Free Software Foundation.
-#
-# This code is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
-# FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
-# version 2 for more details (a copy is included in the LICENSE file that
-# accompanied this code).
-#
-# You should have received a copy of the GNU General Public License version
-# 2 along with this work; if not, write to the Free Software Foundation,
-# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
-# or visit www.oracle.com if you need additional information or have any
-# questions.
-#
--- a/mxtool/copyrights/oracle.copyright.regex.hash	Sat Jul 25 10:25:36 2015 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +0,0 @@
-(?:#!.*\n#\n#\ -*\n)?#\n# Copyright \(c\) (?:(20[0-9][0-9]), )?(20[0-9][0-9]), Oracle and/or its affiliates. All rights reserved.\n# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.\n#\n# This code is free software; you can redistribute it and/or modify it\n# under the terms of the GNU General Public License version 2 only, as\n# published by the Free Software Foundation.\n#\n# This code is distributed in the hope that it will be useful, but WITHOUT\n# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or\n# FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License\n# version 2 for more details \(a copy is included in the LICENSE file that\n# accompanied this code\).\n#\n# You should have received a copy of the GNU General Public License version\n# 2 along with this work; if not, write to the Free Software Foundation,\n# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.\n#\n# Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA\n# or visit www.oracle.com if you need additional information or have any\n# questions.\n#\n.*
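The year portion of this pattern, (?:(20[0-9][0-9]), )?(20[0-9][0-9]), makes the first (creation) year optional, so both "Copyright (c) 2015, Oracle ..." and "Copyright (c) 2012, 2015, Oracle ..." headers match, with the years exposed as capture groups. A small Python sketch of just that behaviour:

    import re

    # Only the year portion of the header regex is reproduced here.
    YEARS = re.compile(r"Copyright \(c\) (?:(20[0-9][0-9]), )?(20[0-9][0-9]), Oracle")

    m = YEARS.search("# Copyright (c) 2012, 2015, Oracle and/or its affiliates.")
    print(m.group(1), m.group(2))   # 2012 2015

    m = YEARS.search("# Copyright (c) 2015, Oracle and/or its affiliates.")
    print(m.group(1), m.group(2))   # None 2015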
--- a/mxtool/copyrights/oracle.copyright.regex.star	Sat Jul 25 10:25:36 2015 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +0,0 @@
-/\*\n \* Copyright \(c\) (?:(20[0-9][0-9]), )?(20[0-9][0-9]), Oracle and/or its affiliates. All rights reserved.\n \* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.\n \*\n \* This code is free software; you can redistribute it and/or modify it\n \* under the terms of the GNU General Public License version 2 only, as\n \* published by the Free Software Foundation.\n \*\n \* This code is distributed in the hope that it will be useful, but WITHOUT\n \* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or\n \* FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License\n \* version 2 for more details \(a copy is included in the LICENSE file that\n \* accompanied this code\).\n \*\n \* You should have received a copy of the GNU General Public License version\n \* 2 along with this work; if not, write to the Free Software Foundation,\n \* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.\n \*\n \* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA\n \* or visit www.oracle.com if you need additional information or have any\n \* questions.\n \*/\n.*
--- a/mxtool/copyrights/oracle.copyright.star	Sat Jul 25 10:25:36 2015 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,22 +0,0 @@
-/*
- * Copyright (c) 2014, Oracle and/or its affiliates. All rights reserved.
- * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
- *
- * This code is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License version 2 only, as
- * published by the Free Software Foundation.
- *
- * This code is distributed in the hope that it will be useful, but WITHOUT
- * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
- * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
- * version 2 for more details (a copy is included in the LICENSE file that
- * accompanied this code).
- *
- * You should have received a copy of the GNU General Public License version
- * 2 along with this work; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
- *
- * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
- * or visit www.oracle.com if you need additional information or have any
- * questions.
- */
--- a/mxtool/copyrights/oracle.copyright.upl.regex.star	Sat Jul 25 10:25:36 2015 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +0,0 @@
-/\*\n \* Copyright \(c\) (?:(20[0-9][0-9]), )?(20[0-9][0-9]), Oracle and/or its affiliates\. All rights reserved\.\n \* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER\.\n \*\n \* The Universal Permissive License \(UPL\), Version 1\.0\n \* \n \* Subject to the condition set forth below, permission is hereby granted to any\n \* person obtaining a copy of this software, associated documentation and/or\n \* data \(collectively the "Software"\), free of charge and under any and all\n \* copyright rights in the Software, and any and all patent rights owned or\n \* freely licensable by each licensor hereunder covering either \(i\) the\n \* unmodified Software as contributed to or provided by such licensor, or \(ii\)\n \* the Larger Works \(as defined below\), to deal in both\n \* \n \* \(a\) the Software, and\n \* \n \* \(b\) any piece of software and/or hardware listed in the lrgrwrks\.txt file if\n \* one is included with the Software each a "Larger Work" to which the Software\n \* is contributed by such licensors\),\n \* \n \* without restriction, including without limitation the rights to copy, create\n \* derivative works of, display, perform, and distribute the Software and make,\n \* use, sell, offer for sale, import, export, have made, and have sold the\n \* Software and the Larger Work\(s\), and to sublicense the foregoing rights on\n \* either these or other terms\.\n \* \n \* This license is subject to the following condition:\n \* \n \* The above copyright notice and either this complete permission notice or at a\n \* minimum a reference to the UPL must be included in all copies or substantial\n \* portions of the Software\.\n \* \n \* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n \* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n \* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT\. IN NO EVENT SHALL THE\n \* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n \* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n \* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n \* SOFTWARE\.\n \*/\n.*
--- a/mxtool/copyrights/oracle.copyright.upl.star	Sat Jul 25 10:25:36 2015 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,40 +0,0 @@
-/*
- * Copyright (c) 2015, Oracle and/or its affiliates. All rights reserved.
- * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
- *
- * The Universal Permissive License (UPL), Version 1.0
- * 
- * Subject to the condition set forth below, permission is hereby granted to any
- * person obtaining a copy of this software, associated documentation and/or
- * data (collectively the "Software"), free of charge and under any and all
- * copyright rights in the Software, and any and all patent rights owned or
- * freely licensable by each licensor hereunder covering either (i) the
- * unmodified Software as contributed to or provided by such licensor, or (ii)
- * the Larger Works (as defined below), to deal in both
- * 
- * (a) the Software, and
- * 
- * (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if
- * one is included with the Software each a "Larger Work" to which the Software
- * is contributed by such licensors),
- * 
- * without restriction, including without limitation the rights to copy, create
- * derivative works of, display, perform, and distribute the Software and make,
- * use, sell, offer for sale, import, export, have made, and have sold the
- * Software and the Larger Work(s), and to sublicense the foregoing rights on
- * either these or other terms.
- * 
- * This license is subject to the following condition:
- * 
- * The above copyright notice and either this complete permission notice or at a
- * minimum a reference to the UPL must be included in all copies or substantial
- * portions of the Software.
- * 
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
--- a/mxtool/mx	Sat Jul 25 10:25:36 2015 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,67 +0,0 @@
-#!/bin/bash
-#
-# ----------------------------------------------------------------------------------------------------
-#
-# Copyright (c) 2007, 2011, Oracle and/or its affiliates. All rights reserved.
-# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
-#
-# This code is free software; you can redistribute it and/or modify it
-# under the terms of the GNU General Public License version 2 only, as
-# published by the Free Software Foundation.
-#
-# This code is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
-# FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
-# version 2 for more details (a copy is included in the LICENSE file that
-# accompanied this code).
-#
-# You should have received a copy of the GNU General Public License version
-# 2 along with this work; if not, write to the Free Software Foundation,
-# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
-# or visit www.oracle.com if you need additional information or have any
-# questions.
-#
-# ----------------------------------------------------------------------------------------------------
-
-dir=`/bin/pwd`
-
-# Resolve location of this script so that mx.py can be found in the same directory
-source="${BASH_SOURCE[${#BASH_SOURCE[@]} - 1]}"
-while [ -h "$source" ]; do
-  dir="$(cd -P "$(dirname "$source")" && pwd)"
-  source="$(readlink "$source")"
-  [[ $source != /* ]] && source="$dir/$source"
-done
-dir="$(cd -P "$(dirname "$source")" && pwd)"
-
-if [ ! -f "$dir/mx.py" ]; then
-    echo "Cannot find mx.py in $dir"
-    exit 1
-fi
-
-# Not all systems (e.g. Mac OS X) have 'python2' on the path.
-type python2.7 >/dev/null 2>&1
-if [ $? -eq 0 ]; then
-    python_exe=python2.7
-else
-    type python2 > /dev/null 2>&1
-    if [ $? -eq 0 ]; then
-        python_exe=python2
-    else
-        python_exe=python
-    fi
-fi
-
-$python_exe <<END
-import sys
-major, minor, micro, _, _ = sys.version_info
-if major != 2 or minor != 7:
-    raise SystemExit('The mx.py script requires Python 2.7, not {0}.{1}.{2}'.format(major, minor, micro))
-END
-if [ $? -eq 0 ]; then
-    exec $python_exe -u "$dir/mx.py" "$@"
-fi
-
-#end of file
--- a/mxtool/mx.py	Sat Jul 25 10:25:36 2015 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,5771 +0,0 @@
-#!/usr/bin/env python2.7
-#
-# ----------------------------------------------------------------------------------------------------
-#
-# Copyright (c) 2007, 2015, Oracle and/or its affiliates. All rights reserved.
-# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
-#
-# This code is free software; you can redistribute it and/or modify it
-# under the terms of the GNU General Public License version 2 only, as
-# published by the Free Software Foundation.
-#
-# This code is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
-# FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
-# version 2 for more details (a copy is included in the LICENSE file that
-# accompanied this code).
-#
-# You should have received a copy of the GNU General Public License version
-# 2 along with this work; if not, write to the Free Software Foundation,
-# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
-# or visit www.oracle.com if you need additional information or have any
-# questions.
-#
-# ----------------------------------------------------------------------------------------------------
-#
-r"""
-mx is a command line tool for managing the development of Java code organized as suites of projects.
-
-Version 1.x supports a single suite of projects.
-
-Full documentation can be found at https://wiki.openjdk.java.net/display/Graal/The+mx+Tool
-"""
-
-import sys, os, errno, time, subprocess, shlex, types, StringIO, zipfile, signal, xml.sax.saxutils, tempfile, fnmatch, platform
-import textwrap
-import socket
-import tarfile
-import hashlib
-import xml.parsers.expat
-import shutil, re, xml.dom.minidom
-import pipes
-import difflib
-from collections import Callable
-from threading import Thread
-from argparse import ArgumentParser, REMAINDER
-from os.path import join, basename, dirname, exists, getmtime, isabs, expandvars, isdir, isfile
-
-try:
-    # needed to work around https://bugs.python.org/issue1927
-    import readline
-    #then make pylint happy..
-    readline.get_line_buffer()
-except ImportError:
-    pass
-
-# Support for Python 2.6
-def check_output(*popenargs, **kwargs):
-    process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs)
-    output, _ = process.communicate()
-    retcode = process.poll()
-    if retcode:
-        cmd = kwargs.get("args")
-        if cmd is None:
-            cmd = popenargs[0]
-        error = subprocess.CalledProcessError(retcode, cmd)
-        error.output = output
-        raise error
-    return output
-
-# Support for jython
-def is_jython():
-    return sys.platform.startswith('java')
-
-if not is_jython():
-    import multiprocessing
-
-def cpu_count():
-    if is_jython():
-        from java.lang import Runtime
-        runtime = Runtime.getRuntime()
-        return runtime.availableProcessors()
-    else:
-        return multiprocessing.cpu_count()
-
-
-try: subprocess.check_output
-except AttributeError: subprocess.check_output = check_output
-
-try: zipfile.ZipFile.__enter__
-except AttributeError:
-    zipfile.ZipFile.__enter__ = lambda self: self
-    zipfile.ZipFile.__exit__ = lambda self, t, value, traceback: self.close()
-
-_projects = dict()
-_libs = dict()
-_jreLibs = dict()
-_dists = dict()
-_suites = dict()
-_annotationProcessors = None
-_primary_suite_path = None
-_primary_suite = None
-_opts = None
-_extra_java_homes = []
-_default_java_home = None
-_warn = False
-
-"""
-A distribution is a jar or zip file containing the output from one or more Java projects.
-"""
-class Distribution:
-    def __init__(self, suite, name, path, sourcesPath, deps, mainClass, excludedDependencies, distDependencies, javaCompliance, isProcessorDistribution=False):
-        self.suite = suite
-        self.name = name
-        self.path = path.replace('/', os.sep)
-        self.path = _make_absolute(self.path, suite.dir)
-        self.sourcesPath = _make_absolute(sourcesPath.replace('/', os.sep), suite.dir) if sourcesPath else None
-        self.deps = deps
-        self.update_listeners = set()
-        self.mainClass = mainClass
-        self.excludedDependencies = excludedDependencies
-        self.distDependencies = distDependencies
-        self.javaCompliance = JavaCompliance(javaCompliance) if javaCompliance else None
-        self.isProcessorDistribution = isProcessorDistribution
-
-    def sorted_deps(self, includeLibs=False, transitive=False, includeAnnotationProcessors=False):
-        deps = []
-        if transitive:
-            for depDist in [distribution(name) for name in self.distDependencies]:
-                for d in depDist.sorted_deps(includeLibs=includeLibs, transitive=True):
-                    if d not in deps:
-                        deps.append(d)
-        try:
-            excl = [dependency(d) for d in self.excludedDependencies]
-        except SystemExit as e:
-            abort('invalid excluded dependency for {0} distribution: {1}'.format(self.name, e))
-        return deps + [d for d in sorted_deps(self.deps, includeLibs=includeLibs, includeAnnotationProcessors=includeAnnotationProcessors) if d not in excl]
-
-    def __str__(self):
-        return self.name
-
-    def add_update_listener(self, listener):
-        self.update_listeners.add(listener)
-
-    def get_dist_deps(self, includeSelf=True, transitive=False):
-        deps = []
-        if includeSelf:
-            deps.append(self)
-        for name in self.distDependencies:
-            dist = distribution(name)
-            if dist not in deps:
-                deps.append(dist)
-        if transitive:
-            for depName in self.distDependencies:
-                for recDep in distribution(depName).get_dist_deps(False, True):
-                    if recDep not in deps:
-                        deps.append(recDep)
-        return list(deps)
-
-    """
-    Gets the directory in which the IDE project configuration
-    for this distribution is generated. If this is a distribution
-    derived from a project defining an annotation processor, then
-    None is return to indicate no IDE configuration should be
-    created for this distribution.
-    """
-    def get_ide_project_dir(self):
-        if hasattr(self, 'definingProject') and self.definingProject.definedAnnotationProcessorsDist == self:
-            return None
-        if hasattr(self, 'subDir'):
-            return join(self.suite.dir, self.subDir, self.name + '.dist')
-        else:
-            return join(self.suite.dir, self.name + '.dist')
-
-    def make_archive(self):
-        # are sources combined into main archive?
-        unified = self.path == self.sourcesPath
-
-        with Archiver(self.path) as arc:
-            with Archiver(None if unified else self.sourcesPath) as srcArcRaw:
-                srcArc = arc if unified else srcArcRaw
-                services = {}
-                def overwriteCheck(zf, arcname, source):
-                    if os.path.basename(arcname).startswith('.'):
-                        logv('Excluding dotfile: ' + source)
-                        return True
-                    if not hasattr(zf, '_provenance'):
-                        zf._provenance = {}
-                    existingSource = zf._provenance.get(arcname, None)
-                    isOverwrite = False
-                    if existingSource and existingSource != source:
-                        if arcname[-1] != os.path.sep:
-                            logv('warning: ' + self.path + ': avoid overwrite of ' + arcname + '\n  new: ' + source + '\n  old: ' + existingSource)
-                        isOverwrite = True
-                    zf._provenance[arcname] = source
-                    return isOverwrite
-
-                if self.mainClass:
-                    manifest = "Manifest-Version: 1.0\nMain-Class: %s\n\n" % (self.mainClass)
-                    if not overwriteCheck(arc.zf, "META-INF/MANIFEST.MF", "project files"):
-                        arc.zf.writestr("META-INF/MANIFEST.MF", manifest)
-
-                for dep in self.sorted_deps(includeLibs=True):
-                    isCoveredByDependency = False
-                    for d in self.distDependencies:
-                        if dep in _dists[d].sorted_deps(includeLibs=True, transitive=True):
-                            logv("Excluding {0} from {1} because it's provided by the dependency {2}".format(dep.name, self.path, d))
-                            isCoveredByDependency = True
-                            break
-
-                    if isCoveredByDependency:
-                        continue
-
-                    if dep.isLibrary():
-                        l = dep
-                        # merge library jar into distribution jar
-                        logv('[' + self.path + ': adding library ' + l.name + ']')
-                        lpath = l.get_path(resolve=True)
-                        libSourcePath = l.get_source_path(resolve=True)
-                        if lpath:
-                            with zipfile.ZipFile(lpath, 'r') as lp:
-                                for arcname in lp.namelist():
-                                    if arcname.startswith('META-INF/services/') and not arcname == 'META-INF/services/':
-                                        service = arcname[len('META-INF/services/'):]
-                                        assert '/' not in service
-                                        services.setdefault(service, []).extend(lp.read(arcname).splitlines())
-                                    else:
-                                        if not overwriteCheck(arc.zf, arcname, lpath + '!' + arcname):
-                                            arc.zf.writestr(arcname, lp.read(arcname))
-                        if srcArc.zf and libSourcePath:
-                            with zipfile.ZipFile(libSourcePath, 'r') as lp:
-                                for arcname in lp.namelist():
-                                    if not overwriteCheck(srcArc.zf, arcname, lpath + '!' + arcname):
-                                        srcArc.zf.writestr(arcname, lp.read(arcname))
-                    elif dep.isProject():
-                        p = dep
-
-                        if self.javaCompliance:
-                            if p.javaCompliance > self.javaCompliance:
-                                abort("Compliance level doesn't match: Distribution {0} requires {1}, but {2} is {3}.".format(self.name, self.javaCompliance, p.name, p.javaCompliance))
-
-                        logv('[' + self.path + ': adding project ' + p.name + ']')
-                        outputDir = p.output_dir()
-                        for root, _, files in os.walk(outputDir):
-                            relpath = root[len(outputDir) + 1:]
-                            if relpath == join('META-INF', 'services'):
-                                for service in files:
-                                    with open(join(root, service), 'r') as fp:
-                                        services.setdefault(service, []).extend([provider.strip() for provider in fp.readlines()])
-                            elif relpath == join('META-INF', 'providers'):
-                                for provider in files:
-                                    with open(join(root, provider), 'r') as fp:
-                                        for service in fp:
-                                            services.setdefault(service.strip(), []).append(provider)
-                            else:
-                                for f in files:
-                                    arcname = join(relpath, f).replace(os.sep, '/')
-                                    if not overwriteCheck(arc.zf, arcname, join(root, f)):
-                                        arc.zf.write(join(root, f), arcname)
-                        if srcArc.zf:
-                            sourceDirs = p.source_dirs()
-                            if p.source_gen_dir():
-                                sourceDirs.append(p.source_gen_dir())
-                            for srcDir in sourceDirs:
-                                for root, _, files in os.walk(srcDir):
-                                    relpath = root[len(srcDir) + 1:]
-                                    for f in files:
-                                        if f.endswith('.java'):
-                                            arcname = join(relpath, f).replace(os.sep, '/')
-                                            if not overwriteCheck(srcArc.zf, arcname, join(root, f)):
-                                                srcArc.zf.write(join(root, f), arcname)
-
-                for service, providers in services.iteritems():
-                    arcname = 'META-INF/services/' + service
-                    arc.zf.writestr(arcname, '\n'.join(providers))
-
-        self.notify_updated()
-
-
-    def notify_updated(self):
-        for l in self.update_listeners:
-            l(self)
-
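make_archive() above collects META-INF/services entries from constituent libraries and project output directories into a single 'services' dictionary and writes one combined entry per service at the end. A reduced sketch of that merging step, with purely hypothetical service and provider names:

    # Merge per-jar/per-project service registrations so that a single
    # META-INF/services/<service> entry can be written for each service.
    def merge_service_providers(*contributions):
        services = {}
        for contribution in contributions:
            for service, providers in contribution.items():
                services.setdefault(service, []).extend(providers)
        return services

    merged = merge_service_providers(
        {"com.example.Tool": ["com.example.impl.ToolA"]},
        {"com.example.Tool": ["com.example.impl.ToolB"],
         "com.example.Codec": ["com.example.impl.CodecX"]},
    )
    for service, providers in merged.items():
        print("META-INF/services/" + service, "->", ", ".join(providers))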
-"""
-A dependency is a library or project specified in a suite.
-"""
-class Dependency:
-    def __init__(self, suite, name):
-        self.name = name
-        self.suite = suite
-
-    def __cmp__(self, other):
-        return cmp(self.name, other.name)
-
-    def __str__(self):
-        return self.name
-
-    def __eq__(self, other):
-        return self.name == other.name
-
-    def __ne__(self, other):
-        return self.name != other.name
-
-    def __hash__(self):
-        return hash(self.name)
-
-    def isLibrary(self):
-        return isinstance(self, Library)
-
-    def isJreLibrary(self):
-        return isinstance(self, JreLibrary)
-
-    def isProject(self):
-        return isinstance(self, Project)
-
-class Project(Dependency):
-    def __init__(self, suite, name, srcDirs, deps, javaCompliance, workingSets, d):
-        Dependency.__init__(self, suite, name)
-        self.srcDirs = srcDirs
-        self.deps = deps
-        self.checkstyleProj = name
-        self.javaCompliance = JavaCompliance(javaCompliance) if javaCompliance is not None else None
-        self.native = False
-        self.workingSets = workingSets
-        self.dir = d
-
-        # The annotation processors defined by this project
-        self.definedAnnotationProcessors = None
-        self.definedAnnotationProcessorsDist = None
-
-        # Create directories for projects that don't yet exist
-        if not exists(d):
-            os.mkdir(d)
-        for s in self.source_dirs():
-            if not exists(s):
-                os.mkdir(s)
-
-    def all_deps(self, deps, includeLibs, includeSelf=True, includeJreLibs=False, includeAnnotationProcessors=False):
-        """
-        Add the transitive set of dependencies for this project, including
-        libraries if 'includeLibs' is true, to the 'deps' list.
-        """
-        return sorted(self._all_deps_helper(deps, [], includeLibs, includeSelf, includeJreLibs, includeAnnotationProcessors))
-
-    def _all_deps_helper(self, deps, dependants, includeLibs, includeSelf=True, includeJreLibs=False, includeAnnotationProcessors=False):
-        if self in dependants:
-            abort(str(self) + ': project dependency cycle found:\n    ' +
-                  '\n        |\n        V\n    '.join(map(str, dependants[dependants.index(self):])) +
-                  '\n        |\n        V\n    ' + self.name)
-        childDeps = list(self.deps)
-        if includeAnnotationProcessors and len(self.annotation_processors()) > 0:
-            childDeps = self.annotation_processors() + childDeps
-        if self in deps:
-            return deps
-        for name in childDeps:
-            assert name != self.name
-            dep = dependency(name)
-            if not dep in deps:
-                if dep.isProject():
-                    dep._all_deps_helper(deps, dependants + [self], includeLibs=includeLibs, includeJreLibs=includeJreLibs, includeAnnotationProcessors=includeAnnotationProcessors)
-                elif dep.isProject() or (dep.isLibrary() and includeLibs) or (dep.isJreLibrary() and includeJreLibs):
-                    dep.all_deps(deps, includeLibs=includeLibs, includeJreLibs=includeJreLibs, includeAnnotationProcessors=includeAnnotationProcessors)
-        if not self in deps and includeSelf:
-            deps.append(self)
-        return deps
-
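_all_deps_helper() performs a depth-first walk over the dependency graph, using the 'dependants' list both to detect cycles and to report the offending path. A minimal stand-alone sketch of the same idea on a plain dictionary; the graph and names are made up for illustration:

    # Depth-first dependency walk with cycle detection: 'path' plays the role
    # of 'dependants' above, while nodes already in 'result' are skipped.
    def all_deps(graph, node, result=None, path=None):
        result = [] if result is None else result
        path = [] if path is None else path
        if node in path:
            raise ValueError("dependency cycle: " + " -> ".join(path + [node]))
        if node in result:
            return result
        for dep in graph.get(node, []):
            all_deps(graph, dep, result, path + [node])
        result.append(node)
        return result

    graph = {"dist": ["api", "impl"], "impl": ["api"], "api": []}
    print(all_deps(graph, "dist"))  # ['api', 'impl', 'dist']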
-    def _compute_max_dep_distances(self, name, distances, dist):
-        currentDist = distances.get(name)
-        if currentDist is None or currentDist < dist:
-            distances[name] = dist
-            p = project(name, False)
-            if p is not None:
-                for dep in p.deps:
-                    self._compute_max_dep_distances(dep, distances, dist + 1)
-
-    def canonical_deps(self):
-        """
-        Get the dependencies of this project that are not recursive (i.e. cannot be reached
-        via other dependencies).
-        """
-        distances = dict()
-        result = set()
-        self._compute_max_dep_distances(self.name, distances, 0)
-        for n, d in distances.iteritems():
-            assert d > 0 or n == self.name
-            if d == 1:
-                result.add(n)
-
-        if len(result) == len(self.deps) and frozenset(self.deps) == result:
-            return self.deps
-        return result
-
-    def max_depth(self):
-        """
-        Get the maximum canonical distance between this project and its most distant dependency.
-        """
-        distances = dict()
-        self._compute_max_dep_distances(self.name, distances, 0)
-        return max(distances.values())
-
-    def source_dirs(self):
-        """
-        Get the directories in which the sources of this project are found.
-        """
-        return [join(self.dir, s) for s in self.srcDirs]
-
-    def source_gen_dir(self):
-        """
-        Get the directory in which source files generated by the annotation processor are found/placed.
-        """
-        if self.native:
-            return None
-        return join(self.dir, 'src_gen')
-
-    def output_dir(self):
-        """
-        Get the directory in which the class files of this project are found/placed.
-        """
-        if self.native:
-            return None
-        return join(self.dir, 'bin')
-
-    def jasmin_output_dir(self):
-        """
-        Get the directory in which the Jasmin assembled class files of this project are found/placed.
-        """
-        if self.native:
-            return None
-        return join(self.dir, 'jasmin_classes')
-
-    def append_to_classpath(self, cp, resolve):
-        if not self.native:
-            cp.append(self.output_dir())
-
-    def find_classes_with_matching_source_line(self, pkgRoot, function, includeInnerClasses=False):
-        """
-        Scan the sources of this project for Java source files containing a line for which
-        'function' returns true. A map from class name to source file path for each existing class
-        corresponding to a matched source file is returned.
-        """
-        result = dict()
-        pkgDecl = re.compile(r"^package\s+([a-zA-Z_][\w\.]*)\s*;$")
-        for srcDir in self.source_dirs():
-            outputDir = self.output_dir()
-            for root, _, files in os.walk(srcDir):
-                for name in files:
-                    if name.endswith('.java') and name != 'package-info.java':
-                        matchFound = False
-                        source = join(root, name)
-                        with open(source) as f:
-                            pkg = None
-                            for line in f:
-                                if line.startswith("package "):
-                                    match = pkgDecl.match(line)
-                                    if match:
-                                        pkg = match.group(1)
-                                if function(line.strip()):
-                                    matchFound = True
-                                if pkg and matchFound:
-                                    break
-
-                        if matchFound:
-                            simpleClassName = name[:-len('.java')]
-                            assert pkg is not None, 'could not find package statement in file ' + name
-                            if pkgRoot is None or pkg.startswith(pkgRoot):
-                                pkgOutputDir = join(outputDir, pkg.replace('.', os.path.sep))
-                                if exists(pkgOutputDir):
-                                    for e in os.listdir(pkgOutputDir):
-                                        if includeInnerClasses:
-                                            if e.endswith('.class') and (e.startswith(simpleClassName) or e.startswith(simpleClassName + '$')):
-                                                className = pkg + '.' + e[:-len('.class')]
-                                                result[className] = source
-                                        elif e == simpleClassName + '.class':
-                                            className = pkg + '.' + simpleClassName
-                                            result[className] = source
-        return result
-
-    def _init_packages_and_imports(self):
-        if not hasattr(self, '_defined_java_packages'):
-            packages = set()
-            extendedPackages = set()
-            depPackages = set()
-            for d in self.all_deps([], includeLibs=False, includeSelf=False):
-                depPackages.update(d.defined_java_packages())
-            imports = set()
-            importRe = re.compile(r'import\s+(?:static\s+)?([^;]+);')
-            for sourceDir in self.source_dirs():
-                for root, _, files in os.walk(sourceDir):
-                    javaSources = [name for name in files if name.endswith('.java')]
-                    if len(javaSources) != 0:
-                        pkg = root[len(sourceDir) + 1:].replace(os.sep, '.')
-                        if not pkg in depPackages:
-                            packages.add(pkg)
-                        else:
-                            # A project extends a package already defined by one of its dependencies
-                            extendedPackages.add(pkg)
-                            imports.add(pkg)
-
-                        for n in javaSources:
-                            with open(join(root, n)) as fp:
-                                content = fp.read()
-                                imports.update(importRe.findall(content))
-            self._defined_java_packages = frozenset(packages)
-            self._extended_java_packages = frozenset(extendedPackages)
-
-            importedPackages = set()
-            for imp in imports:
-                name = imp
-                while not name in depPackages and len(name) > 0:
-                    lastDot = name.rfind('.')
-                    if lastDot == -1:
-                        name = None
-                        break
-                    name = name[0:lastDot]
-                if name is not None:
-                    importedPackages.add(name)
-            self._imported_java_packages = frozenset(importedPackages)
-
-    def defined_java_packages(self):
-        """Get the immutable set of Java packages defined by the Java sources of this project"""
-        self._init_packages_and_imports()
-        return self._defined_java_packages
-
-    def extended_java_packages(self):
-        """Get the immutable set of Java packages extended by the Java sources of this project"""
-        self._init_packages_and_imports()
-        return self._extended_java_packages
-
-    def imported_java_packages(self):
-        """Get the immutable set of Java packages defined by other Java projects that are
-           imported by the Java sources of this project."""
-        self._init_packages_and_imports()
-        return self._imported_java_packages
-
-    """
-    Gets the list of projects defining the annotation processors that will be applied
-    when compiling this project. This includes the projects declared by the annotationProcessors property
-    of this project and any of its project dependencies. It also includes
-    any project dependencies that define an annotation processors.
-    """
-    def annotation_processors(self):
-        if not hasattr(self, '_annotationProcessors'):
-            aps = set()
-            if hasattr(self, '_declaredAnnotationProcessors'):
-                aps = set(self._declaredAnnotationProcessors)
-                for ap in aps:
-                    if project(ap).definedAnnotationProcessorsDist is None:
-                        config = join(project(ap).source_dirs()[0], 'META-INF', 'services', 'javax.annotation.processing.Processor')
-                        if not exists(config):
-                            TimeStampFile(config).touch()
-                        abort('Project ' + ap + ' declared in annotationProcessors property of ' + self.name + ' does not define any annotation processors.\n' +
-                              'Please specify the annotation processors in ' + config)
-
-            allDeps = self.all_deps([], includeLibs=False, includeSelf=False, includeAnnotationProcessors=False)
-            for p in allDeps:
-                # Add an annotation processor dependency
-                if p.definedAnnotationProcessorsDist is not None:
-                    aps.add(p.name)
-
-                # Inherit annotation processors from dependencies
-                aps.update(p.annotation_processors())
-
-            self._annotationProcessors = sorted(list(aps))
-        return self._annotationProcessors
-
-    """
-    Gets the class path composed of the distribution jars containing the 
-    annotation processors that will be applied when compiling this project.
-    """
-    def annotation_processors_path(self):
-        aps = [project(ap) for ap in self.annotation_processors()]
-        libAps = [dep for dep in self.all_deps([], includeLibs=True, includeSelf=False) if dep.isLibrary() and hasattr(dep, 'annotationProcessor') and getattr(dep, 'annotationProcessor').lower() == 'true']
-        if len(aps) + len(libAps):
-            return os.pathsep.join([ap.definedAnnotationProcessorsDist.path for ap in aps if ap.definedAnnotationProcessorsDist] + [lib.get_path(False) for lib in libAps])
-        return None
-
-    def uses_annotation_processor_library(self):
-        for dep in self.all_deps([], includeLibs=True, includeSelf=False):
-            if dep.isLibrary() and hasattr(dep, 'annotationProcessor'):
-                return True
-        return False
-
-    def update_current_annotation_processors_file(self):
-        aps = self.annotation_processors()
-        outOfDate = False
-        currentApsFile = join(self.suite.mxDir, 'currentAnnotationProcessors', self.name)
-        currentApsFileExists = exists(currentApsFile)
-        if currentApsFileExists:
-            with open(currentApsFile) as fp:
-                currentAps = [l.strip() for l in fp.readlines()]
-                if currentAps != aps:
-                    outOfDate = True
-        if outOfDate or not currentApsFileExists:
-            if not exists(dirname(currentApsFile)):
-                os.mkdir(dirname(currentApsFile))
-            with open(currentApsFile, 'w') as fp:
-                for ap in aps:
-                    print >> fp, ap
-        return outOfDate
-
-    def make_archive(self, path=None):
-        outputDir = self.output_dir()
-        if not path:
-            path = join(self.dir, self.name + '.jar')
-        with Archiver(path) as arc:
-            for root, _, files in os.walk(outputDir):
-                for f in files:
-                    relpath = root[len(outputDir) + 1:]
-                    arcname = join(relpath, f).replace(os.sep, '/')
-                    arc.zf.write(join(root, f), arcname)
-        return path
-
-def _make_absolute(path, prefix):
-    """
-    Makes 'path' absolute if it isn't already by prefixing 'prefix'
-    """
-    if not isabs(path):
-        return join(prefix, path)
-    return path
-
-def sha1OfFile(path):
-    with open(path, 'rb') as f:
-        d = hashlib.sha1()
-        while True:
-            buf = f.read(4096)
-            if not buf:
-                break
-            d.update(buf)
-        return d.hexdigest()
-
-def download_file_with_sha1(name, path, urls, sha1, sha1path, resolve, mustExist, sources=False, canSymlink=True):
-    canSymlink = canSymlink and not (get_os() == 'windows' or get_os() == 'cygwin')
-    def _download_lib():
-        cacheDir = _cygpathW2U(get_env('MX_CACHE_DIR', join(_opts.user_home, '.mx', 'cache')))
-        if not exists(cacheDir):
-            os.makedirs(cacheDir)
-        base = basename(path)
-        cachePath = join(cacheDir, base + '_' + sha1)
-
-        if not exists(cachePath) or sha1OfFile(cachePath) != sha1:
-            if exists(cachePath):
-                log('SHA1 of ' + cachePath + ' does not match expected value (' + sha1 + ') - found ' + sha1OfFile(cachePath) + ' - re-downloading')
-            print 'Downloading ' + ("sources " if sources else "") + name + ' from ' + str(urls)
-            download(cachePath, urls)
-
-        d = dirname(path)
-        if d != '' and not exists(d):
-            os.makedirs(d)
-
-        if canSymlink and 'symlink' in dir(os):
-            if exists(path):
-                os.unlink(path)
-            try:
-                os.symlink(cachePath, path)
-            except OSError as e:
-                # When doing parallel building, the symlink can fail
-                # if another thread wins the race to create the symlink
-                if not exists(path):
-                    # It was some other error
-                    raise e
-
-        else:
-            shutil.copy(cachePath, path)
-
-    def _sha1Cached():
-        with open(sha1path, 'r') as f:
-            return f.read()[0:40]
-
-    def _writeSha1Cached():
-        with open(sha1path, 'w') as f:
-            f.write(sha1OfFile(path))
-
-    if resolve and mustExist and not exists(path):
-        assert len(urls) != 0, 'cannot find required library ' + name + ' ' + path
-        _download_lib()
-
-    if exists(path):
-        if sha1 and not exists(sha1path):
-            _writeSha1Cached()
-
-        if sha1 and sha1 != _sha1Cached():
-            _download_lib()
-            if sha1 != sha1OfFile(path):
-                abort("SHA1 does not match for " + name + ". Broken download? SHA1 not updated in projects file?")
-            _writeSha1Cached()
-
-    return path
-
-class BaseLibrary(Dependency):
-    def __init__(self, suite, name, optional):
-        Dependency.__init__(self, suite, name)
-        self.optional = optional
-
-    def __ne__(self, other):
-        result = self.__eq__(other)
-        if result is NotImplemented:
-            return result
-        return not result
-
-"""
-A library that will be provided by the JRE but may be absent.
-Any project or normal library that depends on a missing library
-will be removed from the global project and library dictionaries
-(i.e., _projects and _libs).
-
-This mechanism exists primarily to be able to support code
-that may use functionality in one JRE (e.g., Oracle JRE)
-that is not present in another JRE (e.g., OpenJDK). A
-motivating example is the Java Flight Recorder library
-found in the Oracle JRE. 
-"""
-class JreLibrary(BaseLibrary):
-    def __init__(self, suite, name, jar, optional):
-        BaseLibrary.__init__(self, suite, name, optional)
-        self.jar = jar
-
-    def __eq__(self, other):
-        if isinstance(other, JreLibrary):
-            return self.jar == other.jar
-        else:
-            return NotImplemented
-
-    def is_present_in_jdk(self, jdk):
-        return jdk.containsJar(self.jar)
-
-    def all_deps(self, deps, includeLibs, includeSelf=True, includeJreLibs=False, includeAnnotationProcessors=False):
-        """
-        Add the transitive set of dependencies for this JRE library to the 'deps' list.
-        """
-        if includeJreLibs and includeSelf and not self in deps:
-            deps.append(self)
-        return sorted(deps)
-
-class Library(BaseLibrary):
-    def __init__(self, suite, name, path, optional, urls, sha1, sourcePath, sourceUrls, sourceSha1, deps):
-        BaseLibrary.__init__(self, suite, name, optional)
-        self.path = path.replace('/', os.sep)
-        self.urls = urls
-        self.sha1 = sha1
-        self.sourcePath = sourcePath
-        self.sourceUrls = sourceUrls
-        if sourcePath == path:
-            assert sourceSha1 is None or sourceSha1 == sha1
-            sourceSha1 = sha1
-        self.sourceSha1 = sourceSha1
-        self.deps = deps
-        abspath = _make_absolute(path, self.suite.dir)
-        if not optional and not exists(abspath):
-            if not len(urls):
-                abort('Non-optional library {0} must either exist at {1} or specify one or more URLs from which it can be retrieved'.format(name, abspath))
-
-        def _checkSha1PropertyCondition(propName, cond, inputPath):
-            if not cond:
-                absInputPath = _make_absolute(inputPath, self.suite.dir)
-                if exists(absInputPath):
-                    abort('Missing "{0}" property for library {1}. Add the following line to projects file:\nlibrary@{2}@{3}={4}'.format(propName, name, name, propName, sha1OfFile(absInputPath)))
-                abort('Missing "{0}" property for library {1}'.format(propName, name))
-
-        _checkSha1PropertyCondition('sha1', sha1, path)
-        _checkSha1PropertyCondition('sourceSha1', not sourcePath or sourceSha1, sourcePath)
-
-        for url in urls:
-            if url.endswith('/') != self.path.endswith(os.sep):
-                abort('Path for dependency directory must have a URL ending with "/": path=' + self.path + ' url=' + url)
-
-    def __eq__(self, other):
-        if isinstance(other, Library):
-            if len(self.urls) == 0:
-                return self.path == other.path
-            else:
-                return self.urls == other.urls
-        else:
-            return NotImplemented
-
-    def get_path(self, resolve):
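-        """
-        Gets the absolute path of this library's jar, downloading it first if
-        necessary. Returns None if the library is provided by the JDK in use
-        (per the 'includedInJDK' attribute).
-        """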
-        path = _make_absolute(self.path, self.suite.dir)
-        sha1path = path + '.sha1'
-
-        includedInJDK = getattr(self, 'includedInJDK', None)
-        # TODO since we don't know which JDK will be used, this check is dubious
-        if includedInJDK and java().javaCompliance >= JavaCompliance(includedInJDK):
-            return None
-
-        bootClassPathAgent = getattr(self, 'bootClassPathAgent').lower() == 'true' if hasattr(self, 'bootClassPathAgent') else False
-
-        return download_file_with_sha1(self.name, path, self.urls, self.sha1, sha1path, resolve, not self.optional, canSymlink=not bootClassPathAgent)
-
-    def get_source_path(self, resolve):
-        if self.sourcePath is None:
-            return None
-        path = _make_absolute(self.sourcePath, self.suite.dir)
-        sha1path = path + '.sha1'
-
-        return download_file_with_sha1(self.name, path, self.sourceUrls, self.sourceSha1, sha1path, resolve, len(self.sourceUrls) != 0, sources=True)
-
-    def append_to_classpath(self, cp, resolve):
-        path = self.get_path(resolve)
-        if path and (exists(path) or not resolve):
-            cp.append(path)
-
-    def all_deps(self, deps, includeLibs, includeSelf=True, includeJreLibs=False, includeAnnotationProcessors=False):
-        """
-        Add the transitive set of dependencies for this library to the 'deps' list.
-        """
-        if not includeLibs:
-            return sorted(deps)
-        childDeps = list(self.deps)
-        if self in deps:
-            return sorted(deps)
-        for name in childDeps:
-            assert name != self.name
-            dep = library(name)
-            if not dep in deps:
-                dep.all_deps(deps, includeLibs=includeLibs, includeJreLibs=includeJreLibs, includeAnnotationProcessors=includeAnnotationProcessors)
-        if not self in deps and includeSelf:
-            deps.append(self)
-        return sorted(deps)
-
-class HgConfig:
-    """
-    Encapsulates access to Mercurial (hg)
-    """
-    def __init__(self):
-        self.missing = 'no hg executable found'
-        self.has_hg = None
-
-    def check(self, abortOnFail=True):
-        if self.has_hg is None:
-            try:
-                subprocess.check_output(['hg'])
-                self.has_hg = True
-            except OSError:
-                self.has_hg = False
-                warn(self.missing)
-
-        if not self.has_hg:
-            if abortOnFail:
-                abort(self.missing)
-            else:
-                warn(self.missing)
-
-    def tip(self, sDir, abortOnError=True):
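-        """
-        Returns the changeset id of the tip revision of the repository at 'sDir',
-        or None if it could not be determined.
-        """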
-        try:
-            return subprocess.check_output(['hg', 'tip', '-R', sDir, '--template', '{node}'])
-        except OSError:
-            warn(self.missing)
-        except subprocess.CalledProcessError:
-            if abortOnError:
-                abort('failed to get tip revision id')
-            else:
-                return None
-
-    def isDirty(self, sDir, abortOnError=True):
-        try:
-            return len(subprocess.check_output(['hg', 'status', '-R', sDir])) > 0
-        except OSError:
-            warn(self.missing)
-        except subprocess.CalledProcessError:
-            if abortOnError:
-                abort('failed to get status')
-            else:
-                return None
-
-    def locate(self, sDir, patterns=None, abortOnError=True):
-        try:
-            if patterns is None:
-                patterns = []
-            elif not isinstance(patterns, list):
-                patterns = [patterns]
-            return subprocess.check_output(['hg', 'locate', '-R', sDir] + patterns).split('\n')
-        except OSError:
-            warn(self.missing)
-        except subprocess.CalledProcessError as e:
-            if e.returncode == 1:
-                # hg locate returns 1 if no matches were found
-                return []
-            if abortOnError:
-                abort('failed to locate')
-            else:
-                return None
-
-def _load_suite_dict(mxDir):
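-    """
-    Loads the suite dictionary defined in <mxDir>/suite.py and merges in the
-    'extra' dictionaries of any extension modules suite1.py, suite2.py, ...
-    found in the same directory. Returns the merged dictionary and the module
-    path at which the search for further extensions stopped.
-    """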
-
-    suffix = 1
-    suite = None
-    dictName = 'suite'
-
-    def expand(value, context):
-        if isinstance(value, types.DictionaryType):
-            for n, v in value.iteritems():
-                value[n] = expand(v, context + [n])
-        elif isinstance(value, types.ListType):
-            for i in range(len(value)):
-                value[i] = expand(value[i], context + [str(i)])
-        else:
-            if not isinstance(value, types.StringTypes):
-                abort('value of ' + '.'.join(context) + ' is of unexpected type ' + str(type(value)))
-            value = expandvars(value)
-            if '$' in value or '%' in value:
-                abort('value of ' + '.'.join(context) + ' contains an undefined environment variable: ' + value)
-
-        return value
-
-    moduleName = 'suite'
-    modulePath = join(mxDir, moduleName + '.py')
-    while exists(modulePath):
-
-        savedModule = sys.modules.get(moduleName)
-        if savedModule:
-            warn(modulePath + ' conflicts with ' + savedModule.__file__)
-        # temporarily extend the Python path
-        sys.path.insert(0, mxDir)
-
-        snapshot = frozenset(sys.modules.keys())
-        module = __import__(moduleName)
-
-        if savedModule:
-            # restore the old module into the module name space
-            sys.modules[moduleName] = savedModule
-        else:
-            # remove moduleName from the module name space
-            sys.modules.pop(moduleName)
-
-        # For now fail fast if extra modules were loaded.
-        # This can later be relaxed to simply remove the extra modules
-        # from the sys.modules name space if necessary.
-        extraModules = frozenset(sys.modules.keys()) - snapshot
-        assert len(extraModules) == 0, 'loading ' + modulePath + ' caused extra modules to be loaded: ' + ', '.join([m for m in extraModules])
-
-        # revert the Python path
-        del sys.path[0]
-
-        if not hasattr(module, dictName):
-            abort(modulePath + ' must define a variable named "' + dictName + '"')
-        d = expand(getattr(module, dictName), [dictName])
-        sections = ['projects', 'libraries', 'jrelibraries', 'distributions'] + (['distribution_extensions'] if suite else ['name', 'mxversion'])
-        unknown = frozenset(d.keys()) - frozenset(sections)
-        if unknown:
-            abort(modulePath + ' defines unsupported suite sections: ' + ', '.join(unknown))
-
-        if suite is None:
-            suite = d
-        else:
-            for s in sections:
-                existing = suite.get(s)
-                additional = d.get(s)
-                if additional:
-                    if not existing:
-                        suite[s] = additional
-                    else:
-                        conflicting = frozenset(additional.keys()) & frozenset(existing.keys())
-                        if conflicting:
-                            abort(modulePath + ' redefines: ' + ', '.join(conflicting))
-                        existing.update(additional)
-            distExtensions = d.get('distribution_extensions')
-            if distExtensions:
-                existing = suite['distributions']
-                for n, attrs in distExtensions.iteritems():
-                    original = existing.get(n)
-                    if not original:
-                        abort('cannot extend non-existing distribution ' + n)
-                    for k, v in attrs.iteritems():
-                        if k != 'dependencies':
-                            abort('Only the dependencies of distribution ' + n + ' can be extended')
-                        if not isinstance(v, types.ListType):
-                            abort('distribution_extensions.' + n + '.dependencies must be a list')
-                        original['dependencies'] += v
-
-        dictName = 'extra'
-        moduleName = 'suite' + str(suffix)
-        modulePath = join(mxDir, moduleName + '.py')
-
-        deprecatedModulePath = join(mxDir, 'projects' + str(suffix) + '.py')
-        if exists(deprecatedModulePath):
-            abort('Please rename ' + deprecatedModulePath + ' to ' + modulePath)
-
-        suffix = suffix + 1
-
-    return suite, modulePath
-
-class Suite:
-    def __init__(self, mxDir, primary, load=True):
-        self.dir = dirname(mxDir)
-        self.mxDir = mxDir
-        self.projects = []
-        self.libs = []
-        self.jreLibs = []
-        self.dists = []
-        self.commands = None
-        self.primary = primary
-        self.requiredMxVersion = None
-        self.name = _suitename(mxDir)  # validated in _load_projects
-        if load:
-            # just check that there are no imports
-            self._load_imports()
-            self._load_env()
-            self._load_commands()
-        _suites[self.name] = self
-
-    def __str__(self):
-        return self.name
-
-    def _load_projects(self):
-        suitePyFile = join(self.mxDir, 'suite.py')
-        if not exists(suitePyFile):
-            return
-
-        suiteDict, _ = _load_suite_dict(self.mxDir)
-
-        if suiteDict.get('name') is not None and suiteDict.get('name') != self.name:
-            abort('suite name in project file does not match ' + _suitename(self.mxDir))
-
-        if suiteDict.has_key('mxversion'):
-            try:
-                self.requiredMxVersion = VersionSpec(suiteDict['mxversion'])
-            except AssertionError as ae:
-                abort('Exception while parsing "mxversion" in project file: ' + str(ae))
-
-        libsMap = suiteDict['libraries']
-        jreLibsMap = suiteDict['jrelibraries']
-        projsMap = suiteDict['projects']
-        distsMap = suiteDict['distributions']
-
-        def pop_list(attrs, name, context):
-            v = attrs.pop(name, None)
-            if not v:
-                return []
-            if not isinstance(v, list):
-                abort('Attribute "' + name + '" for ' + context + ' must be a list')
-            return v
-
-        for name, attrs in sorted(projsMap.iteritems()):
-            context = 'project ' + name
-            srcDirs = pop_list(attrs, 'sourceDirs', context)
-            deps = pop_list(attrs, 'dependencies', context)
-            ap = pop_list(attrs, 'annotationProcessors', context)
-            javaCompliance = attrs.pop('javaCompliance', None)
-            subDir = attrs.pop('subDir', None)
-            if subDir is None:
-                d = join(self.dir, name)
-            else:
-                d = join(self.dir, subDir, name)
-            workingSets = attrs.pop('workingSets', None)
-            p = Project(self, name, srcDirs, deps, javaCompliance, workingSets, d)
-            p.checkstyleProj = attrs.pop('checkstyle', name)
-            p.native = attrs.pop('native', '') == 'true'
-            p.checkPackagePrefix = attrs.pop('checkPackagePrefix', 'true') == 'true'
-            if not p.native and p.javaCompliance is None:
-                abort('javaCompliance property required for non-native project ' + name)
-            if len(ap) > 0:
-                p._declaredAnnotationProcessors = ap
-            p.__dict__.update(attrs)
-            self.projects.append(p)
-
-        for name, attrs in sorted(jreLibsMap.iteritems()):
-            jar = attrs.pop('jar')
-            # JRE libraries are optional by default
-            optional = attrs.pop('optional', 'true') != 'false'
-            l = JreLibrary(self, name, jar, optional)
-            self.jreLibs.append(l)
-
-        for name, attrs in sorted(libsMap.iteritems()):
-            context = 'library ' + name
-            if "|" in name:
-                if name.count('|') != 2:
-                    abort("Format error in library name: " + name + "\nsyntax: libname|os-platform|architecture")
-                name, platform, architecture = name.split("|")
-                if platform != get_os() or architecture != get_arch():
-                    continue
-            path = attrs.pop('path')
-            urls = pop_list(attrs, 'urls', context)
-            sha1 = attrs.pop('sha1', None)
-            sourcePath = attrs.pop('sourcePath', None)
-            sourceUrls = pop_list(attrs, 'sourceUrls', context)
-            sourceSha1 = attrs.pop('sourceSha1', None)
-            deps = pop_list(attrs, 'dependencies', context)
-            # Add support for optional libraries once we have a good use case
-            optional = False
-            l = Library(self, name, path, optional, urls, sha1, sourcePath, sourceUrls, sourceSha1, deps)
-            l.__dict__.update(attrs)
-            self.libs.append(l)
-
-        for name, attrs in sorted(distsMap.iteritems()):
-            context = 'distribution ' + name
-            path = attrs.pop('path')
-            sourcesPath = attrs.pop('sourcesPath', None)
-            deps = pop_list(attrs, 'dependencies', context)
-            mainClass = attrs.pop('mainClass', None)
-            exclDeps = pop_list(attrs, 'exclude', context)
-            distDeps = pop_list(attrs, 'distDependencies', context)
-            javaCompliance = attrs.pop('javaCompliance', None)
-            d = Distribution(self, name, path, sourcesPath, deps, mainClass, exclDeps, distDeps, javaCompliance)
-            d.__dict__.update(attrs)
-            self.dists.append(d)
-
-        # Create a distribution for each project that defines annotation processors
-        for p in self.projects:
-            annotationProcessors = None
-            for srcDir in p.source_dirs():
-                configFile = join(srcDir, 'META-INF', 'services', 'javax.annotation.processing.Processor')
-                if exists(configFile):
-                    with open(configFile) as fp:
-                        annotationProcessors = [ap.strip() for ap in fp]
-                        if len(annotationProcessors) != 0:
-                            for ap in annotationProcessors:
-                                if not ap.startswith(p.name):
-                                    abort(ap + ' in ' + configFile + ' does not start with ' + p.name)
-            if annotationProcessors:
-                dname = p.name.replace('.', '_').upper()
-                apDir = join(p.dir, 'ap')
-                path = join(apDir, p.name + '.jar')
-                sourcesPath = None
-                deps = [p.name]
-                mainClass = None
-                exclDeps = []
-                distDeps = []
-                javaCompliance = None
-                d = Distribution(self, dname, path, sourcesPath, deps, mainClass, exclDeps, distDeps, javaCompliance, True)
-                d.subDir = os.path.relpath(os.path.dirname(p.dir), self.dir)
-                self.dists.append(d)
-                p.definedAnnotationProcessors = annotationProcessors
-                p.definedAnnotationProcessorsDist = d
-                d.definingProject = p
-
-                # Restrict exported annotation processors to those explicitly defined by the project
-                def _refineAnnotationProcessorServiceConfig(dist):
-                    aps = dist.definingProject.definedAnnotationProcessors
-                    apsJar = dist.path
-                    config = 'META-INF/services/javax.annotation.processing.Processor'
-                    with zipfile.ZipFile(apsJar, 'r') as zf:
-                        currentAps = zf.read(config).split()
-                    if currentAps != aps:
-                        logv('[updating ' + config + ' in ' + apsJar + ']')
-                        with Archiver(apsJar) as arc:
-                            with zipfile.ZipFile(apsJar, 'r') as lp:
-                                for arcname in lp.namelist():
-                                    if arcname == config:
-                                        arc.zf.writestr(arcname, '\n'.join(aps))
-                                    else:
-                                        arc.zf.writestr(arcname, lp.read(arcname))
-                d.add_update_listener(_refineAnnotationProcessorServiceConfig)
-
-        if self.name is None:
-            abort('Missing "suite=<name>" in ' + suitePyFile)
-
-    def _commands_name(self):
-        return 'mx_' + self.name.replace('-', '_')
-
-    def _find_commands(self, name):
-        commandsPath = join(self.mxDir, name + '.py')
-        if exists(commandsPath):
-            return name
-        else:
-            return None
-
-    def _load_commands(self):
-        commandsName = self._find_commands(self._commands_name())
-        if commandsName is None:
-            # backwards compatibility
-            commandsName = self._find_commands('commands')
-        if commandsName is not None:
-            if commandsName in sys.modules:
-                abort(commandsName + '.py in suite ' + self.name + ' duplicates ' + sys.modules[commandsName].__file__)
-            # temporarily extend the Python path
-            sys.path.insert(0, self.mxDir)
-            mod = __import__(commandsName)
-
-            self.commands = sys.modules.pop(commandsName)
-            sys.modules[commandsName] = self.commands
-
-            # revert the Python path
-            del sys.path[0]
-
-            if not hasattr(mod, 'mx_init'):
-                abort(commandsName + '.py in suite ' + self.name + ' must define an mx_init(suite) function')
-            if hasattr(mod, 'mx_post_parse_cmd_line'):
-                self.mx_post_parse_cmd_line = mod.mx_post_parse_cmd_line
-
-            mod.mx_init(self)
-            self.commands = mod
-
-    def _load_imports(self):
-        if exists(join(self.mxDir, 'imports')):
-            abort('multiple suites are not supported in this version of mx')
-
-    def _load_env(self):
-        e = join(self.mxDir, 'env')
-        if exists(e):
-            with open(e) as f:
-                lineNum = 0
-                for line in f:
-                    lineNum = lineNum + 1
-                    line = line.strip()
-                    if len(line) != 0 and line[0] != '#':
-                        if not '=' in line:
-                            abort(e + ':' + str(lineNum) + ': line does not match pattern "key=value"')
-                        key, value = line.split('=', 1)
-                        os.environ[key.strip()] = expandvars_in_property(value.strip())
-
-    def _post_init(self, opts):
-        self._load_projects()
-        if self.requiredMxVersion is None:
-            warn("This suite does not express any required mx version. Consider adding 'mxversion=<version>' to your projects file.")
-        elif self.requiredMxVersion > version:
-            abort("This suite requires mx version " + str(self.requiredMxVersion) + " while your current mx version is " + str(version) + ". Please update mx.")
-        # set the global data structures, checking for conflicts unless _check_global_structures is False
-        for p in self.projects:
-            existing = _projects.get(p.name)
-            if existing is not None:
-                abort('cannot override project ' + p.name + ' in ' + p.dir + ' with project of the same name in ' + existing.dir)
-            if not p.name in _opts.ignored_projects:
-                _projects[p.name] = p
-        for l in self.libs:
-            existing = _libs.get(l.name)
-            # Check that suites that define same library are consistent
-            if existing is not None and existing != l:
-                abort('inconsistent library redefinition of ' + l.name + ' in ' + existing.suite.dir + ' and ' + l.suite.dir)
-            _libs[l.name] = l
-        for l in self.jreLibs:
-            existing = _jreLibs.get(l.name)
-            # Check that suites that define same library are consistent
-            if existing is not None and existing != l:
-                abort('inconsistent JRE library redefinition of ' + l.name + ' in ' + existing.suite.dir + ' and ' + l.suite.dir)
-            _jreLibs[l.name] = l
-        for d in self.dists:
-            existing = _dists.get(d.name)
-            if existing is not None:
-                # allow redefinition, so use path from existing
-                # abort('cannot redefine distribution  ' + d.name)
-                warn('distribution ' + d.name + ' redefined')
-                d.path = existing.path
-            _dists[d.name] = d
-
-        # Remove projects and libraries that (recursively) depend on an optional library
-        # whose artifact does not exist or on a JRE library that is not present in the
-        # JDK for a project. Also remove projects whose Java compliance requirement
-        # cannot be satisfied by the configured JDKs.
-        #
-        # Removed projects and libraries are also removed from
-        # distributions if they are listed as dependencies.
-        ommittedDeps = set()
-        for d in sorted_deps(includeLibs=True):
-            if d.isLibrary():
-                if d.optional:
-                    try:
-                        d.optional = False
-                        path = d.get_path(resolve=True)
-                    except SystemExit:
-                        path = None
-                    finally:
-                        d.optional = True
-                    if not path:
-                        logv('[omitting optional library {0} as {1} does not exist]'.format(d, d.path))
-                        ommittedDeps.add(d.name)
-                        del _libs[d.name]
-                        self.libs.remove(d)
-            elif d.isProject():
-                if java(d.javaCompliance, cancel='some projects will be omitted, which may result in errors') is None:
-                    logv('[omitting project {0} as Java compliance {1} cannot be satisfied by configured JDKs]'.format(d, d.javaCompliance))
-                    ommittedDeps.add(d.name)
-                    del _projects[d.name]
-                    self.projects.remove(d)
-                else:
-                    for name in list(d.deps):
-                        jreLib = _jreLibs.get(name)
-                        if jreLib:
-                            if not jreLib.is_present_in_jdk(java(d.javaCompliance)):
-                                if jreLib.optional:
-                                    logv('[omitting project {0} as dependency {1} is missing]'.format(d, name))
-                                    ommittedDeps.add(d.name)
-                                    del _projects[d.name]
-                                    self.projects.remove(d)
-                                else:
-                                    abort('JRE library {0} required by {1} not found'.format(jreLib, d))
-                        elif not dependency(name, fatalIfMissing=False):
-                            logv('[omitting project {0} as dependency {1} is missing]'.format(d, name))
-                            ommittedDeps.add(d.name)
-                            del _projects[d.name]
-                            self.projects.remove(d)
-        for dist in _dists.itervalues():
-            for name in list(dist.deps):
-                if name in ommittedDeps:
-                    logv('[omitting {0} from distribution {1}]'.format(name, dist))
-                    dist.deps.remove(name)
-
-        if hasattr(self, 'mx_post_parse_cmd_line'):
-            self.mx_post_parse_cmd_line(opts)
-
-class XMLElement(xml.dom.minidom.Element):
-    def writexml(self, writer, indent="", addindent="", newl=""):
-        writer.write(indent + "<" + self.tagName)
-
-        attrs = self._get_attributes()
-        a_names = attrs.keys()
-        a_names.sort()
-
-        for a_name in a_names:
-            writer.write(" %s=\"" % a_name)
-            xml.dom.minidom._write_data(writer, attrs[a_name].value)
-            writer.write("\"")
-        if self.childNodes:
-            if not self.ownerDocument.padTextNodeWithoutSiblings and len(self.childNodes) == 1 and isinstance(self.childNodes[0], xml.dom.minidom.Text):
-                # if the only child of an Element node is a Text node, then the
-                # text is printed without any indentation or new line padding
-                writer.write(">")
-                self.childNodes[0].writexml(writer)
-                writer.write("</%s>%s" % (self.tagName, newl))
-            else:
-                writer.write(">%s" % (newl))
-                for node in self.childNodes:
-                    node.writexml(writer, indent + addindent, addindent, newl)
-                writer.write("%s</%s>%s" % (indent, self.tagName, newl))
-        else:
-            writer.write("/>%s" % (newl))
-
-class XMLDoc(xml.dom.minidom.Document):
-
-    def __init__(self):
-        xml.dom.minidom.Document.__init__(self)
-        self.current = self
-        self.padTextNodeWithoutSiblings = False
-
-    def createElement(self, tagName):
-        # overwritten to create XMLElement
-        e = XMLElement(tagName)
-        e.ownerDocument = self
-        return e
-
-    def comment(self, txt):
-        self.current.appendChild(self.createComment(txt))
-
-    def open(self, tag, attributes=None, data=None):
-        if attributes is None:
-            attributes = {}
-        element = self.createElement(tag)
-        for key, value in attributes.items():
-            element.setAttribute(key, value)
-        self.current.appendChild(element)
-        self.current = element
-        if data is not None:
-            element.appendChild(self.createTextNode(data))
-        return self
-
-    def close(self, tag):
-        assert self.current != self
-        assert tag == self.current.tagName, str(tag) + ' != ' + self.current.tagName
-        self.current = self.current.parentNode
-        return self
-
-    def element(self, tag, attributes=None, data=None):
-        if attributes is None:
-            attributes = {}
-        return self.open(tag, attributes, data).close(tag)
-
-    def xml(self, indent='', newl='', escape=False, standalone=None):
-        assert self.current == self
-        result = self.toprettyxml(indent, newl, encoding="UTF-8")
-        if escape:
-            entities = {'"':  "&quot;", "'":  "&apos;", '\n': '&#10;'}
-            result = xml.sax.saxutils.escape(result, entities)
-        if standalone is not None:
-            result = result.replace('encoding="UTF-8"?>', 'encoding="UTF-8" standalone="' + str(standalone) + '"?>')
-        return result
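-    # Example usage of the fluent API above (hypothetical element names):
-    #     doc = XMLDoc()
-    #     doc.open('project', {'name': 'demo'}).element('path', data='src').close('project')
-    #     xmlText = doc.xml(indent='  ', newl='\n')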
-
-def get_jython_os():
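-    """
-    Gets the canonical operating system name when running under Jython, based
-    on the 'os.name' Java system property (and the 'isCygwin' property).
-    """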
-    from java.lang import System as System
-    os_name = System.getProperty('os.name').lower()
-    if System.getProperty('isCygwin'):
-        return 'cygwin'
-    elif os_name.startswith('mac'):
-        return 'darwin'
-    elif os_name.startswith('linux'):
-        return 'linux'
-    elif os_name.startswith('sunos'):
-        return 'solaris'
-    elif os_name.startswith('win'):
-        return 'windows'
-    else:
-        abort('Unknown operating system ' + os_name)
-
-def get_os():
-    """
-    Get a canonical form of sys.platform.
-    """
-    if is_jython():
-        return get_jython_os()
-    elif sys.platform.startswith('darwin'):
-        return 'darwin'
-    elif sys.platform.startswith('linux'):
-        return 'linux'
-    elif sys.platform.startswith('sunos'):
-        return 'solaris'
-    elif sys.platform.startswith('win32'):
-        return 'windows'
-    elif sys.platform.startswith('cygwin'):
-        return 'cygwin'
-    else:
-        abort('Unknown operating system ' + sys.platform)
-
-def _cygpathU2W(p):
-    """
-    Translate a path from unix-style to windows-style.
-    This method has no effect on platforms other than Cygwin.
-    """
-    if p is None or get_os() != "cygwin":
-        return p
-    return subprocess.check_output(['cygpath', '-a', '-w', p]).strip()
-
-def _cygpathW2U(p):
-    """
-    Translate a path from windows-style to unix-style.
-    This method has no effect on platforms other than Cygwin.
-    """
-    if p is None or get_os() != "cygwin":
-        return p
-    return subprocess.check_output(['cygpath', '-a', '-u', p]).strip()
-
-def _separatedCygpathU2W(p):
-    """
-    Translate a group of paths, separated by a path separator, from
-    unix-style to windows-style.
-    This method has no effect on platforms other than Cygwin.
-    """
-    if p is None or p == "" or get_os() != "cygwin":
-        return p
-    return ';'.join(map(_cygpathU2W, p.split(os.pathsep)))
-
-def _separatedCygpathW2U(p):
-    """
-    Translate a group of paths, separated by a path separator, from
-    windows-style to unix-style.
-    This method has no effect on platforms other than Cygwin.
-    """
-    if p is None or p == "" or get_os() != "cygwin":
-        return p
-    return os.pathsep.join(map(_cygpathW2U, p.split(';')))
-
-def get_arch():
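-    """
-    Gets the canonical architecture name ('amd64' or 'sparcv9') of the machine
-    running mx, aborting if the architecture is unknown or unsupported.
-    """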
-    machine = platform.uname()[4]
-    if machine in ['amd64', 'AMD64', 'x86_64', 'i86pc']:
-        return 'amd64'
-    if machine in ['sun4v', 'sun4u']:
-        return 'sparcv9'
-    if machine == 'i386' and get_os() == 'darwin':
-        try:
-            # Support for Snow Leopard and earlier versions of Mac OS X
-            if subprocess.check_output(['sysctl', '-n', 'hw.cpu64bit_capable']).strip() == '1':
-                return 'amd64'
-        except OSError:
-            # sysctl is not available
-            pass
-    abort('unknown or unsupported architecture: os=' + get_os() + ', machine=' + machine)
-
-def _loadSuite(mxDir, primary=False):
-    """
-    Load a suite from 'mxDir'.
-    """
-    for s in _suites.itervalues():
-        if s.mxDir == mxDir:
-            return s
-    # create the new suite
-    s = Suite(mxDir, primary)
-    return s
-
-def suites(opt_limit_to_suite=False):
-    """
-    Get the list of all loaded suites.
-    """
-    return _suites.values()
-
-def suite(name, fatalIfMissing=True):
-    """
-    Get the suite for a given name.
-    """
-    s = _suites.get(name)
-    if s is None and fatalIfMissing:
-        abort('suite named ' + name + ' not found')
-    return s
-
-
-def projects_from_names(projectNames):
-    """
-    Get the list of projects corresponding to projectNames; all projects if None
-    """
-    if projectNames is None:
-        return projects()
-    else:
-        return [project(name) for name in projectNames]
-
-def projects(opt_limit_to_suite=False):
-    """
-    Get the list of all loaded projects limited by --suite option if opt_limit_to_suite == True
-    """
-    sortedProjects = sorted(_projects.values(), key=lambda p: p.name)
-    if opt_limit_to_suite:
-        return _projects_opt_limit_to_suites(sortedProjects)
-    else:
-        return sortedProjects
-
-def projects_opt_limit_to_suites():
-    """
-    Get the list of all loaded projects optionally limited by --suite option
-    """
-    return projects(True)
-
-def _projects_opt_limit_to_suites(projects):
-    return projects
-
-def annotation_processors():
-    """
-    Get the list of all loaded projects that define an annotation processor.
-    """
-    global _annotationProcessors
-    if _annotationProcessors is None:
-        aps = set()
-        for p in projects():
-            for ap in p.annotation_processors():
-                if project(ap, False):
-                    aps.add(ap)
-        _annotationProcessors = list(aps)
-    return _annotationProcessors
-
-def distribution(name, fatalIfMissing=True):
-    """
-    Get the distribution for a given name. This will abort if the named distribution does
-    not exist and 'fatalIfMissing' is true.
-    """
-    d = _dists.get(name)
-    if d is None and fatalIfMissing:
-        abort('distribution named ' + name + ' not found')
-    return d
-
-def dependency(name, fatalIfMissing=True):
-    """
-    Get the project or library for a given name. This will abort if a project or library does
-    not exist for 'name' and 'fatalIfMissing' is true.
-    """
-    d = _projects.get(name)
-    if d is None:
-        d = _libs.get(name)
-        if d is None:
-            d = _jreLibs.get(name)
-    if d is None and fatalIfMissing:
-        if name in _opts.ignored_projects:
-            abort('project named ' + name + ' is ignored')
-        abort('project or library named ' + name + ' not found')
-    return d
-
-def project(name, fatalIfMissing=True):
-    """
-    Get the project for a given name. This will abort if the named project does
-    not exist and 'fatalIfMissing' is true.
-    """
-    p = _projects.get(name)
-    if p is None and fatalIfMissing:
-        if name in _opts.ignored_projects:
-            abort('project named ' + name + ' is ignored')
-        abort('project named ' + name + ' not found')
-    return p
-
-def library(name, fatalIfMissing=True):
-    """
-    Gets the library for a given name. This will abort if the named library does
-    not exist and 'fatalIfMissing' is true.
-    """
-    l = _libs.get(name)
-    if l is None and fatalIfMissing:
-        if _projects.get(name):
-            abort(name + ' is a project, not a library')
-        abort('library named ' + name + ' not found')
-    return l
-
-def _as_classpath(deps, resolve):
-    cp = []
-    if _opts.cp_prefix is not None:
-        cp = [_opts.cp_prefix]
-    for d in deps:
-        d.append_to_classpath(cp, resolve)
-    if _opts.cp_suffix is not None:
-        cp += [_opts.cp_suffix]
-    return os.pathsep.join(cp)
-
-def classpath(names=None, resolve=True, includeSelf=True, includeBootClasspath=False):
-    """
-    Get the class path for a list of given dependencies and distributions, resolving each entry in the
-    path (e.g. downloading a missing library) if 'resolve' is true.
-    """
-    if names is None:
-        deps = sorted_deps(includeLibs=True)
-        dists = list(_dists.values())
-    else:
-        deps = []
-        dists = []
-        if isinstance(names, types.StringTypes):
-            names = [names]
-        for n in names:
-            dep = dependency(n, fatalIfMissing=False)
-            if dep:
-                dep.all_deps(deps, True, includeSelf)
-            else:
-                dist = distribution(n, fatalIfMissing=False)
-                if not dist:
-                    abort('project, library or distribution named ' + n + ' not found')
-                dists.append(dist)
-
-    if len(dists):
-        distsDeps = set()
-        for d in dists:
-            distsDeps.update(d.sorted_deps())
-
-        # remove deps covered by a dist that will be on the class path
-        deps = [d for d in deps if d not in distsDeps]
-
-    result = _as_classpath(deps, resolve)
-
-    # prepend distributions
-    if len(dists):
-        distsCp = os.pathsep.join(dist.path for dist in dists)
-        if len(result):
-            result = distsCp + os.pathsep + result
-        else:
-            result = distsCp
-
-    if includeBootClasspath:
-        result = os.pathsep.join([java().bootclasspath(), result])
-
-    return result
-
-def classpath_walk(names=None, resolve=True, includeSelf=True, includeBootClasspath=False):
-    """
-    Walks the resources available in a given classpath, yielding a tuple for each resource
-    where the first member of the tuple is a directory path or ZipFile object for a
-    classpath entry and the second member is the qualified path of the resource relative
-    to the classpath entry.
-    """
-    cp = classpath(names, resolve, includeSelf, includeBootClasspath)
-    for entry in cp.split(os.pathsep):
-        if not exists(entry):
-            continue
-        if isdir(entry):
-            for root, dirs, files in os.walk(entry):
-                for d in dirs:
-                    entryPath = join(root[len(entry) + 1:], d)
-                    yield entry, entryPath
-                for f in files:
-                    entryPath = join(root[len(entry) + 1:], f)
-                    yield entry, entryPath
-        elif entry.endswith('.jar') or entry.endswith('.zip'):
-            with zipfile.ZipFile(entry, 'r') as zf:
-                for zi in zf.infolist():
-                    entryPath = zi.filename
-                    yield zf, entryPath
-
-def sorted_deps(projectNames=None, includeLibs=False, includeJreLibs=False, includeAnnotationProcessors=False):
-    """
-    Gets projects and libraries sorted such that dependencies
-    are before the projects that depend on them. Unless 'includeLibs' is
-    true, libraries are omitted from the result.
-    """
-    projects = projects_from_names(projectNames)
-
-    return sorted_project_deps(projects, includeLibs=includeLibs, includeJreLibs=includeJreLibs, includeAnnotationProcessors=includeAnnotationProcessors)
-
-def sorted_dists():
-    """
-    Gets distributions sorted such that each distribution comes after
-    any distributions it depends upon.
-    """
-    dists = []
-    def add_dist(dist):
-        if not dist in dists:
-            for depDist in [distribution(name) for name in dist.distDependencies]:
-                add_dist(depDist)
-            if not dist in dists:
-                dists.append(dist)
-
-    for d in _dists.itervalues():
-        add_dist(d)
-    return dists
-
-def sorted_project_deps(projects, includeLibs=False, includeJreLibs=False, includeAnnotationProcessors=False):
-    deps = []
-    for p in projects:
-        p.all_deps(deps, includeLibs=includeLibs, includeJreLibs=includeJreLibs, includeAnnotationProcessors=includeAnnotationProcessors)
-    return deps
-
-class ArgParser(ArgumentParser):
-    # Override parent to append the list of available commands
-    def format_help(self):
-        return ArgumentParser.format_help(self) + _format_commands()
-
-
-    def __init__(self):
-        self.java_initialized = False
-        # this doesn't resolve the right way, but it is unclear how to override _handle_conflict_resolve in _ActionsContainer
-        ArgumentParser.__init__(self, prog='mx', conflict_handler='resolve')
-
-        self.add_argument('-v', action='store_true', dest='verbose', help='enable verbose output')
-        self.add_argument('-V', action='store_true', dest='very_verbose', help='enable very verbose output')
-        self.add_argument('-w', action='store_true', dest='warn', help='enable warning messages')
-        self.add_argument('-p', '--primary-suite-path', help='set the primary suite directory', metavar='<path>')
-        self.add_argument('--dbg', type=int, dest='java_dbg_port', help='make Java processes wait on <port> for a debugger', metavar='<port>')
-        self.add_argument('-d', action='store_const', const=8000, dest='java_dbg_port', help='alias for "--dbg 8000"')
-        self.add_argument('--attach', dest='attach', help='Connect to existing server running at [<address>:]<port>')
-        self.add_argument('--backup-modified', action='store_true', help='backup generated files if they pre-existed and are modified')
-        self.add_argument('--cp-pfx', dest='cp_prefix', help='class path prefix', metavar='<arg>')
-        self.add_argument('--cp-sfx', dest='cp_suffix', help='class path suffix', metavar='<arg>')
-        self.add_argument('--J', dest='java_args', help='Java VM arguments (e.g. --J @-dsa)', metavar='@<args>')
-        self.add_argument('--Jp', action='append', dest='java_args_pfx', help='prefix Java VM arguments (e.g. --Jp @-dsa)', metavar='@<args>', default=[])
-        self.add_argument('--Ja', action='append', dest='java_args_sfx', help='suffix Java VM arguments (e.g. --Ja @-dsa)', metavar='@<args>', default=[])
-        self.add_argument('--user-home', help="user's home directory", metavar='<path>', default=os.path.expanduser('~'))
-        self.add_argument('--java-home', help='primary JDK directory (must be JDK 7 or later)', metavar='<path>')
-        self.add_argument('--extra-java-homes', help='secondary JDK directories separated by "' + os.pathsep + '"', metavar='<path>')
-        self.add_argument('--strict-compliance', action='store_true', dest='strict_compliance', help='Projects with an explicit compliance will only be built if a JDK exactly matching the compliance is available', default=False)
-        self.add_argument('--ignore-project', action='append', dest='ignored_projects', help='name of project to ignore', metavar='<name>', default=[])
-        self.add_argument('--kill-with-sigquit', action='store_true', dest='killwithsigquit', help='send sigquit first before killing child processes')
-        if get_os() != 'windows':
-            # Timeouts are (currently) implemented with Unix-specific functionality
-            self.add_argument('--timeout', help='timeout (in seconds) for command', type=int, default=0, metavar='<secs>')
-            self.add_argument('--ptimeout', help='timeout (in seconds) for subprocesses', type=int, default=0, metavar='<secs>')
-
-    def _parse_cmd_line(self, args=None):
-        if args is None:
-            args = sys.argv[1:]
-
-        self.add_argument('commandAndArgs', nargs=REMAINDER, metavar='command args...')
-
-        opts = self.parse_args()
-
-        global _opts
-        _opts = opts
-
-        # Give the timeout options a default value to avoid the need for hasattr() tests
-        opts.__dict__.setdefault('timeout', 0)
-        opts.__dict__.setdefault('ptimeout', 0)
-
-        if opts.very_verbose:
-            opts.verbose = True
-
-        if opts.user_home is None or opts.user_home == '':
-            abort('Could not find user home. Use --user-home option or ensure HOME environment variable is set.')
-
-        if opts.java_home:
-            os.environ['JAVA_HOME'] = opts.java_home
-        os.environ['HOME'] = opts.user_home
-
-        if os.environ.get('STRICT_COMPLIANCE'):
-            _opts.strict_compliance = True
-
-        opts.ignored_projects = opts.ignored_projects + os.environ.get('IGNORED_PROJECTS', '').split(',')
-
-        commandAndArgs = opts.__dict__.pop('commandAndArgs')
-        return opts, commandAndArgs
-
-    def _handle_conflict_resolve(self, action, conflicting_actions):
-        self._handle_conflict_error(action, conflicting_actions)
-
-def _format_commands():
-    msg = '\navailable commands:\n\n'
-    for cmd in sorted(_commands.iterkeys()):
-        c, _ = _commands[cmd][:2]
-        doc = c.__doc__
-        if doc is None:
-            doc = ''
-        msg += ' {0:<20} {1}\n'.format(cmd, doc.split('\n', 1)[0])
-    return msg + '\n'
-
-_canceled_java_requests = set()
-
-def java(requiredCompliance=None, purpose=None, cancel=None):
-    """
-    Get a JavaConfig object containing Java command launch details.
-    If requiredCompliance is None, the compliance level specified by --java-home/JAVA_HOME
-    is returned. Otherwise, the JavaConfig exactly matching requiredCompliance is returned
-    or None if there is no exact match.
-    """
-
-    global _default_java_home
-    if cancel and (requiredCompliance, purpose) in _canceled_java_requests:
-        return None
-
-    if not requiredCompliance:
-        if not _default_java_home:
-            _default_java_home = _find_jdk(purpose=purpose, cancel=cancel)
-            if not _default_java_home:
-                assert cancel
-                _canceled_java_requests.add((requiredCompliance, purpose))
-        return _default_java_home
-
-    if _opts.strict_compliance:
-        complianceCheck = requiredCompliance.exactMatch
-        desc = str(requiredCompliance)
-    else:
-        compVersion = VersionSpec(str(requiredCompliance))
-        complianceCheck = lambda version: version >= compVersion
-        desc = '>=' + str(requiredCompliance)
-
-    for java in _extra_java_homes:
-        if complianceCheck(java.version):
-            return java
-
-    jdk = _find_jdk(versionCheck=complianceCheck, versionDescription=desc, purpose=purpose, cancel=cancel)
-    if jdk:
-        assert jdk not in _extra_java_homes
-        _extra_java_homes.append(jdk)
-    else:
-        assert cancel
-        _canceled_java_requests.add((requiredCompliance, purpose))
-    return jdk
-
-def java_version(versionCheck, versionDescription=None, purpose=None):
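-    """
-    Gets a JavaConfig whose version satisfies 'versionCheck', first consulting
-    the default JDK and the extra JDKs before searching via _find_jdk (and
-    remembering the result).
-    """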
-    if _default_java_home and versionCheck(_default_java_home.version):
-        return _default_java_home
-    for java in _extra_java_homes:
-        if versionCheck(java.version):
-            return java
-    jdk = _find_jdk(versionCheck, versionDescription, purpose)
-    assert jdk not in _extra_java_homes
-    _extra_java_homes.append(jdk)
-    return jdk
-
-def _find_jdk(versionCheck=None, versionDescription=None, purpose=None, cancel=None):
-    assert not versionDescription or versionCheck
-    # Record whether this is a request for the default JDK before the catch-all
-    # version check is installed, otherwise isDefaultJdk could never be True.
-    isDefaultJdk = not versionCheck and not purpose
-    if not versionCheck:
-        versionCheck = lambda v: True
-
-    candidateJdks = []
-    source = ''
-    if _opts.java_home:
-        candidateJdks.append(_opts.java_home)
-        source = '--java-home'
-    elif os.environ.get('JAVA_HOME'):
-        candidateJdks.append(os.environ.get('JAVA_HOME'))
-        source = 'JAVA_HOME'
-
-    result = _find_jdk_in_candidates(candidateJdks, versionCheck, warn=True, source=source)
-    if result:
-        return result
-
-    candidateJdks = []
-
-    if _opts.extra_java_homes:
-        candidateJdks += _opts.extra_java_homes.split(os.pathsep)
-        source = '--extra-java-homes'
-    elif os.environ.get('EXTRA_JAVA_HOMES'):
-        candidateJdks += os.environ.get('EXTRA_JAVA_HOMES').split(os.pathsep)
-        source = 'EXTRA_JAVA_HOMES'
-
-    result = _find_jdk_in_candidates(candidateJdks, versionCheck, warn=True, source=source)
-    if not result:
-        candidateJdks = []
-        source = ''
-
-        if get_os() == 'darwin':
-            base = '/Library/Java/JavaVirtualMachines'
-            if exists(base):
-                candidateJdks = [join(base, n, 'Contents/Home') for n in os.listdir(base)]
-        elif get_os() == 'linux':
-            base = '/usr/lib/jvm'
-            if exists(base):
-                candidateJdks = [join(base, n) for n in os.listdir(base)]
-            base = '/usr/java'
-            if exists(base):
-                candidateJdks += [join(base, n) for n in os.listdir(base)]
-        elif get_os() == 'solaris':
-            base = '/usr/jdk/instances'
-            if exists(base):
-                candidateJdks = [join(base, n) for n in os.listdir(base)]
-        elif get_os() == 'windows':
-            base = r'C:\Program Files\Java'
-            if exists(base):
-                candidateJdks = [join(base, n) for n in os.listdir(base)]
-
-        configs = _filtered_jdk_configs(candidateJdks, versionCheck)
-    else:
-        if not isDefaultJdk:
-            return result
-        configs = [result]
-
-    if len(configs) > 1:
-        if not is_interactive():
-            msg = "Multiple possible choices for a JDK"
-            if purpose:
-                msg += ' for ' + purpose
-            msg += ': '
-            if versionDescription:
-                msg += '(' + versionDescription + ')'
-            selected = configs[0]
-            msg += ". Selecting " + str(selected)
-            log(msg)
-        else:
-            msg = 'Please select a '
-            if isDefaultJdk:
-                msg += 'default '
-            msg += 'JDK'
-            if purpose:
-                msg += ' for ' + purpose
-            msg += ': '
-            if versionDescription:
-                msg += '(' + versionDescription + ')'
-            log(msg)
-            choices = configs + ['<other>']
-            if cancel:
-                choices.append('Cancel (' + cancel + ')')
-            selected = select_items(choices, allowMultiple=False)
-            if isinstance(selected, types.StringTypes) and selected == '<other>':
-                selected = None
-            if isinstance(selected, types.StringTypes) and selected == 'Cancel (' + cancel + ')':
-                return None
-    elif len(configs) == 1:
-        selected = configs[0]
-        msg = 'Selected ' + str(selected) + ' as '
-        if isDefaultJdk:
-            msg += 'default '
-        msg += 'JDK'
-        if versionDescription:
-            msg = msg + ' ' + versionDescription
-        if purpose:
-            msg += ' for ' + purpose
-        log(msg)
-    else:
-        msg = 'Could not find any JDK'
-        if purpose:
-            msg += ' for ' + purpose
-        msg += ' '
-        if versionDescription:
-            msg = msg + '(' + versionDescription + ')'
-        log(msg)
-        selected = None
-
-    while not selected:
-        jdkLocation = raw_input('Enter path of JDK: ')
-        selected = _find_jdk_in_candidates([jdkLocation], versionCheck, warn=True)
-
-    varName = 'JAVA_HOME' if isDefaultJdk else 'EXTRA_JAVA_HOMES'
-    allowMultiple = not isDefaultJdk
-    envPath = join(_primary_suite.mxDir, 'env')
-    if is_interactive() and ask_yes_no('Persist this setting by adding "{0}={1}" to {2}'.format(varName, selected.jdk, envPath), 'y'):
-        envLines = []
-        with open(envPath) as fp:
-            append = True
-            for line in fp:
-                if line.rstrip().startswith(varName):
-                    _, currentValue = line.split('=', 1)
-                    currentValue = currentValue.strip()
-                    if not allowMultiple and currentValue:
-                        if not ask_yes_no('{0} is already set to {1}, overwrite with {2}?'.format(varName, currentValue, selected.jdk), 'n'):
-                            return selected
-                        else:
-                            line = varName + '=' + selected.jdk + os.linesep
-                    else:
-                        line = line.rstrip()
-                        if currentValue:
-                            line += os.pathsep
-                        line += selected.jdk + os.linesep
-                    append = False
-                envLines.append(line)
-        if append:
-            envLines.append(varName + '=' + selected.jdk)
-
-        with open(envPath, 'w') as fp:
-            for line in envLines:
-                fp.write(line)
-
-    if varName == 'JAVA_HOME':
-        os.environ['JAVA_HOME'] = selected.jdk
-
-    return selected
-
-def is_interactive():
-    return sys.__stdin__.isatty()
-
-def _filtered_jdk_configs(candidates, versionCheck, warn=False, source=None):
-    filtered = []
-    for candidate in candidates:
-        try:
-            config = JavaConfig(candidate)
-            if versionCheck(config.version):
-                filtered.append(config)
-        except JavaConfigException as e:
-            if warn:
-                log("Path in '" + source + "' does not point to a JDK (" + e.message + ")")
-    return filtered
-
-def _find_jdk_in_candidates(candidates, versionCheck, warn=False, source=None):
-    filtered = _filtered_jdk_configs(candidates, versionCheck, warn, source)
-    if filtered:
-        return filtered[0]
-    return None
-
-
-def run_java(args, nonZeroIsFatal=True, out=None, err=None, cwd=None, addDefaultArgs=True, javaConfig=None):
-    if not javaConfig:
-        javaConfig = java()
-    return run(javaConfig.format_cmd(args, addDefaultArgs), nonZeroIsFatal=nonZeroIsFatal, out=out, err=err, cwd=cwd)
-
-def _kill_process_group(pid, sig):
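-    """
-    Sends 'sig' (SIGKILL if 'sig' is not given) to the process group containing
-    'pid'. Returns True if the signal was sent, logging an error and returning
-    False otherwise.
-    """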
-    if not sig:
-        sig = signal.SIGKILL
-    pgid = os.getpgid(pid)
-    try:
-        os.killpg(pgid, sig)
-        return True
-    except:
-        log('Error killing subprocess ' + str(pgid) + ': ' + str(sys.exc_info()[1]))
-        return False
-
-def _waitWithTimeout(process, args, timeout):
-    def _waitpid(pid):
-        while True:
-            try:
-                return os.waitpid(pid, os.WNOHANG)
-            except OSError as e:
-                if e.errno == errno.EINTR:
-                    continue
-                raise
-
-    def _returncode(status):
-        if os.WIFSIGNALED(status):
-            return -os.WTERMSIG(status)
-        elif os.WIFEXITED(status):
-            return os.WEXITSTATUS(status)
-        else:
-            # Should never happen
-            raise RuntimeError("Unknown child exit status!")
-
-    end = time.time() + timeout
-    delay = 0.0005
-    while True:
-        (pid, status) = _waitpid(process.pid)
-        if pid == process.pid:
-            return _returncode(status)
-        remaining = end - time.time()
-        if remaining <= 0:
-            abort('Process timed out after {0} seconds: {1}'.format(timeout, ' '.join(args)))
-        delay = min(delay * 2, remaining, .05)
-        time.sleep(delay)
-
-# Makes the current subprocess accessible to the abort() function
-# This is a list of tuples of the subprocess.Popen or
-# multiprocessing.Process object and args.
-_currentSubprocesses = []
-
-def _addSubprocess(p, args):
-    entry = (p, args)
-    _currentSubprocesses.append(entry)
-    return entry
-
-def _removeSubprocess(entry):
-    if entry and entry in _currentSubprocesses:
-        try:
-            _currentSubprocesses.remove(entry)
-        except:
-            pass
-
-def waitOn(p):
-    if get_os() == 'windows':
-        # on windows use a poll loop, otherwise signal does not get handled
-        retcode = None
-        while retcode is None:
-            retcode = p.poll()
-            time.sleep(0.05)
-    else:
-        retcode = p.wait()
-    return retcode
-
-def run(args, nonZeroIsFatal=True, out=None, err=None, cwd=None, timeout=None, env=None):
-    """
-    Run a command in a subprocess, wait for it to complete and return the exit status of the process.
-    If the exit status is non-zero and `nonZeroIsFatal` is true, then mx is exited with
-    the same exit status.
-    Each line of the standard output and error streams of the subprocess is redirected to
-    out and err if they are callable objects.
-    """
-
-    assert isinstance(args, types.ListType), "'args' must be a list: " + str(args)
-    for arg in args:
-        assert isinstance(arg, types.StringTypes), 'argument is not a string: ' + str(arg)
-
-    if env is None:
-        env = os.environ.copy()
-
-    # Ideally the command line could be communicated directly in an environment
-    # variable. However, since environment variables share the same resource
-    # space as the command line itself (on Unix at least), this would cause the
-    # limit to be exceeded too easily.
-    with tempfile.NamedTemporaryFile(suffix='', prefix='mx_subprocess_command.', mode='w', delete=False) as fp:
-        subprocessCommandFile = fp.name
-        for arg in args:
-            # TODO: handle newlines in args once there's a use case
-            assert '\n' not in arg
-            print >> fp, arg
-    env['MX_SUBPROCESS_COMMAND_FILE'] = subprocessCommandFile
-
-    if _opts.verbose:
-        if _opts.very_verbose:
-            log('Environment variables:')
-            for key in sorted(env.keys()):
-                log('    ' + key + '=' + env[key])
-        log(' '.join(map(pipes.quote, args)))
-
-    if timeout is None and _opts.ptimeout != 0:
-        timeout = _opts.ptimeout
-
-    sub = None
-    try:
-        # On Unix, the new subprocess should be in a separate group so that a timeout alarm
-        # can use os.killpg() to kill the whole subprocess group
-        preexec_fn = None
-        creationflags = 0
-        if not is_jython():
-            if get_os() == 'windows':
-                creationflags = subprocess.CREATE_NEW_PROCESS_GROUP
-            else:
-                preexec_fn = os.setsid
-        def redirect(stream, f):
-            for line in iter(stream.readline, ''):
-                f(line)
-            stream.close()
-        stdout = out if not callable(out) else subprocess.PIPE
-        stderr = err if not callable(err) else subprocess.PIPE
-        p = subprocess.Popen(args, cwd=cwd, stdout=stdout, stderr=stderr, preexec_fn=preexec_fn, creationflags=creationflags, env=env)
-        sub = _addSubprocess(p, args)
-        joiners = []
-        if callable(out):
-            t = Thread(target=redirect, args=(p.stdout, out))
-            # Don't make the reader thread a daemon, otherwise output can be dropped
-            t.start()
-            joiners.append(t)
-        if callable(err):
-            t = Thread(target=redirect, args=(p.stderr, err))
-            # Don't make the reader thread a daemon, otherwise output can be dropped
-            t.start()
-            joiners.append(t)
-        while any([t.is_alive() for t in joiners]):
-            # Need to use timeout otherwise all signals (including CTRL-C) are blocked
-            # see: http://bugs.python.org/issue1167930
-            for t in joiners:
-                t.join(10)
-        if timeout is None or timeout == 0:
-            retcode = waitOn(p)
-        else:
-            if get_os() == 'windows':
-                abort('Use of timeout not (yet) supported on Windows')
-            retcode = _waitWithTimeout(p, args, timeout)
-    except OSError as e:
-        log('Error executing \'' + ' '.join(args) + '\': ' + str(e))
-        if _opts.verbose:
-            raise e
-        abort(e.errno)
-    except KeyboardInterrupt:
-        abort(1)
-    finally:
-        _removeSubprocess(sub)
-        os.remove(subprocessCommandFile)
-
-    if retcode and nonZeroIsFatal:
-        if _opts.verbose:
-            if _opts.very_verbose:
-                raise subprocess.CalledProcessError(retcode, ' '.join(args))
-            else:
-                log('[exit code: ' + str(retcode) + ']')
-        abort(retcode)
-
-    return retcode
-
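-# A minimal sketch of the callable out/err redirection described in run()'s docstring:
-# each output line (with its trailing newline) is handed to the callables. The helper
-# name is illustrative; cmd is assumed to be a list of strings, e.g. ['ls', '-l'].
-def _example_capture_output(cmd):
-    lines = []
-    retcode = run(cmd, out=lines.append, err=lines.append, nonZeroIsFatal=False)
-    return retcode, lines
-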
-def exe_suffix(name):
-    """
-    Gets the platform specific suffix for an executable
-    """
-    if get_os() == 'windows':
-        return name + '.exe'
-    return name
-
-def add_lib_prefix(name):
-    """
-    Adds the platform specific library prefix to a name
-    """
-    os = get_os()
-    if os == 'linux' or os == 'solaris' or os == 'darwin':
-        return 'lib' + name
-    return name
-
-def add_lib_suffix(name):
-    """
-    Adds the platform specific library suffix to a name
-    """
-    os = get_os()
-    if os == 'windows':
-        return name + '.dll'
-    if os == 'linux' or os == 'solaris':
-        return name + '.so'
-    if os == 'darwin':
-        return name + '.dylib'
-    return name
-
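-# A small sketch combining the naming helpers above; 'foo' is a placeholder base name.
-# On Linux/Solaris this yields ('foo', 'libfoo.so'), on Darwin ('foo', 'libfoo.dylib'),
-# and on Windows ('foo.exe', 'foo.dll').
-def _example_platform_names(name='foo'):
-    return exe_suffix(name), add_lib_suffix(add_lib_prefix(name))
-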
-class DuplicateSuppressingStream:
-    """
-    Utility for filtering duplicate lines.
-
-    Creates an object that will suppress duplicate lines sent to 'out'.
-    The lines considered for suppression are those that contain one of the
-    strings in 'restrictTo' if it is not None.
-    """
-    def __init__(self, restrictTo=None, out=sys.stdout):
-        self.restrictTo = restrictTo
-        self.seen = set()
-        self.out = out
-        self.currentFilteredLineCount = 0
-        self.currentFilteredTime = None
-
-    def isSuppressionCandidate(self, line):
-        if self.restrictTo:
-            for p in self.restrictTo:
-                if p in line:
-                    return True
-            return False
-        else:
-            return True
-
-    def write(self, line):
-        if self.isSuppressionCandidate(line):
-            if line in self.seen:
-                self.currentFilteredLineCount += 1
-                if self.currentFilteredTime:
-                    if time.time() - self.currentFilteredTime > 1 * 60:
-                        self.out.write("  Filtered " + str(self.currentFilteredLineCount) + " repeated lines...\n")
-                        self.currentFilteredTime = time.time()
-                else:
-                    self.currentFilteredTime = time.time()
-                return
-            self.seen.add(line)
-        self.currentFilteredLineCount = 0
-        self.out.write(line)
-        self.currentFilteredTime = None
-
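-# Illustrative sketch: wiring the stream into run() so that repeated lines containing
-# 'warning' are collapsed while all other output passes through. The helper name and
-# the restrictTo pattern are assumptions made for this example.
-def _example_run_with_dedup(cmd):
-    dedup = DuplicateSuppressingStream(restrictTo=['warning'])
-    return run(cmd, out=dedup.write, err=dedup.write, nonZeroIsFatal=False)
-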
-class JavaCompliance:
-    """
-    A JavaCompliance simplifies comparing Java compliance values extracted from a JDK version string.
-    """
-    def __init__(self, ver):
-        m = re.match(r'1\.(\d+).*', ver)
-        assert m is not None, 'not a recognized version string: ' + ver
-        self.value = int(m.group(1))
-
-    def __str__(self):
-        return '1.' + str(self.value)
-
-    def __cmp__(self, other):
-        if isinstance(other, types.StringType):
-            other = JavaCompliance(other)
-
-        return cmp(self.value, other.value)
-
-    def __hash__(self):
-        return self.value.__hash__()
-
-    def exactMatch(self, version):
-        assert isinstance(version, VersionSpec)
-        return len(version.parts) > 1 and version.parts[0] == 1 and version.parts[1] == self.value
-
-class VersionSpec:
-    """
-    A version specification as defined in JSR-56.
-    """
-    def __init__(self, versionString):
-        validChar = r'[\x21-\x25\x27-\x29\x2c\x2f-\x5e\x60-\x7f]'
-        separator = r'[.\-_]'
-        m = re.match("^" + validChar + '+(' + separator + validChar + '+)*$', versionString)
-        assert m is not None, 'not a recognized version string: ' + versionString
-        self.versionString = versionString
-        self.parts = [int(f) if f.isdigit() else f for f in re.split(separator, versionString)]
-
-    def __str__(self):
-        return self.versionString
-
-    def __cmp__(self, other):
-        return cmp(self.parts, other.parts)
-
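-# A short sketch of how the two classes above relate: JavaCompliance orders by the
-# '1.<n>' level, VersionSpec orders by its dot/dash/underscore separated parts, and
-# exactMatch() ties them together. The version strings used here are illustrative.
-def _example_version_checks():
-    jc = JavaCompliance('1.8')
-    spec = VersionSpec('1.8.0_45')
-    return jc >= JavaCompliance('1.7') and jc.exactMatch(spec)
-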
-def _filter_non_existant_paths(paths):
-    if paths:
-        return os.pathsep.join([path for path in _separatedCygpathW2U(paths).split(os.pathsep) if exists(path)])
-    return None
-
-class JavaConfigException(Exception):
-    def __init__(self, value):
-        Exception.__init__(self, value)
-
-class JavaConfig:
-    """
-    A JavaConfig object encapsulates info on how Java commands are run.
-    """
-    def __init__(self, java_home):
-        self.jdk = java_home
-        self.jar = exe_suffix(join(self.jdk, 'bin', 'jar'))
-        self.java = exe_suffix(join(self.jdk, 'bin', 'java'))
-        self.javac = exe_suffix(join(self.jdk, 'bin', 'javac'))
-        self.javap = exe_suffix(join(self.jdk, 'bin', 'javap'))
-        self.javadoc = exe_suffix(join(self.jdk, 'bin', 'javadoc'))
-        self.pack200 = exe_suffix(join(self.jdk, 'bin', 'pack200'))
-        self.toolsjar = join(self.jdk, 'lib', 'tools.jar')
-        self._classpaths_initialized = False
-        self._bootclasspath = None
-        self._extdirs = None
-        self._endorseddirs = None
-
-        if not exists(self.java):
-            raise JavaConfigException('Java launcher does not exist: ' + self.java)
-
-        def delAtAndSplit(s):
-            return shlex.split(s.lstrip('@'))
-
-        self.java_args = delAtAndSplit(_opts.java_args) if _opts.java_args else []
-        self.java_args_pfx = sum(map(delAtAndSplit, _opts.java_args_pfx), [])
-        self.java_args_sfx = sum(map(delAtAndSplit, _opts.java_args_sfx), [])
-
-        # Prepend the -d64 VM option only if the java command supports it
-        try:
-            output = subprocess.check_output([self.java, '-d64', '-version'], stderr=subprocess.STDOUT)
-            self.java_args = ['-d64'] + self.java_args
-        except subprocess.CalledProcessError as e:
-            try:
-                output = subprocess.check_output([self.java, '-version'], stderr=subprocess.STDOUT)
-            except subprocess.CalledProcessError as e:
-                raise JavaConfigException(str(e.returncode) + ": " + e.output)
-
-        def _checkOutput(out):
-            return 'version' in out
-
-        # hotspot can print a warning, e.g. if there's a .hotspot_compiler file in the cwd
-        output = output.split('\n')
-        version = None
-        for o in output:
-            if _checkOutput(o):
-                assert version is None
-                version = o
-
-        self.version = VersionSpec(version.split()[2].strip('"'))
-        self.javaCompliance = JavaCompliance(self.version.versionString)
-
-        attach = None
-        if _opts.attach is not None:
-            attach = 'server=n,address=' + _opts.attach
-        else:
-            if _opts.java_dbg_port is not None:
-                attach = 'server=y,address=' + str(_opts.java_dbg_port)
-
-        if attach is not None:
-            self.java_args += ['-Xdebug', '-Xrunjdwp:transport=dt_socket,' + attach + ',suspend=y']
-
-    def _init_classpaths(self):
-        if not self._classpaths_initialized:
-            _, binDir = _compile_mx_class('ClasspathDump', jdk=self)
-            self._bootclasspath, self._extdirs, self._endorseddirs = [x if x != 'null' else None for x in subprocess.check_output([self.java, '-cp', _cygpathU2W(binDir), 'ClasspathDump'], stderr=subprocess.PIPE).split('|')]
-            if self.javaCompliance <= JavaCompliance('1.8'):
-                # All 3 system properties accessed by ClasspathDump are expected to exist
-                if not self._bootclasspath or not self._extdirs or not self._endorseddirs:
-                    warn("Could not find all classpaths: boot='" + str(self._bootclasspath) + "' extdirs='" + str(self._extdirs) + "' endorseddirs='" + str(self._endorseddirs) + "'")
-            self._bootclasspath = _filter_non_existant_paths(self._bootclasspath)
-            self._extdirs = _filter_non_existant_paths(self._extdirs)
-            self._endorseddirs = _filter_non_existant_paths(self._endorseddirs)
-            self._classpaths_initialized = True
-
-    def __repr__(self):
-        return "JavaConfig(" + str(self.jdk) + ")"
-
-    def __str__(self):
-        return "Java " + str(self.version) + " (" + str(self.javaCompliance) + ") from " + str(self.jdk)
-
-    def __hash__(self):
-        return hash(self.jdk)
-
-    def __cmp__(self, other):
-        if isinstance(other, JavaConfig):
-            complianceCmp = cmp(self.javaCompliance, other.javaCompliance)
-            if complianceCmp:
-                return complianceCmp
-            versionCmp = cmp(self.version, other.version)
-            if versionCmp:
-                return versionCmp
-            return cmp(self.jdk, other.jdk)
-        raise TypeError()
-
-    def format_cmd(self, args, addDefaultArgs):
-        if addDefaultArgs:
-            return [self.java] + self.processArgs(args)
-        else:
-            return [self.java] + args
-
-    def processArgs(self, args):
-        return self.java_args_pfx + self.java_args + self.java_args_sfx + args
-
-    def bootclasspath(self):
-        self._init_classpaths()
-        return _separatedCygpathU2W(self._bootclasspath)
-
-
-    def javadocLibOptions(self, args):
-        """
-        Add javadoc style options for the library paths of this JDK.
-        """
-        self._init_classpaths()
-        if args is None:
-            args = []
-        if self._bootclasspath:
-            args.append('-bootclasspath')
-            args.append(self._bootclasspath)
-        if self._extdirs:
-            args.append('-extdirs')
-            args.append(self._extdirs)
-        return args
-
-    def javacLibOptions(self, args):
-        """
-        Add javac style options for the library paths of this JDK.
-        """
-        args = self.javadocLibOptions(args)
-        if self._endorseddirs:
-            args.append('-endorseddirs')
-            args.append(self._endorseddirs)
-        return args
-
-    def containsJar(self, jar):
-        self._init_classpaths()
-
-        if self._bootclasspath:
-            for e in self._bootclasspath.split(os.pathsep):
-                if basename(e) == jar:
-                    return True
-        if self._extdirs:
-            for d in self._extdirs.split(os.pathsep):
-                if len(d) and jar in os.listdir(d):
-                    return True
-        if self._endorseddirs:
-            for d in self._endorseddirs.split(os.pathsep):
-                if len(d) and jar in os.listdir(d):
-                    return True
-        return False
-
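-# A minimal sketch of the central use of JavaConfig: format_cmd() prepends the launcher
-# and any configured prefix/suffix VM arguments to a command line (this is what run_java
-# does with the default JDK). mainClass and cp are caller-supplied placeholders.
-def _example_launch(mainClass, cp):
-    jdk = java()
-    return run(jdk.format_cmd(['-cp', cp, mainClass], addDefaultArgs=True))
-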
-def check_get_env(key):
-    """
-    Gets an environment variable, aborting with a useful message if it is not set.
-    """
-    value = get_env(key)
-    if value is None:
-        abort('Required environment variable ' + key + ' must be set')
-    return value
-
-def get_env(key, default=None):
-    """
-    Gets an environment variable.
-    """
-    value = os.environ.get(key, default)
-    return value
-
-def logv(msg=None):
-    if _opts.verbose:
-        log(msg)
-
-def log(msg=None):
-    """
-    Write a message to the console.
-    All script output goes through this method, thus allowing a subclass
-    to redirect it.
-    """
-    if msg is None:
-        print
-    else:
-        print msg
-
-def expand_project_in_class_path_arg(cpArg):
-    cp = []
-    for part in cpArg.split(os.pathsep):
-        if part.startswith('@'):
-            cp += classpath(part[1:]).split(os.pathsep)
-        else:
-            cp.append(part)
-    return os.pathsep.join(cp)
-
-def expand_project_in_args(args):
-    for i in range(len(args)):
-        if args[i] == '-cp' or args[i] == '-classpath':
-            if i + 1 < len(args):
-                args[i + 1] = expand_project_in_class_path_arg(args[i + 1])
-            return
-
-
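-# Illustrative sketch: a leading '@' entry names a project whose class path is spliced in,
-# while plain entries pass through unchanged. 'com.example.project' and the jar path are
-# placeholders, not real mx projects.
-def _example_expanded_classpath():
-    return expand_project_in_class_path_arg('@com.example.project' + os.pathsep + '/tmp/extra.jar')
-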
-def gmake_cmd():
-    for a in ['make', 'gmake', 'gnumake']:
-        try:
-            output = subprocess.check_output([a, '--version'])
-            if 'GNU' in output:
-                return a
-        except:
-            pass
-    abort('Could not find a GNU make executable on the current path.')
-
-def expandvars_in_property(value):
-    result = expandvars(value)
-    if '$' in result or '%' in result:
-        abort('Property contains an undefined environment variable: ' + value)
-    return result
-
-def _send_sigquit():
-    for p, args in _currentSubprocesses:
-
-        def _isJava():
-            if args:
-                name = args[0].split(os.sep)[-1]
-                return name == "java"
-            return False
-
-        if p is not None and _isJava():
-            if get_os() == 'windows':
-                log("mx: implement me! want to send SIGQUIT to my child process")
-            else:
-                _kill_process_group(p.pid, sig=signal.SIGQUIT)
-            time.sleep(0.1)
-
-def abort(codeOrMessage):
-    """
-    Aborts the program with a SystemExit exception.
-    If 'codeOrMessage' is a plain integer, it specifies the system exit status;
-    if it is None, the exit status is zero; if it has another type (such as a string),
-    the object's value is printed and the exit status is one.
-    """
-
-    if _opts and _opts.killwithsigquit:
-        _send_sigquit()
-
-    def is_alive(p):
-        if isinstance(p, subprocess.Popen):
-            return p.poll() is None
-        assert is_jython() or isinstance(p, multiprocessing.Process), p
-        return p.is_alive()
-
-    for p, args in _currentSubprocesses:
-        if is_alive(p):
-            try:
-                if get_os() == 'windows':
-                    p.terminate()
-                else:
-                    _kill_process_group(p.pid, signal.SIGKILL)
-            except BaseException as e:
-                if is_alive(p):
-                    log('error while killing subprocess {0} "{1}": {2}'.format(p.pid, ' '.join(args), e))
-
-    if _opts and _opts.verbose:
-        import traceback
-        traceback.print_stack()
-    raise SystemExit(codeOrMessage)
-
-def download(path, urls, verbose=False):
-    """
-    Attempts to download content from each URL in a list, stopping after the first successful download.
-    If the content cannot be retrieved from any URL, the program is aborted. The downloaded content
-    is written to the file indicated by 'path'.
-    """
-    d = dirname(path)
-    if d != '' and not exists(d):
-        os.makedirs(d)
-
-    assert not path.endswith(os.sep)
-
-    _, binDir = _compile_mx_class('URLConnectionDownload')
-
-    verboseArgs = []
-    if verbose or sys.stderr.isatty():
-        verboseArgs.append("-v")
-    if run([java().java, '-cp', _cygpathU2W(binDir), 'URLConnectionDownload', _cygpathU2W(path)] + verboseArgs + urls, nonZeroIsFatal=False) == 0:
-        return
-
-    abort('Could not download to ' + path + ' from any of the following URLs:\n\n    ' +
-              '\n    '.join(urls) + '\n\nPlease use a web browser to do the download manually')
-
-def update_file(path, content):
-    """
-    Updates a file with some given content if the content differs from what's in
-    the file already. The return value indicates if the file was updated.
-    """
-    existed = exists(path)
-    try:
-        old = None
-        if existed:
-            with open(path, 'rb') as f:
-                old = f.read()
-
-        if old == content:
-            return False
-
-        if existed and _opts.backup_modified:
-            shutil.move(path, path + '.orig')
-
-        with open(path, 'wb') as f:
-            f.write(content)
-
-        log(('modified ' if existed else 'created ') + path)
-        return True
-    except IOError as e:
-        abort('Error while writing to ' + path + ': ' + str(e))
-
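-# A tiny sketch of the conditional-write contract documented above: the file is rewritten
-# (and optionally backed up) only when the new content differs, and the return value says
-# whether anything changed. The path and content are placeholders.
-def _example_write_if_changed(path):
-    return update_file(path, '# generated - do not edit\n')
-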
-# Builtin commands
-
-def _defaultEcjPath():
-    return get_env('JDT', join(_primary_suite.mxDir, 'ecj.jar'))
-
-class JavaCompileTask:
-    def __init__(self, args, proj, reason, javafilelist, jdk, outputDir, jdtJar, deps):
-        self.proj = proj
-        self.reason = reason
-        self.javafilelist = javafilelist
-        self.deps = deps
-        self.jdk = jdk
-        self.outputDir = outputDir
-        self.done = False
-        self.jdtJar = jdtJar
-        self.args = args
-
-    def __str__(self):
-        return self.proj.name
-
-    def logCompilation(self, compiler):
-        log('Compiling Java sources for {0} with {1}... [{2}]'.format(self.proj.name, compiler, self.reason))
-
-    def execute(self):
-        argfileName = join(self.proj.dir, 'javafilelist.txt')
-        argfile = open(argfileName, 'wb')
-        argfile.write('\n'.join(map(_cygpathU2W, self.javafilelist)))
-        argfile.close()
-
-        processorArgs = []
-        processorPath = self.proj.annotation_processors_path()
-        if processorPath:
-            genDir = self.proj.source_gen_dir()
-            if exists(genDir):
-                shutil.rmtree(genDir)
-            os.mkdir(genDir)
-            processorArgs += ['-processorpath', _separatedCygpathU2W(join(processorPath)), '-s', _cygpathU2W(genDir)]
-        else:
-            processorArgs += ['-proc:none']
-
-        args = self.args
-        jdk = self.jdk
-        outputDir = _cygpathU2W(self.outputDir)
-        compliance = str(jdk.javaCompliance)
-        cp = _separatedCygpathU2W(classpath(self.proj.name, includeSelf=True))
-        toBeDeleted = [argfileName]
-
-        try:
-            if not self.jdtJar:
-                mainJava = java()
-                if not args.error_prone:
-                    javac = args.alt_javac if args.alt_javac else mainJava.javac
-                    self.logCompilation('javac' if not args.alt_javac else args.alt_javac)
-                    javacCmd = [javac, '-g', '-J-Xmx1g', '-source', compliance, '-target', compliance, '-classpath', cp, '-d', outputDir]
-                    jdk.javacLibOptions(javacCmd)
-
-                    attach = None
-                    if _opts.attach is not None:
-                        attach = 'server=n,address=' + _opts.attach
-                    else:
-                        if _opts.java_dbg_port is not None:
-                            attach = 'server=y,address=' + str(_opts.java_dbg_port)
-
-                    if attach is not None:
-                        javacCmd += ['-J-Xdebug', '-J-Xrunjdwp:transport=dt_socket,' + attach + ',suspend=y']
-                    javacCmd += processorArgs
-                    javacCmd += ['@' + _cygpathU2W(argfile.name)]
-
-                    if not args.warnAPI:
-                        javacCmd.append('-XDignore.symbol.file')
-                    run(javacCmd)
-                else:
-                    self.logCompilation('javac (with error-prone)')
-                    javaArgs = ['-Xmx1g']
-                    javacArgs = ['-g', '-source', compliance, '-target', compliance, '-classpath', cp, '-d', outputDir]
-                    jdk.javacLibOptions(javacArgs)
-                    javacArgs += processorArgs
-                    javacArgs += ['@' + argfile.name]
-                    if not args.warnAPI:
-                        javacArgs.append('-XDignore.symbol.file')
-                    run_java(javaArgs + ['-cp', os.pathsep.join([mainJava.toolsjar, args.error_prone]), 'com.google.errorprone.ErrorProneCompiler'] + javacArgs)
-            else:
-                self.logCompilation('JDT')
-
-                jdtVmArgs = ['-Xmx1g', '-jar', _cygpathU2W(self.jdtJar)]
-
-                jdtArgs = ['-' + compliance,
-                         '-cp', cp, '-g', '-enableJavadoc',
-                         '-d', outputDir]
-                jdk.javacLibOptions(jdtArgs)
-                jdtArgs += processorArgs
-
-                jdtProperties = join(self.proj.dir, '.settings', 'org.eclipse.jdt.core.prefs')
-                rootJdtProperties = join(self.proj.suite.mxDir, 'eclipse-settings', 'org.eclipse.jdt.core.prefs')
-                if not exists(jdtProperties) or os.path.getmtime(jdtProperties) < os.path.getmtime(rootJdtProperties):
-                    # Try to fix a missing properties file by running eclipseinit
-                    _eclipseinit_project(self.proj)
-                if not exists(jdtProperties):
-                    log('JDT properties file {0} not found'.format(jdtProperties))
-                else:
-                    with open(jdtProperties) as fp:
-                        origContent = fp.read()
-                        content = origContent
-                        if self.proj.uses_annotation_processor_library():
-                            # unfortunately, the command line compiler doesn't let us ignore warnings for generated files only
-                            content = content.replace('=warning', '=ignore')
-                        elif args.jdt_warning_as_error:
-                            content = content.replace('=warning', '=error')
-                        if not args.jdt_show_task_tags:
-                            content = content + '\norg.eclipse.jdt.core.compiler.problem.tasks=ignore'
-                    if origContent != content:
-                        jdtPropertiesTmp = jdtProperties + '.tmp'
-                        with open(jdtPropertiesTmp, 'w') as fp:
-                            fp.write(content)
-                        toBeDeleted.append(jdtPropertiesTmp)
-                        jdtArgs += ['-properties', _cygpathU2W(jdtPropertiesTmp)]
-                    else:
-                        jdtArgs += ['-properties', _cygpathU2W(jdtProperties)]
-                jdtArgs.append('@' + _cygpathU2W(argfile.name))
-
-                run_java(jdtVmArgs + jdtArgs)
-
-            # Create annotation processor jar for a project that defines annotation processors
-            if self.proj.definedAnnotationProcessorsDist:
-                self.proj.definedAnnotationProcessorsDist.make_archive()
-
-        finally:
-            # Do not clean up temp files if verbose as there's
-            # a good chance the user wants to copy and paste the
-            # Java compiler command directly
-            if not _opts.verbose:
-                for n in toBeDeleted:
-                    os.remove(n)
-
-            self.done = True
-
-def build(args, parser=None):
-    """compile the Java and C sources, linking the latter
-
-    Compile all the Java and native source code using the appropriate compilers
-    and linkers for the various source code types."""
-
-    suppliedParser = parser is not None
-    if not suppliedParser:
-        parser = ArgumentParser(prog='mx build')
-
-    parser.add_argument('-f', action='store_true', dest='force', help='force build (disables timestamp checking)')
-    parser.add_argument('-c', action='store_true', dest='clean', help='removes existing build output')
-    parser.add_argument('-p', action='store_true', dest='parallelize', help='parallelizes Java compilation if possible')
-    parser.add_argument('--source', dest='compliance', help='Java compliance level for projects without an explicit one')
-    parser.add_argument('--Wapi', action='store_true', dest='warnAPI', help='show warnings about using internal APIs')
-    parser.add_argument('--check-distributions', action='store_true', dest='check_distributions', help='check built distributions for overlap')
-    parser.add_argument('--projects', action='store', help='comma separated projects to build (omit to build all projects)')
-    parser.add_argument('--only', action='store', help='comma separated projects to build, without checking their dependencies (omit to build all projects)')
-    parser.add_argument('--no-java', action='store_false', dest='java', help='do not build Java projects')
-    parser.add_argument('--no-native', action='store_false', dest='native', help='do not build native projects')
-    parser.add_argument('--jdt-warning-as-error', action='store_true', help='convert all Eclipse batch compiler warnings to errors')
-    parser.add_argument('--jdt-show-task-tags', action='store_true', help='show task tags as Eclipse batch compiler warnings')
-    parser.add_argument('--alt-javac', dest='alt_javac', help='path to alternative javac executable', metavar='<path>')
-    compilerSelect = parser.add_mutually_exclusive_group()
-    compilerSelect.add_argument('--error-prone', dest='error_prone', help='path to error-prone.jar', metavar='<path>')
-    compilerSelect.add_argument('--jdt', help='path to ecj.jar, the Eclipse batch compiler', default=_defaultEcjPath(), metavar='<path>')
-    compilerSelect.add_argument('--force-javac', action='store_true', dest='javac', help='use javac whether ecj.jar is found or not')
-
-    if suppliedParser:
-        parser.add_argument('remainder', nargs=REMAINDER, metavar='...')
-
-    args = parser.parse_args(args)
-
-    if is_jython():
-        if args.parallelize:
-            logv('[multiprocessing not available in jython]')
-            args.parallelize = False
-
-    jdtJar = None
-    if not args.javac and args.jdt is not None:
-        if not args.jdt.endswith('.jar'):
-            abort('Path for Eclipse batch compiler does not look like a jar file: ' + args.jdt)
-        jdtJar = args.jdt
-        if not exists(jdtJar):
-            if os.path.abspath(jdtJar) == os.path.abspath(_defaultEcjPath()) and get_env('JDT', None) is None:
-                # Silently ignore JDT if default location is used but does not exist
-                jdtJar = None
-            else:
-                abort('Eclipse batch compiler jar does not exist: ' + args.jdt)
-
-    if args.only is not None:
-        # N.B. This build will not include dependencies, not even annotation processor dependencies
-        sortedProjects = [project(name) for name in args.only.split(',')]
-    else:
-        if args.projects is not None:
-            projectNames = args.projects.split(',')
-        else:
-            projectNames = None
-
-        projects = _projects_opt_limit_to_suites(projects_from_names(projectNames))
-        # N.B. Limiting to a suite only affects the starting set of projects. Dependencies in other suites will still be compiled
-        sortedProjects = sorted_project_deps(projects, includeAnnotationProcessors=True)
-
-    if args.java and jdtJar:
-        ideinit([], refreshOnly=True, buildProcessorJars=False)
-
-    tasks = {}
-    updatedAnnotationProcessorDists = set()
-    for p in sortedProjects:
-        if p.native:
-            if args.native:
-                log('Calling GNU make {0}...'.format(p.dir))
-
-                if args.clean:
-                    run([gmake_cmd(), 'clean'], cwd=p.dir)
-
-                run([gmake_cmd()], cwd=p.dir)
-            continue
-        else:
-            if not args.java:
-                continue
-            if exists(join(p.dir, 'plugin.xml')):  # eclipse plugin project
-                continue
-
-        # skip building this Java project if its Java compliance level is "higher" than the configured JDK
-        requiredCompliance = p.javaCompliance if p.javaCompliance else JavaCompliance(args.compliance) if args.compliance else None
-        jdk = java(requiredCompliance)
-
-        outputDir = p.output_dir()
-
-        sourceDirs = p.source_dirs()
-        buildReason = None
-        if args.force:
-            buildReason = 'forced build'
-        elif args.clean:
-            buildReason = 'clean'
-
-        taskDeps = []
-        for dep in p.all_deps([], includeLibs=False, includeAnnotationProcessors=True):
-            taskDep = tasks.get(dep.name)
-            if taskDep:
-                if not buildReason:
-                    buildReason = dep.name + ' rebuilt'
-                taskDeps.append(taskDep)
-
-        javafilelist = []
-        nonjavafiletuples = []
-        for sourceDir in sourceDirs:
-            for root, _, files in os.walk(sourceDir):
-                javafiles = [join(root, name) for name in files if name.endswith('.java')]
-                javafilelist += javafiles
-
-                nonjavafiletuples += [(sourceDir, [join(root, name) for name in files if not name.endswith('.java')])]
-
-                if not buildReason:
-                    for javafile in javafiles:
-                        classfile = TimeStampFile(outputDir + javafile[len(sourceDir):-len('java')] + 'class')
-                        if not classfile.exists() or classfile.isOlderThan(javafile):
-                            if basename(classfile.path) != 'package-info.class':
-                                buildReason = 'class file(s) out of date'
-                                break
-
-        apsOutOfDate = p.update_current_annotation_processors_file()
-        if apsOutOfDate:
-            buildReason = 'annotation processor(s) changed'
-
-        if not buildReason:
-            logv('[all class files for {0} are up to date - skipping]'.format(p.name))
-            _handleNonJavaFiles(outputDir, p, False, nonjavafiletuples)
-            continue
-
-        _handleNonJavaFiles(outputDir, p, True, nonjavafiletuples)
-
-        if len(javafilelist) == 0:
-            logv('[no Java sources for {0} - skipping]'.format(p.name))
-            continue
-
-        javafilelist = sorted(javafilelist)
-
-        task = JavaCompileTask(args, p, buildReason, javafilelist, jdk, outputDir, jdtJar, taskDeps)
-        if p.definedAnnotationProcessorsDist:
-            updatedAnnotationProcessorDists.add(p.definedAnnotationProcessorsDist)
-
-        tasks[p.name] = task
-        if args.parallelize:
-            # Best to initialize class paths on main process
-            jdk.bootclasspath()
-            task.proc = None
-        else:
-            task.execute()
-
-    if args.parallelize:
-
-        def joinTasks(tasks):
-            failed = []
-            for t in tasks:
-                t.proc.join()
-                _removeSubprocess(t.sub)
-                if t.proc.exitcode != 0:
-                    failed.append(t)
-            return failed
-
-        def checkTasks(tasks):
-            active = []
-            for t in tasks:
-                if t.proc.is_alive():
-                    active.append(t)
-                else:
-                    if t.proc.exitcode != 0:
-                        return ([], joinTasks(tasks))
-            return (active, [])
-
-        def remainingDepsDepth(task):
-            if task._d is None:
-                incompleteDeps = [d for d in task.deps if d.proc is None or d.proc.is_alive()]
-                if len(incompleteDeps) == 0:
-                    task._d = 0
-                else:
-                    task._d = max([remainingDepsDepth(t) for t in incompleteDeps]) + 1
-            return task._d
-
-        def compareTasks(t1, t2):
-            d = remainingDepsDepth(t1) - remainingDepsDepth(t2)
-            if d == 0:
-                t1Work = (1 + len(t1.proj.annotation_processors())) * len(t1.javafilelist)
-                t2Work = (1 + len(t2.proj.annotation_processors())) * len(t2.javafilelist)
-                d = t1Work - t2Work
-            return d
-
-        def sortWorklist(tasks):
-            for t in tasks:
-                t._d = None
-            return sorted(tasks, compareTasks)
-
-        cpus = cpu_count()
-        worklist = sortWorklist(tasks.values())
-        active = []
-        failed = []
-        while len(worklist) != 0:
-            while True:
-                active, failed = checkTasks(active)
-                if len(failed) != 0:
-                    assert not active, active
-                    break
-                if len(active) == cpus:
-                    # Sleep for 1 second
-                    time.sleep(1)
-                else:
-                    break
-
-            if len(failed) != 0:
-                break
-
-            def executeTask(task):
-                # Clear sub-process list cloned from parent process
-                del _currentSubprocesses[:]
-                task.execute()
-
-            def depsDone(task):
-                for d in task.deps:
-                    if d.proc is None or d.proc.exitcode is None:
-                        return False
-                return True
-
-            for task in worklist:
-                if depsDone(task):
-                    worklist.remove(task)
-                    task.proc = multiprocessing.Process(target=executeTask, args=(task,))
-                    task.proc.start()
-                    active.append(task)
-                    task.sub = _addSubprocess(task.proc, ['JavaCompileTask', str(task)])
-                if len(active) == cpus:
-                    break
-
-            worklist = sortWorklist(worklist)
-
-        failed += joinTasks(active)
-        if len(failed):
-            for t in failed:
-                log('Compiling {0} failed'.format(t.proj.name))
-            abort('{0} Java compilation tasks failed'.format(len(failed)))
-
-    if args.java and not args.only:
-        files = []
-        for dist in sorted_dists():
-            if dist not in updatedAnnotationProcessorDists:
-                archive(['@' + dist.name])
-            if args.check_distributions and not dist.isProcessorDistribution:
-                with zipfile.ZipFile(dist.path, 'r') as zf:
-                    files.extend([member for member in zf.namelist() if not member.startswith('META-INF')])
-        dups = set([x for x in files if files.count(x) > 1])
-        if len(dups) > 0:
-            abort('Distributions overlap! duplicates: ' + str(dups))
-
-    if suppliedParser:
-        return args
-    return None
-
-def _handleNonJavaFiles(outputDir, p, clean, nonjavafiletuples):
-    if exists(outputDir):
-        if clean:
-            log('Cleaning {0}...'.format(outputDir))
-            shutil.rmtree(outputDir)
-            os.mkdir(outputDir)
-    else:
-        os.mkdir(outputDir)
-    genDir = p.source_gen_dir()
-    if genDir != '' and exists(genDir) and clean:
-        log('Cleaning {0}...'.format(genDir))
-        for f in os.listdir(genDir):
-            shutil.rmtree(join(genDir, f))
-
-    # Copy all non Java resources or assemble Jasmin files
-    jasminAvailable = None
-    for nonjavafiletuple in nonjavafiletuples:
-        sourceDir = nonjavafiletuple[0]
-        nonjavafilelist = nonjavafiletuple[1]
-
-        for src in nonjavafilelist:
-            if src.endswith('.jasm'):
-                className = None
-                with open(src) as f:
-                    for line in f:
-                        if line.startswith('.class '):
-                            className = line.split()[-1]
-                            break
-
-                if className is not None:
-                    jasminOutputDir = p.jasmin_output_dir()
-                    classFile = join(jasminOutputDir, className.replace('/', os.sep) + '.class')
-                    if exists(dirname(classFile)) and (not exists(classFile) or os.path.getmtime(classFile) < os.path.getmtime(src)):
-                        if jasminAvailable is None:
-                            try:
-                                with open(os.devnull, 'w') as devnull:
-                                    subprocess.call('jasmin', stdout=devnull, stderr=subprocess.STDOUT)
-                                jasminAvailable = True
-                            except OSError:
-                                jasminAvailable = False
-
-                        if jasminAvailable:
-                            log('Assembling Jasmin file ' + src)
-                            run(['jasmin', '-d', jasminOutputDir, src])
-                        else:
-                            log('The jasmin executable could not be found - skipping ' + src)
-                            with file(classFile, 'a'):
-                                os.utime(classFile, None)
-
-                else:
-                    log('could not find .class directive in Jasmin source: ' + src)
-            else:
-                dst = join(outputDir, src[len(sourceDir) + 1:])
-                if not exists(dirname(dst)):
-                    os.makedirs(dirname(dst))
-                if exists(dirname(dst)) and (not exists(dst) or os.path.getmtime(dst) < os.path.getmtime(src)):
-                    shutil.copyfile(src, dst)
-
-def _chunk_files_for_command_line(files, limit=None, pathFunction=lambda f: f):
-    """
-    Returns a generator for splitting up a list of files into chunks such that the
-    size of the space separated file paths in a chunk is less than a given limit.
-    This is used to work around system command line length limits.
-    """
-    chunkSize = 0
-    chunkStart = 0
-    if limit is None:
-        commandLinePrefixAllowance = 3000
-        if get_os() == 'windows':
-            # The CreateProcess function on Windows limits the length of a command line to
-            # 32,768 characters (http://msdn.microsoft.com/en-us/library/ms682425%28VS.85%29.aspx)
-            limit = 32768 - commandLinePrefixAllowance
-        else:
-            # Using just SC_ARG_MAX without extra downwards adjustment
-            # results in "[Errno 7] Argument list too long" on MacOS.
-            commandLinePrefixAllowance = 20000
-            syslimit = os.sysconf('SC_ARG_MAX')
-            if syslimit == -1:
-                syslimit = 262144 # we could use sys.maxint but we prefer a more robust smaller value
-            limit = syslimit - commandLinePrefixAllowance
-            assert limit > 0
-    for i in range(len(files)):
-        path = pathFunction(files[i])
-        size = len(path) + 1
-        assert size < limit
-        if chunkSize + size < limit:
-            chunkSize += size
-        else:
-            assert i > chunkStart
-            yield files[chunkStart:i]
-            chunkStart = i
-            chunkSize = 0
-    if chunkStart == 0:
-        assert chunkSize < limit
-        yield files
-
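-# A usage sketch for the chunking generator above, assuming files that should be added to
-# an existing jar: each 'jar uf' invocation stays below the platform's command line limit.
-# jarPath and files are caller-supplied placeholders.
-def _example_chunked_jar_update(jarPath, files):
-    for chunk in _chunk_files_for_command_line(files):
-        run([java().jar, 'uf', jarPath] + chunk)
-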
-def eclipseformat(args):
-    """run the Eclipse Code Formatter on the Java sources
-
-    The exit code 1 denotes that at least one file was modified."""
-
-    parser = ArgumentParser(prog='mx eclipseformat')
-    parser.add_argument('-e', '--eclipse-exe', help='location of the Eclipse executable')
-    parser.add_argument('-C', '--no-backup', action='store_false', dest='backup', help='do not save backup of modified files')
-    parser.add_argument('--projects', action='store', help='comma separated projects to process (omit to process all projects)')
-
-    args = parser.parse_args(args)
-    if args.eclipse_exe is None:
-        args.eclipse_exe = os.environ.get('ECLIPSE_EXE')
-    if args.eclipse_exe is None:
-        abort('Could not find Eclipse executable. Use -e option or ensure ECLIPSE_EXE environment variable is set.')
-
-    # Maybe an Eclipse installation dir was specified - look for the executable in it
-    if isdir(args.eclipse_exe):
-        args.eclipse_exe = join(args.eclipse_exe, exe_suffix('eclipse'))
-        warn("The eclipse-exe was a directory, now using " + args.eclipse_exe)
-
-    if not os.path.isfile(args.eclipse_exe):
-        abort('File does not exist: ' + args.eclipse_exe)
-    if not os.access(args.eclipse_exe, os.X_OK):
-        abort('Not an executable file: ' + args.eclipse_exe)
-
-    eclipseinit([], buildProcessorJars=False)
-
-    # build list of projects to be processed
-    projects = sorted_deps()
-    if args.projects is not None:
-        projects = [project(name) for name in args.projects.split(',')]
-
-    class Batch:
-        def __init__(self, settingsDir, javaCompliance):
-            self.path = join(settingsDir, 'org.eclipse.jdt.core.prefs')
-            self.javaCompliance = javaCompliance
-            with open(join(settingsDir, 'org.eclipse.jdt.ui.prefs')) as fp:
-                jdtUiPrefs = fp.read()
-            self.removeTrailingWhitespace = 'sp_cleanup.remove_trailing_whitespaces_all=true' in jdtUiPrefs
-            if self.removeTrailingWhitespace:
-                assert 'sp_cleanup.remove_trailing_whitespaces=true' in jdtUiPrefs and 'sp_cleanup.remove_trailing_whitespaces_ignore_empty=false' in jdtUiPrefs
-            self.cachedHash = None
-
-        def __hash__(self):
-            if not self.cachedHash:
-                with open(self.path) as fp:
-                    self.cachedHash = (fp.read(), self.javaCompliance, self.removeTrailingWhitespace).__hash__()
-            return self.cachedHash
-
-        def __eq__(self, other):
-            if not isinstance(other, Batch):
-                return False
-            if self.removeTrailingWhitespace != other.removeTrailingWhitespace:
-                return False
-            if self.javaCompliance != other.javaCompliance:
-                return False
-            if self.path == other.path:
-                return True
-            with open(self.path) as fp:
-                with open(other.path) as ofp:
-                    if fp.read() != ofp.read():
-                        return False
-            return True
-
-    class FileInfo:
-        def __init__(self, path):
-            self.path = path
-            with open(path) as fp:
-                self.content = fp.read()
-            self.times = (os.path.getatime(path), os.path.getmtime(path))
-
-        def update(self, removeTrailingWhitespace):
-            with open(self.path) as fp:
-                content = fp.read()
-
-            if self.content != content:
-                # Only apply *after* formatting to match the order in which the IDE does it
-                if removeTrailingWhitespace:
-                    content, n = re.subn(r'[ \t]+$', '', content, flags=re.MULTILINE)
-                    if n != 0 and self.content == content:
-                        # undo on-disk changes made by the Eclipse formatter
-                        with open(self.path, 'w') as fp:
-                            fp.write(content)
-
-                if self.content != content:
-                    self.diff = difflib.unified_diff(self.content.splitlines(1), content.splitlines(1))
-                    self.content = content
-                    return True
-
-            # reset access and modification time of file
-            os.utime(self.path, self.times)
-
-    modified = list()
-    batches = dict()  # all sources with the same formatting settings are formatted together
-    for p in projects:
-        if p.native:
-            continue
-        sourceDirs = p.source_dirs()
-
-        batch = Batch(join(p.dir, '.settings'), p.javaCompliance)
-
-        if not exists(batch.path):
-            if _opts.verbose:
-                log('[no Eclipse Code Formatter preferences at {0} - skipping]'.format(batch.path))
-            continue
-        javafiles = []
-        for sourceDir in sourceDirs:
-            for root, _, files in os.walk(sourceDir):
-                for f in [join(root, name) for name in files if name.endswith('.java')]:
-                    javafiles.append(FileInfo(f))
-        if len(javafiles) == 0:
-            logv('[no Java sources in {0} - skipping]'.format(p.name))
-            continue
-
-        res = batches.setdefault(batch, javafiles)
-        if res is not javafiles:
-            res.extend(javafiles)
-
-    log('Formatting ' + str(len(batches)) + ' batch(es) of Java sources...')
-    for batch, javafiles in batches.iteritems():
-        for chunk in _chunk_files_for_command_line(javafiles, pathFunction=lambda f: f.path):
-            run([args.eclipse_exe,
-                '-nosplash',
-                '-application',
-                'org.eclipse.jdt.core.JavaCodeFormatter',
-                '-vm', java(batch.javaCompliance).java,
-                '-config', batch.path]
-                + [f.path for f in chunk])
-            for fi in chunk:
-                if fi.update(batch.removeTrailingWhitespace):
-                    modified.append(fi)
-
-    log('{0} files were modified'.format(len(modified)))
-
-    if len(modified) != 0:
-        arcbase = _primary_suite.dir
-        if args.backup:
-            backup = os.path.abspath('eclipseformat.backup.zip')
-            zf = zipfile.ZipFile(backup, 'w', zipfile.ZIP_DEFLATED)
-        for fi in modified:
-            name = os.path.relpath(fi.path, arcbase)
-            log(' - {0}'.format(name))
-            log('Changes:')
-            log(''.join(fi.diff))
-            if args.backup:
-                arcname = name.replace(os.sep, '/')
-                zf.writestr(arcname, fi.content)
-        if args.backup:
-            zf.close()
-            log('Wrote backup of {0} modified files to {1}'.format(len(modified), backup))
-        return 1
-    return 0
-
-def processorjars():
-    for s in suites(True):
-        _processorjars_suite(s)
-
-def _processorjars_suite(s):
-    projs = [p for p in s.projects if p.definedAnnotationProcessors is not None]
-    if len(projs) <= 0:
-        return []
-
-    pnames = [p.name for p in projs]
-    build(['--jdt-warning-as-error', '--projects', ",".join(pnames)])
-    return [p.definedAnnotationProcessorsDist.path for p in s.projects if p.definedAnnotationProcessorsDist is not None]
-
-def pylint(args):
-    """run pylint (if available) over Python source files (found by 'hg locate' or by tree walk with --walk)"""
-
-    parser = ArgumentParser(prog='mx pylint')
-    parser.add_argument('--walk', action='store_true', help='use tree walk to find .py files')
-    args = parser.parse_args(args)
-
-    rcfile = join(dirname(__file__), '.pylintrc')
-    if not exists(rcfile):
-        log('pylint configuration file does not exist: ' + rcfile)
-        return
-
-    try:
-        output = subprocess.check_output(['pylint', '--version'], stderr=subprocess.STDOUT)
-        m = re.match(r'.*pylint (\d+)\.(\d+)\.(\d+).*', output, re.DOTALL)
-        if not m:
-            log('could not determine pylint version from ' + output)
-            return
-        major, minor, micro = (int(m.group(1)), int(m.group(2)), int(m.group(3)))
-        if major != 1 or minor != 1:
-            log('pylint version 1.1.x is required (got {0}.{1}.{2})'.format(major, minor, micro))
-            return
-    except BaseException:
-        log('pylint is not available')
-        return
-
-    def findfiles_by_walk():
-        result = []
-        for suite in suites(True):
-            for root, dirs, files in os.walk(suite.dir):
-                for f in files:
-                    if f.endswith('.py'):
-                        pyfile = join(root, f)
-                        result.append(pyfile)
-                if 'bin' in dirs:
-                    dirs.remove('bin')
-                if 'lib' in dirs:
-                    # avoids downloaded .py files
-                    dirs.remove('lib')
-        return result
-
-    def findfiles_by_hg():
-        result = []
-        for suite in suites(True):
-            versioned = subprocess.check_output(['hg', 'locate', '-f'], stderr=subprocess.STDOUT, cwd=suite.dir).split(os.linesep)
-            for f in versioned:
-                if f.endswith('.py') and exists(f):
-                    result.append(f)
-        return result
-
-    # Perhaps we should just look in suite.mxDir directories for .py files?
-    if args.walk:
-        pyfiles = findfiles_by_walk()
-    else:
-        pyfiles = findfiles_by_hg()
-
-    env = os.environ.copy()
-
-    pythonpath = dirname(__file__)
-    for suite in suites(True):
-        pythonpath = os.pathsep.join([pythonpath, suite.mxDir])
-
-    env['PYTHONPATH'] = pythonpath
-
-    for pyfile in pyfiles:
-        log('Running pylint on ' + pyfile + '...')
-        run(['pylint', '--reports=n', '--rcfile=' + rcfile, pyfile], env=env)
-
-class Archiver:
-    """
-    Utility for creating and updating a zip file atomically.
-    """
-    def __init__(self, path):
-        self.path = path
-
-    def __enter__(self):
-        if self.path:
-            if not isdir(dirname(self.path)):
-                os.makedirs(dirname(self.path))
-            fd, tmp = tempfile.mkstemp(suffix='', prefix=basename(self.path) + '.', dir=dirname(self.path))
-            self.tmpFd = fd
-            self.tmpPath = tmp
-            self.zf = zipfile.ZipFile(tmp, 'w')
-        else:
-            self.tmpFd = None
-            self.tmpPath = None
-            self.zf = None
-        return self
-
-    def __exit__(self, exc_type, exc_value, traceback):
-        if self.zf:
-            self.zf.close()
-            os.close(self.tmpFd)
-            # Correct the permissions on the temporary file which is created with restrictive permissions
-            os.chmod(self.tmpPath, 0o666 & ~currentUmask)
-            # Atomic on Unix
-            shutil.move(self.tmpPath, self.path)
-
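-# A minimal sketch of the atomic-update pattern above: entries are written to a temporary
-# zip which only replaces zipPath once the with-block completes. zipPath and files are
-# caller-supplied placeholders.
-def _example_zip_atomically(zipPath, files):
-    with Archiver(zipPath) as arc:
-        for f in files:
-            arc.zf.write(f, basename(f))
-    return zipPath
-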
-def _archive(args):
-    archive(args)
-    return 0
-
-def archive(args):
-    """create jar files for projects and distributions"""
-    parser = ArgumentParser(prog='mx archive')
-    parser.add_argument('names', nargs=REMAINDER, metavar='[<project>|@<distribution>]...')
-    args = parser.parse_args(args)
-
-    archives = []
-    for name in args.names:
-        if name.startswith('@'):
-            dname = name[1:]
-            d = distribution(dname)
-            d.make_archive()
-            archives.append(d.path)
-        else:
-            p = project(name)
-            archives.append(p.make_archive())
-
-    logv("generated archives: " + str(archives))
-    return archives
-
-def canonicalizeprojects(args):
-    """check all project specifications for canonical dependencies
-
-    The exit code of this command reflects how many projects have non-canonical dependencies."""
-
-    nonCanonical = []
-    for s in suites(True):
-        for p in s.projects:
-            if p.checkPackagePrefix:
-                for pkg in p.defined_java_packages():
-                    if not pkg.startswith(p.name):
-                        abort('package in {0} does not have prefix matching project name: {1}'.format(p, pkg))
-
-            ignoredDeps = set([name for name in p.deps if project(name, False) is not None])
-            for pkg in p.imported_java_packages():
-                for name in p.deps:
-                    dep = project(name, False)
-                    if dep is None:
-                        ignoredDeps.discard(name)
-                    else:
-                        if pkg in dep.defined_java_packages():
-                            ignoredDeps.discard(name)
-                        if pkg in dep.extended_java_packages():
-                            ignoredDeps.discard(name)
-            if len(ignoredDeps) != 0:
-                candidates = set()
-                # Compute dependencies based on projects required by p
-                for d in sorted_deps():
-                    if not d.defined_java_packages().isdisjoint(p.imported_java_packages()):
-                        candidates.add(d)
-                # Remove non-canonical candidates
-                for c in list(candidates):
-                    candidates.difference_update(c.all_deps([], False, False))
-                candidates = [d.name for d in candidates]
-
-                abort('{0} does not use any packages defined in these projects: {1}\nComputed project dependencies: {2}'.format(
-                    p, ', '.join(ignoredDeps), ','.join(candidates)))
-
-            excess = frozenset(p.deps) - set(p.canonical_deps())
-            if len(excess) != 0:
-                nonCanonical.append(p)
-    if len(nonCanonical) != 0:
-        for p in nonCanonical:
-            canonicalDeps = p.canonical_deps()
-            if len(canonicalDeps) != 0:
-                log('Canonical dependencies for project ' + p.name + ' are: [')
-                for d in canonicalDeps:
-                    log('        "' + d + '",')
-                log('      ],')
-            else:
-                log('Canonical dependencies for project ' + p.name + ' are: []')
-    return len(nonCanonical)
-
-class TimeStampFile:
-    def __init__(self, path):
-        self.path = path
-        self.timestamp = os.path.getmtime(path) if exists(path) else None
-
-    def isOlderThan(self, arg):
-        if not self.timestamp:
-            return True
-        if isinstance(arg, TimeStampFile):
-            if arg.timestamp is None:
-                return False
-            else:
-                return arg.timestamp > self.timestamp
-        elif isinstance(arg, types.ListType):
-            files = arg
-        else:
-            files = [arg]
-        for f in files:
-            if os.path.getmtime(f) > self.timestamp:
-                return True
-        return False
-
-    def exists(self):
-        return exists(self.path)
-
-    def touch(self):
-        if exists(self.path):
-            os.utime(self.path, None)
-        else:
-            if not isdir(dirname(self.path)):
-                os.makedirs(dirname(self.path))
-            file(self.path, 'a')
-
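-# Sketch of the TimeStampFile pattern, mirroring its use in checkstyle below
-# (file and variable names are illustrative):
-#   ts = TimeStampFile(join(suite.mxDir, 'example.timestamp'))
-#   if not ts.exists() or ts.isOlderThan(javaSources):
-#       ...  # redo the expensive work
-#       ts.touch()
-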
-def checkstyle(args):
-    """run Checkstyle on the Java sources
-
-   Run Checkstyle over the Java sources. Any errors or warnings
-   produced by Checkstyle result in a non-zero exit code."""
-
-    parser = ArgumentParser(prog='mx checkstyle')
-
-    parser.add_argument('-f', action='store_true', dest='force', help='force checking (disables timestamp checking)')
-    args = parser.parse_args(args)
-
-    totalErrors = 0
-    for p in projects_opt_limit_to_suites():
-        if p.native:
-            continue
-        sourceDirs = p.source_dirs()
-
-        config = join(project(p.checkstyleProj).dir, '.checkstyle_checks.xml')
-        if not exists(config):
-            logv('[No Checkstyle configuration found for {0} - skipping]'.format(p))
-            continue
-
-        # a JDK matching this project's Java compliance level is required to check its sources
-        jdk = java(p.javaCompliance)
-        assert jdk
-
-        for sourceDir in sourceDirs:
-            javafilelist = []
-            for root, _, files in os.walk(sourceDir):
-                javafilelist += [join(root, name) for name in files if name.endswith('.java') and name != 'package-info.java']
-            if len(javafilelist) == 0:
-                logv('[no Java sources in {0} - skipping]'.format(sourceDir))
-                continue
-
-            timestamp = TimeStampFile(join(p.suite.mxDir, 'checkstyle-timestamps', sourceDir[len(p.suite.dir) + 1:].replace(os.sep, '_') + '.timestamp'))
-            mustCheck = False
-            if not args.force and timestamp.exists():
-                mustCheck = timestamp.isOlderThan(javafilelist)
-            else:
-                mustCheck = True
-
-            if not mustCheck:
-                if _opts.verbose:
-                    log('[all Java sources in {0} already checked - skipping]'.format(sourceDir))
-                continue
-
-            exclude = join(p.dir, '.checkstyle.exclude')
-            if exists(exclude):
-                with open(exclude) as f:
-                    # Convert patterns to OS separators
-                    patterns = [name.rstrip().replace('/', os.sep) for name in f.readlines()]
-                def match(name):
-                    for pat in patterns:
-                        if pat in name:
-                            if _opts.verbose:
-                                log('excluding: ' + name)
-                            return True
-                    return False
-
-                javafilelist = [name for name in javafilelist if not match(name)]
-
-            auditfileName = join(p.dir, 'checkstyleOutput.txt')
-            log('Running Checkstyle on {0} using {1}...'.format(sourceDir, config))
-
-            try:
-                for chunk in _chunk_files_for_command_line(javafilelist):
-                    try:
-                        run_java(['-Xmx1g', '-jar', library('CHECKSTYLE').get_path(True), '-f', 'xml', '-c', config, '-o', auditfileName] + chunk, nonZeroIsFatal=False)
-                    finally:
-                        if exists(auditfileName):
-                            errors = []
-                            source = [None]
-                            def start_element(name, attrs):
-                                if name == 'file':
-                                    source[0] = attrs['name']
-                                elif name == 'error':
-                                    errors.append('{0}:{1}: {2}'.format(source[0], attrs['line'], attrs['message']))
-
-                            xp = xml.parsers.expat.ParserCreate()
-                            xp.StartElementHandler = start_element
-                            with open(auditfileName) as fp:
-                                xp.ParseFile(fp)
-                            if len(errors) != 0:
-                                map(log, errors)
-                                totalErrors = totalErrors + len(errors)
-                            else:
-                                timestamp.touch()
-            finally:
-                if exists(auditfileName):
-                    os.unlink(auditfileName)
-    return totalErrors
-
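-# Example invocation: 'mx checkstyle -f' re-checks every source directory, ignoring the
-# timestamp files kept under <suite mx dir>/checkstyle-timestamps.
-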
-def clean(args, parser=None):
-    """remove all class files, images, and executables
-
-    Removes all files created by a build, including Java class files, executables, and
-    generated images.
-    """
-
-    suppliedParser = parser is not None
-
-    parser = parser if suppliedParser else ArgumentParser(prog='mx clean')
-    parser.add_argument('--no-native', action='store_false', dest='native', help='do not clean native projects')
-    parser.add_argument('--no-java', action='store_false', dest='java', help='do not clean Java projects')
-    parser.add_argument('--no-dist', action='store_false', dest='dist', help='do not delete distributions')
-
-    args = parser.parse_args(args)
-
-    def _rmtree(dirPath):
-        path = dirPath
-        if get_os() == 'windows':
-            path = unicode("\\\\?\\" + dirPath)
-        shutil.rmtree(path)
-
-    def _rmIfExists(name):
-        if name and os.path.isfile(name):
-            os.unlink(name)
-
-    for p in projects_opt_limit_to_suites():
-        if p.native:
-            if args.native:
-                run([gmake_cmd(), '-C', p.dir, 'clean'])
-        else:
-            if args.java:
-                genDir = p.source_gen_dir()
-                if genDir != '' and exists(genDir):
-                    log('Clearing {0}...'.format(genDir))
-                    for f in os.listdir(genDir):
-                        _rmtree(join(genDir, f))
-
-                outputDir = p.output_dir()
-                if outputDir != '' and exists(outputDir):
-                    log('Removing {0}...'.format(outputDir))
-                    _rmtree(outputDir)
-
-            for configName in ['netbeans-config.zip', 'eclipse-config.zip']:
-                config = TimeStampFile(join(p.suite.mxDir, configName))
-                if config.exists():
-                    os.unlink(config.path)
-
-    if args.java:
-        if args.dist:
-            for d in _dists.keys():
-                log('Removing distribution {0}...'.format(d))
-                _rmIfExists(distribution(d).path)
-                _rmIfExists(distribution(d).sourcesPath)
-
-    if suppliedParser:
-        return args
-
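-# Example invocations (flags can be combined):
-#   mx clean                # clean Java and native projects and delete distributions
-#   mx clean --no-native    # leave native projects untouched
-#   mx clean --no-dist      # keep the distribution archives
-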
-def about(args):
-    """show the 'man page' for mx"""
-    print __doc__
-
-def help_(args):
-    """show help for a given command
-
-With no arguments, print a list of commands and short help for each command.
-
-Given a command name, print help for that command."""
-    if len(args) == 0:
-        _argParser.print_help()
-        return
-
-    name = args[0]
-    if not _commands.has_key(name):
-        hits = [c for c in _commands.iterkeys() if c.startswith(name)]
-        if len(hits) == 1:
-            name = hits[0]
-        elif len(hits) == 0:
-            abort('mx: unknown command \'{0}\'\n{1}use "mx help" for more options'.format(name, _format_commands()))
-        else:
-            abort('mx: command \'{0}\' is ambiguous\n    {1}'.format(name, ' '.join(hits)))
-
-    value = _commands[name]
-    (func, usage) = value[:2]
-    doc = func.__doc__
-    if len(value) > 2:
-        docArgs = value[2:]
-        fmtArgs = []
-        for d in docArgs:
-            if isinstance(d, Callable):
-                fmtArgs += [d()]
-            else:
-                fmtArgs += [str(d)]
-        doc = doc.format(*fmtArgs)
-    print 'mx {0} {1}\n\n{2}\n'.format(name, usage, doc)
-
-def projectgraph(args, suite=None):
-    """create graph for project structure ("mx projectgraph | dot -Tpdf -oprojects.pdf" or "mx projectgraph --igv")"""
-
-    parser = ArgumentParser(prog='mx projectgraph')
-    parser.add_argument('--igv', action='store_true', help='output to IGV listening on 127.0.0.1:4444')
-    parser.add_argument('--igv-format', action='store_true', help='output graph in IGV format')
-
-    args = parser.parse_args(args)
-
-    if args.igv or args.igv_format:
-        ids = {}
-        nextToIndex = {}
-        igv = XMLDoc()
-        igv.open('graphDocument')
-        igv.open('group')
-        igv.open('properties')
-        igv.element('p', {'name' : 'name'}, 'GraalProjectDependencies')
-        igv.close('properties')
-        igv.open('graph', {'name' : 'dependencies'})
-        igv.open('nodes')
-        for p in sorted_deps(includeLibs=True, includeJreLibs=True):
-            ident = len(ids)
-            ids[p.name] = str(ident)
-            igv.open('node', {'id' : str(ident)})
-            igv.open('properties')
-            igv.element('p', {'name' : 'name'}, p.name)
-            igv.close('properties')
-            igv.close('node')
-        igv.close('nodes')
-        igv.open('edges')
-        for p in projects():
-            fromIndex = 0
-            for dep in p.canonical_deps():
-                toIndex = nextToIndex.get(dep, 0)
-                nextToIndex[dep] = toIndex + 1
-                igv.element('edge', {'from' : ids[p.name], 'fromIndex' : str(fromIndex), 'to' : ids[dep], 'toIndex' : str(toIndex), 'label' : 'dependsOn'})
-                fromIndex = fromIndex + 1
-        igv.close('edges')
-        igv.close('graph')
-        igv.close('group')
-        igv.close('graphDocument')
-
-        if args.igv:
-            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-            s.connect(('127.0.0.1', 4444))
-            s.send(igv.xml())
-        else:
-            print igv.xml(indent='  ', newl='\n')
-        return
-
-    print 'digraph projects {'
-    print 'rankdir=BT;'
-    print 'node [shape=rect];'
-    for p in projects():
-        for dep in p.canonical_deps():
-            print '"' + p.name + '"->"' + dep + '";'
-        if hasattr(p, '_declaredAnnotationProcessors'):
-            for ap in p._declaredAnnotationProcessors:
-                print '"' + p.name + '"->"' + ap + '" [style="dashed"];'
-    print '}'
-
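-# The default (non-IGV) output is plain Graphviz, for example (project names illustrative):
-#   digraph projects {
-#   rankdir=BT;
-#   node [shape=rect];
-#   "com.example.impl"->"com.example.api";
-#   }
-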
-def _source_locator_memento(deps):
-    slm = XMLDoc()
-    slm.open('sourceLookupDirector')
-    slm.open('sourceContainers', {'duplicates' : 'false'})
-
-    javaCompliance = None
-    for dep in deps:
-        if dep.isLibrary():
-            if hasattr(dep, 'eclipse.container'):
-                memento = XMLDoc().element('classpathContainer', {'path' : getattr(dep, 'eclipse.container')}).xml(standalone='no')
-                slm.element('classpathContainer', {'memento' : memento, 'typeId':'org.eclipse.jdt.launching.sourceContainer.classpathContainer'})
-            elif dep.get_source_path(resolve=True):
-                memento = XMLDoc().element('archive', {'detectRoot' : 'true', 'path' : dep.get_source_path(resolve=True)}).xml(standalone='no')
-                slm.element('container', {'memento' : memento, 'typeId':'org.eclipse.debug.core.containerType.externalArchive'})
-        elif dep.isProject():
-            memento = XMLDoc().element('javaProject', {'name' : dep.name}).xml(standalone='no')
-            slm.element('container', {'memento' : memento, 'typeId':'org.eclipse.jdt.launching.sourceContainer.javaProject'})
-            if javaCompliance is None or dep.javaCompliance > javaCompliance:
-                javaCompliance = dep.javaCompliance
-
-    if javaCompliance:
-        memento = XMLDoc().element('classpathContainer', {'path' : 'org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-' + str(javaCompliance)}).xml(standalone='no')
-        slm.element('classpathContainer', {'memento' : memento, 'typeId':'org.eclipse.jdt.launching.sourceContainer.classpathContainer'})
-    else:
-        memento = XMLDoc().element('classpathContainer', {'path' : 'org.eclipse.jdt.launching.JRE_CONTAINER'}).xml(standalone='no')
-        slm.element('classpathContainer', {'memento' : memento, 'typeId':'org.eclipse.jdt.launching.sourceContainer.classpathContainer'})
-
-    slm.close('sourceContainers')
-    slm.close('sourceLookupDirector')
-    return slm
-
-def make_eclipse_attach(suite, hostname, port, name=None, deps=None):
-    """
-    Creates an Eclipse launch configuration file for attaching to a Java process.
-    """
-    if deps is None:
-        deps = []
-    slm = _source_locator_memento(deps)
-    launch = XMLDoc()
-    launch.open('launchConfiguration', {'type' : 'org.eclipse.jdt.launching.remoteJavaApplication'})
-    launch.element('stringAttribute', {'key' : 'org.eclipse.debug.core.source_locator_id', 'value' : 'org.eclipse.jdt.launching.sourceLocator.JavaSourceLookupDirector'})
-    launch.element('stringAttribute', {'key' : 'org.eclipse.debug.core.source_locator_memento', 'value' : '%s'})
-    launch.element('booleanAttribute', {'key' : 'org.eclipse.jdt.launching.ALLOW_TERMINATE', 'value' : 'true'})
-    launch.open('mapAttribute', {'key' : 'org.eclipse.jdt.launching.CONNECT_MAP'})
-    launch.element('mapEntry', {'key' : 'hostname', 'value' : hostname})
-    launch.element('mapEntry', {'key' : 'port', 'value' : port})
-    launch.close('mapAttribute')
-    launch.element('stringAttribute', {'key' : 'org.eclipse.jdt.launching.PROJECT_ATTR', 'value' : ''})
-    launch.element('stringAttribute', {'key' : 'org.eclipse.jdt.launching.VM_CONNECTOR_ID', 'value' : 'org.eclipse.jdt.launching.socketAttachConnector'})
-    launch.close('launchConfiguration')
-    launch = launch.xml(newl='\n', standalone='no') % slm.xml(escape=True, standalone='no')
-
-    if name is None:
-        if len(suites()) == 1:
-            suitePrefix = ''
-        else:
-            suitePrefix = suite.name + '-'
-        name = suitePrefix + 'attach-' + hostname + '-' + port
-    eclipseLaunches = join(suite.mxDir, 'eclipse-launches')
-    if not exists(eclipseLaunches):
-        os.makedirs(eclipseLaunches)
-    launchFile = join(eclipseLaunches, name + '.launch')
-    return update_file(launchFile, launch), launchFile
-
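-# Sketch of a typical call, mirroring the one in _eclipseinit_suite below; the port is
-# whatever the debuggee listens on:
-#   changed, launchFile = make_eclipse_attach(suite, 'localhost', '8000',
-#                                             deps=sorted_deps(includeLibs=True))
-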
-def make_eclipse_launch(javaArgs, jre, name=None, deps=None):
-    """
-    Creates an Eclipse launch configuration file for running/debugging a Java command.
-    """
-    if deps is None:
-        deps = []
-    mainClass = None
-    vmArgs = []
-    appArgs = []
-    cp = None
-    argsCopy = list(reversed(javaArgs))
-    while len(argsCopy) != 0:
-        a = argsCopy.pop()
-        if a == '-jar':
-            mainClass = '-jar'
-            appArgs = list(reversed(argsCopy))
-            break
-        if a == '-cp' or a == '-classpath':
-            assert len(argsCopy) != 0
-            cp = argsCopy.pop()
-            vmArgs.append(a)
-            vmArgs.append(cp)
-        elif a.startswith('-'):
-            vmArgs.append(a)
-        else:
-            mainClass = a
-            appArgs = list(reversed(argsCopy))
-            break
-
-    if mainClass is None:
-        log('Cannot create Eclipse launch configuration without main class or jar file: java ' + ' '.join(javaArgs))
-        return False
-
-    if name is None:
-        if mainClass == '-jar':
-            name = basename(appArgs[0])
-            if len(appArgs) > 1 and not appArgs[1].startswith('-'):
-                name = name + '_' + appArgs[1]
-        else:
-            name = mainClass
-        name = time.strftime('%Y-%m-%d-%H%M%S_' + name)
-
-    if cp is not None:
-        for e in cp.split(os.pathsep):
-            for s in suites():
-                deps += [p for p in s.projects if e == p.output_dir()]
-                deps += [l for l in s.libs if e == l.get_path(False)]
-
-    slm = _source_locator_memento(deps)
-
-    launch = XMLDoc()
-    launch.open('launchConfiguration', {'type' : 'org.eclipse.jdt.launching.localJavaApplication'})
-    launch.element('stringAttribute', {'key' : 'org.eclipse.debug.core.source_locator_id', 'value' : 'org.eclipse.jdt.launching.sourceLocator.JavaSourceLookupDirector'})
-    launch.element('stringAttribute', {'key' : 'org.eclipse.debug.core.source_locator_memento', 'value' : '%s'})
-    launch.element('stringAttribute', {'key' : 'org.eclipse.jdt.launching.JRE_CONTAINER', 'value' : 'org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/' + jre})
-    launch.element('stringAttribute', {'key' : 'org.eclipse.jdt.launching.MAIN_TYPE', 'value' : mainClass})
-    launch.element('stringAttribute', {'key' : 'org.eclipse.jdt.launching.PROGRAM_ARGUMENTS', 'value' : ' '.join(appArgs)})
-    launch.element('stringAttribute', {'key' : 'org.eclipse.jdt.launching.PROJECT_ATTR', 'value' : ''})
-    launch.element('stringAttribute', {'key' : 'org.eclipse.jdt.launching.VM_ARGUMENTS', 'value' : ' '.join(vmArgs)})
-    launch.close('launchConfiguration')
-    launch = launch.xml(newl='\n', standalone='no') % slm.xml(escape=True, standalone='no')
-
-    eclipseLaunches = join('mx', 'eclipse-launches')
-    if not exists(eclipseLaunches):
-        os.makedirs(eclipseLaunches)
-    return update_file(join(eclipseLaunches, name + '.launch'), launch)
-
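-# Sketch of a call that turns a java command line into a launch configuration
-# (all argument values are illustrative):
-#   make_eclipse_launch(['-cp', 'bin', 'com.example.Main', 'someArg'],
-#                       'JavaSE-1.8', name='example-run')
-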
-def eclipseinit(args, buildProcessorJars=True, refreshOnly=False):
-    """(re)generate Eclipse project configurations and working sets"""
-    for s in suites(True):
-        _eclipseinit_suite(args, s, buildProcessorJars, refreshOnly)
-
-    generate_eclipse_workingsets()
-
-def _check_ide_timestamp(suite, configZip, ide):
-    """return True if and only if the projects file, eclipse-settings files, and mx itself are all older than configZip"""
-    suitePyFiles = [join(suite.mxDir, e) for e in os.listdir(suite.mxDir) if e.startswith('suite') and e.endswith('.py')]
-    if configZip.isOlderThan(suitePyFiles):
-        return False
-    # Assume that any mx change might imply changes to the generated IDE files
-    if configZip.isOlderThan(__file__):
-        return False
-
-    if ide == 'eclipse':
-        eclipseSettingsDir = join(suite.mxDir, 'eclipse-settings')
-        if exists(eclipseSettingsDir):
-            for name in os.listdir(eclipseSettingsDir):
-                path = join(eclipseSettingsDir, name)
-                if configZip.isOlderThan(path):
-                    return False
-    return True
-
-def _eclipseinit_project(p, files=None, libFiles=None):
-    assert java(p.javaCompliance)
-
-    if not exists(p.dir):
-        os.makedirs(p.dir)
-
-    out = XMLDoc()
-    out.open('classpath')
-
-    for src in p.srcDirs:
-        srcDir = join(p.dir, src)
-        if not exists(srcDir):
-            os.mkdir(srcDir)
-        out.element('classpathentry', {'kind' : 'src', 'path' : src})
-
-    processorPath = p.annotation_processors_path()
-    if processorPath:
-        genDir = p.source_gen_dir()
-        if not exists(genDir):
-            os.mkdir(genDir)
-        out.open('classpathentry', {'kind' : 'src', 'path' : 'src_gen'})
-        if p.uses_annotation_processor_library():
-            # ignore warnings produced by third-party annotation processors
-            out.open('attributes')
-            out.element('attribute', {'name' : 'ignore_optional_problems', 'value' : 'true'})
-            out.close('attributes')
-        out.close('classpathentry')
-
-        if files:
-            files.append(genDir)
-
-    # Every Java program depends on a JRE
-    out.element('classpathentry', {'kind' : 'con', 'path' : 'org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-' + str(p.javaCompliance)})
-
-    if exists(join(p.dir, 'plugin.xml')):  # eclipse plugin project
-        out.element('classpathentry', {'kind' : 'con', 'path' : 'org.eclipse.pde.core.requiredPlugins'})
-
-    containerDeps = set()
-    libraryDeps = set()
-    projectDeps = set()
-
-    for dep in p.all_deps([], True):
-        if dep == p:
-            continue
-        if dep.isLibrary():
-            if hasattr(dep, 'eclipse.container'):
-                container = getattr(dep, 'eclipse.container')
-                containerDeps.add(container)
-                libraryDeps -= set(dep.all_deps([], True))
-            else:
-                libraryDeps.add(dep)
-        elif dep.isProject():
-            projectDeps.add(dep)
-
-    for dep in sorted(containerDeps):
-        out.element('classpathentry', {'exported' : 'true', 'kind' : 'con', 'path' : dep})
-
-    for dep in sorted(libraryDeps):
-        path = dep.path
-        # resolve the library so that it is downloaded if necessary; the declared (possibly relative) path is used below
-        dep.get_path(resolve=True)
-
-        # Relative paths for "lib" class path entries have various semantics depending on the Eclipse
-        # version being used (e.g. see https://bugs.eclipse.org/bugs/show_bug.cgi?id=274737) so it's
-        # safest to simply use absolute paths.
-        path = _make_absolute(path, p.suite.dir)
-
-        attributes = {'exported' : 'true', 'kind' : 'lib', 'path' : path}
-
-        sourcePath = dep.get_source_path(resolve=True)
-        if sourcePath is not None:
-            attributes['sourcepath'] = sourcePath
-        out.element('classpathentry', attributes)
-        if libFiles:
-            libFiles.append(path)
-
-    for dep in sorted(projectDeps):
-        out.element('classpathentry', {'combineaccessrules' : 'false', 'exported' : 'true', 'kind' : 'src', 'path' : '/' + dep.name})
-
-    out.element('classpathentry', {'kind' : 'output', 'path' : getattr(p, 'eclipse.output', 'bin')})
-    out.close('classpath')
-    classpathFile = join(p.dir, '.classpath')
-    update_file(classpathFile, out.xml(indent='\t', newl='\n'))
-    if files:
-        files.append(classpathFile)
-
-    csConfig = join(project(p.checkstyleProj).dir, '.checkstyle_checks.xml')
-    if exists(csConfig):
-        out = XMLDoc()
-
-        dotCheckstyle = join(p.dir, ".checkstyle")
-        checkstyleConfigPath = '/' + p.checkstyleProj + '/.checkstyle_checks.xml'
-        out.open('fileset-config', {'file-format-version' : '1.2.0', 'simple-config' : 'true'})
-        out.open('local-check-config', {'name' : 'Checks', 'location' : checkstyleConfigPath, 'type' : 'project', 'description' : ''})
-        out.element('additional-data', {'name' : 'protect-config-file', 'value' : 'false'})
-        out.close('local-check-config')
-        out.open('fileset', {'name' : 'all', 'enabled' : 'true', 'check-config-name' : 'Checks', 'local' : 'true'})
-        out.element('file-match-pattern', {'match-pattern' : '.', 'include-pattern' : 'true'})
-        out.close('fileset')
-        out.open('filter', {'name' : 'all', 'enabled' : 'true', 'check-config-name' : 'Checks', 'local' : 'true'})
-        out.element('filter-data', {'value' : 'java'})
-        out.close('filter')
-
-        exclude = join(p.dir, '.checkstyle.exclude')
-        if exists(exclude):
-            out.open('filter', {'name' : 'FilesFromPackage', 'enabled' : 'true'})
-            with open(exclude) as f:
-                for line in f:
-                    if not line.startswith('#'):
-                        line = line.strip()
-                        exclDir = join(p.dir, line)
-                        assert isdir(exclDir), 'excluded source directory listed in ' + exclude + ' does not exist or is not a directory: ' + exclDir
-                    out.element('filter-data', {'value' : line})
-            out.close('filter')
-
-        out.close('fileset-config')
-        update_file(dotCheckstyle, out.xml(indent='  ', newl='\n'))
-        if files:
-            files.append(dotCheckstyle)
-    else:
-        # clean up existing .checkstyle file
-        dotCheckstyle = join(p.dir, ".checkstyle")
-        if exists(dotCheckstyle):
-            os.unlink(dotCheckstyle)
-
-    out = XMLDoc()
-    out.open('projectDescription')
-    out.element('name', data=p.name)
-    out.element('comment', data='')
-    out.element('projects', data='')
-    out.open('buildSpec')
-    out.open('buildCommand')
-    out.element('name', data='org.eclipse.jdt.core.javabuilder')
-    out.element('arguments', data='')
-    out.close('buildCommand')
-    if exists(csConfig):
-        out.open('buildCommand')
-        out.element('name', data='net.sf.eclipsecs.core.CheckstyleBuilder')
-        out.element('arguments', data='')
-        out.close('buildCommand')
-    if exists(join(p.dir, 'plugin.xml')):  # eclipse plugin project
-        for buildCommand in ['org.eclipse.pde.ManifestBuilder', 'org.eclipse.pde.SchemaBuilder']:
-            out.open('buildCommand')
-            out.element('name', data=buildCommand)
-            out.element('arguments', data='')
-            out.close('buildCommand')
-
-    if p.definedAnnotationProcessorsDist:
-        # Create a launcher that will (re)build the annotation processor
-        # jar any time one of its sources is modified.
-        dist = p.definedAnnotationProcessorsDist
-
-        distProjects = [d for d in dist.sorted_deps(transitive=True) if d.isProject()]
-        relevantResources = []
-        for dp in distProjects:  # use a separate loop variable so that the enclosing project 'p' is not shadowed
-            for srcDir in dp.source_dirs():
-                relevantResources.append(join(dp.name, os.path.relpath(srcDir, dp.dir)))
-            relevantResources.append(join(dp.name, os.path.relpath(dp.output_dir(), dp.dir)))
-
-        # The refresh path must always have the form <name>/<path>, relative to the logical
-        # workspace root and independent of where the workspace actually is, so the parent
-        # folder of the project is used to compute it.
-        logicalWorkspaceRoot = os.path.dirname(p.dir)
-        refreshFile = os.path.relpath(dist.path, logicalWorkspaceRoot)
-        _genEclipseBuilder(out, p, 'CreateAnnotationProcessorJar', 'archive @' + dist.name, refresh=True, refreshFile=refreshFile, relevantResources=relevantResources, async=True, xmlIndent='', xmlStandalone='no')
-
-    out.close('buildSpec')
-    out.open('natures')
-    out.element('nature', data='org.eclipse.jdt.core.javanature')
-    if exists(csConfig):
-        out.element('nature', data='net.sf.eclipsecs.core.CheckstyleNature')
-    if exists(join(p.dir, 'plugin.xml')):  # eclipse plugin project
-        out.element('nature', data='org.eclipse.pde.PluginNature')
-    out.close('natures')
-    out.close('projectDescription')
-    projectFile = join(p.dir, '.project')
-    update_file(projectFile, out.xml(indent='\t', newl='\n'))
-    if files:
-        files.append(projectFile)
-
-    settingsDir = join(p.dir, ".settings")
-    if not exists(settingsDir):
-        os.mkdir(settingsDir)
-
-    # collect the defaults from mxtool
-    defaultEclipseSettingsDir = join(dirname(__file__), 'eclipse-settings')
-    esdict = {}
-    if exists(defaultEclipseSettingsDir):
-        for name in os.listdir(defaultEclipseSettingsDir):
-            if isfile(join(defaultEclipseSettingsDir, name)):
-                esdict[name] = os.path.abspath(join(defaultEclipseSettingsDir, name))
-
-    # check for suite overrides
-    eclipseSettingsDir = join(p.suite.mxDir, 'eclipse-settings')
-    if exists(eclipseSettingsDir):
-        for name in os.listdir(eclipseSettingsDir):
-            if isfile(join(eclipseSettingsDir, name)):
-                esdict[name] = os.path.abspath(join(eclipseSettingsDir, name))
-
-    # check for project overrides
-    projectSettingsDir = join(p.dir, 'eclipse-settings')
-    if exists(projectSettingsDir):
-        for name in os.listdir(projectSettingsDir):
-            if isfile(join(projectSettingsDir, name)):
-                esdict[name] = os.path.abspath(join(projectSettingsDir, name))
-
-    # copy a possibly modified file to the project's .settings directory
-    for name, path in esdict.iteritems():
-        # ignore this file altogether if this project has no annotation processors
-        if name == "org.eclipse.jdt.apt.core.prefs" and not processorPath:
-            continue
-
-        with open(path) as f:
-            content = f.read()
-        content = content.replace('${javaCompliance}', str(p.javaCompliance))
-        if processorPath:
-            content = content.replace('org.eclipse.jdt.core.compiler.processAnnotations=disabled', 'org.eclipse.jdt.core.compiler.processAnnotations=enabled')
-        update_file(join(settingsDir, name), content)
-        if files:
-            files.append(join(settingsDir, name))
-
-    if processorPath:
-        out = XMLDoc()
-        out.open('factorypath')
-        out.element('factorypathentry', {'kind' : 'PLUGIN', 'id' : 'org.eclipse.jst.ws.annotations.core', 'enabled' : 'true', 'runInBatchMode' : 'false'})
-        for e in processorPath.split(os.pathsep):
-            out.element('factorypathentry', {'kind' : 'EXTJAR', 'id' : e, 'enabled' : 'true', 'runInBatchMode' : 'false'})
-        out.close('factorypath')
-        update_file(join(p.dir, '.factorypath'), out.xml(indent='\t', newl='\n'))
-        if files:
-            files.append(join(p.dir, '.factorypath'))
-
-def _eclipseinit_suite(args, suite, buildProcessorJars=True, refreshOnly=False):
-    configZip = TimeStampFile(join(suite.mxDir, 'eclipse-config.zip'))
-    configLibsZip = join(suite.mxDir, 'eclipse-config-libs.zip')
-    if refreshOnly and not configZip.exists():
-        return
-
-    if _check_ide_timestamp(suite, configZip, 'eclipse'):
-        logv('[Eclipse configurations are up to date - skipping]')
-        return
-
-    files = []
-    libFiles = []
-    if buildProcessorJars:
-        files += _processorjars_suite(suite)
-
-    for p in suite.projects:
-        if p.native:
-            continue
-        _eclipseinit_project(p, files, libFiles)
-
-    _, launchFile = make_eclipse_attach(suite, 'localhost', '8000', deps=sorted_deps(projectNames=None, includeLibs=True))
-    files.append(launchFile)
-
-    # Create an Eclipse project for each distribution that will create/update the archive
-    # for the distribution whenever any (transitively) dependent project of the
-    # distribution is updated.
-    for dist in suite.dists:
-        projectDir = dist.get_ide_project_dir()
-        if not projectDir:
-            continue
-        if not exists(projectDir):
-            os.makedirs(projectDir)
-        distProjects = [d for d in dist.sorted_deps(transitive=True) if d.isProject()]
-        relevantResources = []
-        for p in distProjects:
-            for srcDir in p.source_dirs():
-                relevantResources.append(join(p.name, os.path.relpath(srcDir, p.dir)))
-            relevantResources.append(join(p.name, os.path.relpath(p.output_dir(), p.dir)))
-        out = XMLDoc()
-        out.open('projectDescription')
-        out.element('name', data=dist.name)
-        out.element('comment', data='Updates ' + dist.path + ' if a project dependency of ' + dist.name + ' is updated')
-        out.open('projects')
-        for p in distProjects:
-            out.element('project', data=p.name)
-        for d in dist.distDependencies:
-            out.element('project', data=d)
-        out.close('projects')
-        out.open('buildSpec')
-        dist.dir = projectDir
-        dist.javaCompliance = max([p.javaCompliance for p in distProjects])
-        _genEclipseBuilder(out, dist, 'Create' + dist.name + 'Dist', 'archive @' + dist.name, relevantResources=relevantResources, logToFile=True, refresh=False, async=True)
-        out.close('buildSpec')
-        out.open('natures')
-        out.element('nature', data='org.eclipse.jdt.core.javanature')
-        out.close('natures')
-        out.close('projectDescription')
-        projectFile = join(projectDir, '.project')
-        update_file(projectFile, out.xml(indent='\t', newl='\n'))
-        files.append(projectFile)
-
-    _zip_files(files, suite.dir, configZip.path)
-    _zip_files(libFiles, suite.dir, configLibsZip)
-
-def _zip_files(files, baseDir, zipPath):
-    fd, tmp = tempfile.mkstemp(suffix='', prefix=basename(zipPath), dir=baseDir)
-    try:
-        zf = zipfile.ZipFile(tmp, 'w')
-        for f in sorted(set(files)):
-            relpath = os.path.relpath(f, baseDir)
-            arcname = relpath.replace(os.sep, '/')
-            zf.write(f, arcname)
-        zf.close()
-        os.close(fd)
-        # Atomic on Unix
-        shutil.move(tmp, zipPath)
-        # Correct the permissions on the temporary file which is created with restrictive permissions
-        os.chmod(zipPath, 0o666 & ~currentUmask)
-    finally:
-        if exists(tmp):
-            os.remove(tmp)
-
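-# _zip_files records the generated IDE files of a suite, e.g. (paths illustrative):
-#   _zip_files([join(p.dir, '.project'), join(p.dir, '.classpath')],
-#              suite.dir, join(suite.mxDir, 'eclipse-config.zip'))
-# The resulting zip also serves as the timestamp consulted by _check_ide_timestamp.
-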
-def _genEclipseBuilder(dotProjectDoc, p, name, mxCommand, refresh=True, refreshFile=None, relevantResources=None, async=False, logToConsole=False, logToFile=False, appendToLogFile=True, xmlIndent='\t', xmlStandalone=None):
-    externalToolDir = join(p.dir, '.externalToolBuilders')
-    launchOut = XMLDoc()
-    consoleOn = 'true' if logToConsole else 'false'
-    launchOut.open('launchConfiguration', {'type' : 'org.eclipse.ui.externaltools.ProgramBuilderLaunchConfigurationType'})
-    launchOut.element('booleanAttribute', {'key' : 'org.eclipse.debug.core.capture_output', 'value': consoleOn})
-    launchOut.open('mapAttribute', {'key' : 'org.eclipse.debug.core.environmentVariables'})
-    launchOut.element('mapEntry', {'key' : 'JAVA_HOME', 'value' : _default_java_home.jdk})
-    launchOut.element('mapEntry', {'key' : 'EXTRA_JAVA_HOMES', 'value' :  os.pathsep.join([extraJavaHome.jdk for extraJavaHome in _extra_java_homes])})
-    launchOut.close('mapAttribute')
-
-    if refresh:
-        if refreshFile is None:
-            refreshScope = '${project}'
-        else:
-            refreshScope = '${working_set:<?xml version="1.0" encoding="UTF-8"?><resources><item path="' + refreshFile + '" type="1"/></resources>}'
-
-        launchOut.element('booleanAttribute', {'key' : 'org.eclipse.debug.core.ATTR_REFRESH_RECURSIVE', 'value':  'false'})
-        launchOut.element('stringAttribute', {'key' : 'org.eclipse.debug.core.ATTR_REFRESH_SCOPE', 'value':  refreshScope})
-
-    if relevantResources is not None:
-        resources = '${working_set:<?xml version="1.0" encoding="UTF-8"?><resources>'
-        for relevantResource in relevantResources:
-            resources += '<item path="' + relevantResource + '" type="2" />'
-        resources += '</resources>}'
-        launchOut.element('stringAttribute', {'key' : 'org.eclipse.ui.externaltools.ATTR_BUILD_SCOPE', 'value': resources})
-
-    launchOut.element('booleanAttribute', {'key' : 'org.eclipse.debug.ui.ATTR_CONSOLE_OUTPUT_ON', 'value': consoleOn})
-    launchOut.element('booleanAttribute', {'key' : 'org.eclipse.debug.ui.ATTR_LAUNCH_IN_BACKGROUND', 'value': 'true' if async else 'false'})
-    if logToFile:
-        logFile = join(externalToolDir, name + '.log')
-        launchOut.element('stringAttribute', {'key' : 'org.eclipse.debug.ui.ATTR_CAPTURE_IN_FILE', 'value': logFile})
-        launchOut.element('booleanAttribute', {'key' : 'org.eclipse.debug.ui.ATTR_APPEND_TO_FILE', 'value': 'true' if appendToLogFile else 'false'})
-
-    # expect to find the OS command to invoke mx in the same directory as this module
-    baseDir = dirname(os.path.abspath(__file__))
-
-    cmd = 'mx.sh'
-    if get_os() == 'windows':
-        cmd = 'mx.cmd'
-    cmdPath = join(baseDir, cmd)
-    if not os.path.exists(cmdPath):
-        # backwards compatibility for when the commands lived in parent of mxtool
-        cmdPath = join(dirname(baseDir), cmd)
-        if not os.path.exists(cmdPath):
-            abort('cannot locate ' + cmd)
-
-    launchOut.element('stringAttribute', {'key' : 'org.eclipse.ui.externaltools.ATTR_LOCATION', 'value':  cmdPath})
-    launchOut.element('stringAttribute', {'key' : 'org.eclipse.ui.externaltools.ATTR_RUN_BUILD_KINDS', 'value': 'full,incremental,auto,'})
-    launchOut.element('stringAttribute', {'key' : 'org.eclipse.ui.externaltools.ATTR_TOOL_ARGUMENTS', 'value': mxCommand})
-    launchOut.element('booleanAttribute', {'key' : 'org.eclipse.ui.externaltools.ATTR_TRIGGERS_CONFIGURED', 'value': 'true'})
-    launchOut.element('stringAttribute', {'key' : 'org.eclipse.ui.externaltools.ATTR_WORKING_DIRECTORY', 'value': p.suite.dir})
-
-    launchOut.close('launchConfiguration')
-
-    if not exists(externalToolDir):
-        os.makedirs(externalToolDir)
-    update_file(join(externalToolDir, name + '.launch'), launchOut.xml(indent=xmlIndent, standalone=xmlStandalone, newl='\n'))
-
-    dotProjectDoc.open('buildCommand')
-    dotProjectDoc.element('name', data='org.eclipse.ui.externaltools.ExternalToolBuilder')
-    dotProjectDoc.element('triggers', data='auto,full,incremental,')
-    dotProjectDoc.open('arguments')
-    dotProjectDoc.open('dictionary')
-    dotProjectDoc.element('key', data='LaunchConfigHandle')
-    dotProjectDoc.element('value', data='<project>/.externalToolBuilders/' + name + '.launch')
-    dotProjectDoc.close('dictionary')
-    dotProjectDoc.open('dictionary')
-    dotProjectDoc.element('key', data='incclean')
-    dotProjectDoc.element('value', data='true')
-    dotProjectDoc.close('dictionary')
-    dotProjectDoc.close('arguments')
-    dotProjectDoc.close('buildCommand')
-
-def generate_eclipse_workingsets():
-    """
-    Populate the workspace's working set configuration with working sets generated from the project data of the primary suite.
-    If the workspace already contains working set definitions, the existing ones are retained and extended.
-    If mx/env does not contain a WORKSPACE definition pointing to the workspace root directory, a parent search from the primary suite directory is performed.
-    If no workspace root directory can be identified, the primary suite directory is used and the user has to move the generated workingsets.xml file into place by hand.
-    """
-
-    # identify the location where to look for workingsets.xml
-    wsfilename = 'workingsets.xml'
-    wsloc = '.metadata/.plugins/org.eclipse.ui.workbench'
-    if os.environ.has_key('WORKSPACE'):
-        expected_wsroot = os.environ['WORKSPACE']
-    else:
-        expected_wsroot = _primary_suite.dir
-
-    wsroot = _find_eclipse_wsroot(expected_wsroot)
-    if wsroot is None:
-        # failed to find it
-        wsroot = expected_wsroot
-
-    wsdir = join(wsroot, wsloc)
-    if not exists(wsdir):
-        wsdir = wsroot
-        logv('Could not find Eclipse metadata directory. Please place ' + wsfilename + ' in ' + wsloc + ' manually.')
-    wspath = join(wsdir, wsfilename)
-
-    # gather working set info from project data
-    workingSets = dict()
-    for p in projects():
-        if p.workingSets is None:
-            continue
-        for w in p.workingSets.split(","):
-            if not workingSets.has_key(w):
-                workingSets[w] = [p.name]
-            else:
-                workingSets[w].append(p.name)
-
-    if exists(wspath):
-        wsdoc = _copy_workingset_xml(wspath, workingSets)
-    else:
-        wsdoc = _make_workingset_xml(workingSets)
-
-    update_file(wspath, wsdoc.xml(newl='\n'))
-
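-# The working set map built above has the shape (names illustrative):
-#   {'API': ['com.example.api'], 'Impl': ['com.example.impl', 'com.example.impl.test']}
-# i.e. one entry per comma-separated name in a project's workingSets attribute.
-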
-def _find_eclipse_wsroot(wsdir):
-    md = join(wsdir, '.metadata')
-    if exists(md):
-        return wsdir
-    split = os.path.split(wsdir)
-    if split[0] == wsdir:  # root directory
-        return None
-    else:
-        return _find_eclipse_wsroot(split[0])
-
-def _make_workingset_xml(workingSets):
-    wsdoc = XMLDoc()
-    wsdoc.open('workingSetManager')
-
-    for w in sorted(workingSets.keys()):
-        _workingset_open(wsdoc, w)
-        for p in workingSets[w]:
-            _workingset_element(wsdoc, p)
-        wsdoc.close('workingSet')
-
-    wsdoc.close('workingSetManager')
-    return wsdoc
-
-def _copy_workingset_xml(wspath, workingSets):
-    target = XMLDoc()
-    target.open('workingSetManager')
-
-    parser = xml.parsers.expat.ParserCreate()
-
-    class ParserState(object):
-        def __init__(self):
-            self.current_ws_name = 'none yet'
-            self.current_ws = None
-            self.seen_ws = list()
-            self.seen_projects = list()
-            self.aggregate_ws = False
-            self.nested_ws = False
-
-    ps = ParserState()
-
-    # parsing logic
-    def _ws_start(name, attributes):
-        if name == 'workingSet':
-            if attributes.has_key('name'):
-                ps.current_ws_name = attributes['name']
-                if attributes.has_key('aggregate') and attributes['aggregate'] == 'true':
-                    ps.aggregate_ws = True
-                    ps.current_ws = None
-                elif workingSets.has_key(ps.current_ws_name):
-                    ps.current_ws = workingSets[ps.current_ws_name]
-                    ps.seen_ws.append(ps.current_ws_name)
-                    ps.seen_projects = list()
-                else:
-                    ps.current_ws = None
-            target.open(name, attributes)
-            parser.StartElementHandler = _ws_item
-
-    def _ws_end(name):
-        closeAndResetHandler = False
-        if name == 'workingSet':
-            if ps.aggregate_ws:
-                if ps.nested_ws:
-                    ps.nested_ws = False
-                else:
-                    ps.aggregate_ws = False
-                    closeAndResetHandler = True
-            else:
-                if not ps.current_ws is None:
-                    for p in ps.current_ws:
-                        if not p in ps.seen_projects:
-                            _workingset_element(target, p)
-                closeAndResetHandler = True
-            if closeAndResetHandler:
-                target.close('workingSet')
-                parser.StartElementHandler = _ws_start
-        elif name == 'workingSetManager':
-            # process all working sets that are new to the file
-            for w in sorted(workingSets.keys()):
-                if not w in ps.seen_ws:
-                    _workingset_open(target, w)
-                    for p in workingSets[w]:
-                        _workingset_element(target, p)
-                    target.close('workingSet')
-
-    def _ws_item(name, attributes):
-        if name == 'item':
-            if ps.current_ws is None:
-                target.element(name, attributes)
-            elif not attributes.has_key('elementID') and attributes.has_key('factoryID') and attributes.has_key('path') and attributes.has_key('type'):
-                target.element(name, attributes)
-                p_name = attributes['path'][1:]  # strip off the leading '/'
-                ps.seen_projects.append(p_name)
-            else:
-                p_name = attributes['elementID'][1:]  # strip off the leading '='
-                _workingset_element(target, p_name)
-                ps.seen_projects.append(p_name)
-        elif name == 'workingSet':
-            ps.nested_ws = True
-            target.element(name, attributes)
-
-    # process document
-    parser.StartElementHandler = _ws_start
-    parser.EndElementHandler = _ws_end
-    with open(wspath, 'r') as wsfile:
-        parser.ParseFile(wsfile)
-
-    target.close('workingSetManager')
-    return target
-
-def _workingset_open(wsdoc, ws):
-    wsdoc.open('workingSet', {'editPageID': 'org.eclipse.jdt.ui.JavaWorkingSetPage', 'factoryID': 'org.eclipse.ui.internal.WorkingSetFactory', 'id': 'wsid_' + ws, 'label': ws, 'name': ws})
-
-def _workingset_element(wsdoc, p):
-    wsdoc.element('item', {'elementID': '=' + p, 'factoryID': 'org.eclipse.jdt.ui.PersistableJavaElementFactory'})
-
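-# The two helpers above emit entries of this shape (working set and project names are
-# hypothetical):
-#   <workingSet editPageID="org.eclipse.jdt.ui.JavaWorkingSetPage" factoryID="org.eclipse.ui.internal.WorkingSetFactory" id="wsid_Example" label="Example" name="Example">
-#     <item elementID="=com.example.project" factoryID="org.eclipse.jdt.ui.PersistableJavaElementFactory"/>
-#   </workingSet>
-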
-def netbeansinit(args, refreshOnly=False, buildProcessorJars=True):
-    """(re)generate NetBeans project configurations"""
-
-    for suite in suites(True):
-        _netbeansinit_suite(args, suite, refreshOnly, buildProcessorJars)
-
-def _netbeansinit_project(p, jdks=None, files=None, libFiles=None):
-    if not exists(join(p.dir, 'nbproject')):
-        os.makedirs(join(p.dir, 'nbproject'))
-
-    jdk = java(p.javaCompliance)
-    assert jdk
-
-    if jdks:
-        jdks.add(jdk)
-
-    out = XMLDoc()
-    out.open('project', {'name' : p.name, 'default' : 'default', 'basedir' : '.'})
-    out.element('description', data='Builds, tests, and runs the project ' + p.name + '.')
-    out.element('import', {'file' : 'nbproject/build-impl.xml'})
-    out.open('target', {'name' : '-post-init'})
-    out.open('pathconvert', {'property' : 'comma.javac.classpath', 'pathsep' : ','})
-    out.element('path', {'path' : '${javac.classpath}'})
-    out.close('pathconvert')
-
-    out.open('restrict', {'id' : 'missing.javac.classpath'})
-    out.element('filelist', {'dir' : '${basedir}', 'files' : '${comma.javac.classpath}'})
-    out.open('not')
-    out.element('exists')
-    out.close('not')
-    out.close('restrict')
-
-    out.element('property', {'name' : 'missing.javac.classpath', 'refid' : 'missing.javac.classpath'})
-
-    out.open('condition', {'property' : 'no.dependencies', 'value' : 'true'})
-    out.element('equals', {'arg1' : '${missing.javac.classpath}', 'arg2' : ''})
-    out.close('condition')
-
-    out.element('property', {'name' : 'no.dependencies', 'value' : 'false'})
-
-    out.open('condition', {'property' : 'no.deps'})
-    out.element('equals', {'arg1' : '${no.dependencies}', 'arg2' : 'true'})
-    out.close('condition')
-
-    out.close('target')
-    out.open('target', {'name' : 'compile'})
-    out.open('exec', {'executable' : sys.executable, 'failonerror' : 'true'})
-    out.element('env', {'key' : 'JAVA_HOME', 'value' : jdk.jdk})
-    out.element('arg', {'value' : os.path.abspath(__file__)})
-    out.element('arg', {'value' : 'build'})
-    out.element('arg', {'value' : '--only'})
-    out.element('arg', {'value' : p.name})
-    out.element('arg', {'value' : '--force-javac'})
-    out.element('arg', {'value' : '--no-native'})
-    out.close('exec')
-    out.close('target')
-    out.open('target', {'name' : 'jar', 'depends' : 'compile'})
-    out.close('target')
-    out.open('target', {'name' : 'run', 'depends' : 'compile'})
-    out.open('exec', {'executable' : sys.executable, 'failonerror' : 'true'})
-    out.element('env', {'key' : 'JAVA_HOME', 'value' : jdk.jdk})
-    out.element('arg', {'value' : os.path.abspath(__file__)})
-    out.element('arg', {'value' : 'unittest'})
-    out.element('arg', {'value' : p.name})
-    out.close('exec')
-    out.close('target')
-    out.open('target', {'name' : 'debug', 'depends' : 'init,compile'})
-    out.open('nbjpdastart', {'addressproperty' : 'jpda.address', 'name' : p.name, })
-    out.open('classpath')
-    out.element('path', {'path' : '${javac.classpath}'})
-    out.close('classpath')
-    out.open('sourcepath')
-    out.element('pathelement', {'location' : 'src'})
-    out.close('sourcepath')
-    out.close('nbjpdastart')
-    out.open('exec', {'executable' : sys.executable, 'failonerror' : 'true'})
-    out.element('env', {'key' : 'JAVA_HOME', 'value' : jdk.jdk})
-    out.element('arg', {'value' : os.path.abspath(__file__)})
-    out.element('arg', {'value' : '-d'})
-    out.element('arg', {'value' : '--attach'})
-    out.element('arg', {'value' : '${jpda.address}'})
-    out.element('arg', {'value' : 'unittest'})
-    out.element('arg', {'value' : p.name})
-    out.close('exec')
-    out.close('target')
-    out.open('target', {'name' : 'javadoc'})
-    out.open('exec', {'executable' : sys.executable, 'failonerror' : 'true'})
-    out.element('env', {'key' : 'JAVA_HOME', 'value' : jdk.jdk})
-    out.element('arg', {'value' : os.path.abspath(__file__)})
-    out.element('arg', {'value' : 'javadoc'})
-    out.element('arg', {'value' : '--projects'})
-    out.element('arg', {'value' : p.name})
-    out.element('arg', {'value' : '--force'})
-    out.close('exec')
-    out.element('nbbrowse', {'file' : 'javadoc/index.html'})
-    out.close('target')
-    out.close('project')
-    update_file(join(p.dir, 'build.xml'), out.xml(indent='\t', newl='\n'))
-    if files:
-        files.append(join(p.dir, 'build.xml'))
-
-    out = XMLDoc()
-    out.open('project', {'xmlns' : 'http://www.netbeans.org/ns/project/1'})
-    out.element('type', data='org.netbeans.modules.java.j2seproject')
-    out.open('configuration')
-    out.open('data', {'xmlns' : 'http://www.netbeans.org/ns/j2se-project/3'})
-    out.element('name', data=p.name)
-    out.element('explicit-platform', {'explicit-source-supported' : 'true'})
-    out.open('source-roots')
-    out.element('root', {'id' : 'src.dir'})
-    if len(p.annotation_processors()) > 0:
-        out.element('root', {'id' : 'src.ap-source-output.dir', 'name' : 'Generated Packages'})
-    out.close('source-roots')
-    out.open('test-roots')
-    out.close('test-roots')
-    out.close('data')
-
-    firstDep = True
-    for dep in p.all_deps([], includeLibs=False, includeAnnotationProcessors=True):
-        if dep == p:
-            continue
-
-        if dep.isProject():
-            n = dep.name.replace('.', '_')
-            if firstDep:
-                out.open('references', {'xmlns' : 'http://www.netbeans.org/ns/ant-project-references/1'})
-                firstDep = False
-
-            out.open('reference')
-            out.element('foreign-project', data=n)
-            out.element('artifact-type', data='jar')
-            out.element('script', data='build.xml')
-            out.element('target', data='jar')
-            out.element('clean-target', data='clean')
-            out.element('id', data='jar')
-            out.close('reference')
-
-    if not firstDep:
-        out.close('references')
-
-    out.close('configuration')
-    out.close('project')
-    update_file(join(p.dir, 'nbproject', 'project.xml'), out.xml(indent='    ', newl='\n'))
-    if files:
-        files.append(join(p.dir, 'nbproject', 'project.xml'))
-
-    out = StringIO.StringIO()
-    jdkPlatform = 'JDK_' + str(jdk.version)
-
-    annotationProcessorEnabled = "false"
-    annotationProcessorSrcFolder = ""
-    if len(p.annotation_processors()) > 0:
-        annotationProcessorEnabled = "true"
-        genSrcDir = p.source_gen_dir()
-        if not exists(genSrcDir):
-            os.makedirs(genSrcDir)
-        annotationProcessorSrcFolder = "src.ap-source-output.dir=" + genSrcDir
-
-    content = """
-annotation.processing.enabled=""" + annotationProcessorEnabled + """
-annotation.processing.enabled.in.editor=""" + annotationProcessorEnabled + """
-annotation.processing.processors.list=
-annotation.processing.run.all.processors=true
-application.title=""" + p.name + """
-application.vendor=mx
-build.classes.dir=${build.dir}
-build.classes.excludes=**/*.java,**/*.form
-# This directory is removed when the project is cleaned:
-build.dir=bin
-build.generated.sources.dir=${build.dir}/generated-sources
-# Only compile against the classpath explicitly listed here:
-build.sysclasspath=ignore
-build.test.classes.dir=${build.dir}/test/classes
-build.test.results.dir=${build.dir}/test/results
-# Uncomment to specify the preferred debugger connection transport:
-#debug.transport=dt_socket
-debug.classpath=\\
-${run.classpath}
-debug.test.classpath=\\
-${run.test.classpath}
-# This directory is removed when the project is cleaned:
-dist.dir=dist
-dist.jar=${dist.dir}/""" + p.name + """.jar
-dist.javadoc.dir=${dist.dir}/javadoc
-endorsed.classpath=
-excludes=
-includes=**
-jar.compress=false
-# Space-separated list of extra javac options
-javac.compilerargs=-XDignore.symbol.file
-javac.deprecation=false
-javac.source=""" + str(p.javaCompliance) + """
-javac.target=""" + str(p.javaCompliance) + """
-javac.test.classpath=\\
-${javac.classpath}:\\
-${build.classes.dir}
-javadoc.additionalparam=
-javadoc.author=false
-javadoc.encoding=${source.encoding}
-javadoc.noindex=false
-javadoc.nonavbar=false
-javadoc.notree=false
-javadoc.private=false
-javadoc.splitindex=true
-javadoc.use=true
-javadoc.version=false
-javadoc.windowtitle=
-main.class=com.oracle.truffle.api.impl.Accessor
-manifest.file=manifest.mf
-meta.inf.dir=${src.dir}/META-INF
-mkdist.disabled=false
-platforms.""" + jdkPlatform + """.home=""" + jdk.jdk + """
-platform.active=""" + jdkPlatform + """
-run.classpath=\\
-${javac.classpath}:\\
-${build.classes.dir}
-# Space-separated list of JVM arguments used when running the project
-# (you may also define separate properties like run-sys-prop.name=value instead of -Dname=value
-# or test-sys-prop.name=value to set system properties for unit tests):
-run.jvmargs=
-run.test.classpath=\\
-${javac.test.classpath}:\\
-${build.test.classes.dir}
-test.src.dir=./test
-""" + annotationProcessorSrcFolder + """
-source.encoding=UTF-8""".replace(':', os.pathsep).replace('/', os.sep)
-    print >> out, content
-
-    mainSrc = True
-    for src in p.srcDirs:
-        srcDir = join(p.dir, src)
-        if not exists(srcDir):
-            os.mkdir(srcDir)
-        ref = 'file.reference.' + p.name + '-' + src
-        print >> out, ref + '=' + src
-        if mainSrc:
-            print >> out, 'src.dir=${' + ref + '}'
-            mainSrc = False
-        else:
-            print >> out, 'src.' + src + '.dir=${' + ref + '}'
-
-    javacClasspath = []
-
-    deps = p.all_deps([], True)
-    annotationProcessorOnlyDeps = []
-    if len(p.annotation_processors()) > 0:
-        for ap in p.annotation_processors():
-            apDep = dependency(ap)
-            if not apDep in deps:
-                deps.append(apDep)
-                annotationProcessorOnlyDeps.append(apDep)
-
-    annotationProcessorReferences = []
-
-    for dep in deps:
-        if dep == p:
-            continue
-
-        if dep.isLibrary():
-            path = dep.get_path(resolve=True)
-            if path:
-                if os.sep == '\\':
-                    path = path.replace('\\', '\\\\')
-                ref = 'file.reference.' + dep.name + '-bin'
-                print >> out, ref + '=' + path
-                if libFiles:
-                    libFiles.append(path)
-
-        elif dep.isProject():
-            n = dep.name.replace('.', '_')
-            relDepPath = os.path.relpath(dep.dir, p.dir).replace(os.sep, '/')
-            ref = 'reference.' + n + '.jar'
-            print >> out, 'project.' + n + '=' + relDepPath
-            print >> out, ref + '=${project.' + n + '}/dist/' + dep.name + '.jar'
-
-        if not dep in annotationProcessorOnlyDeps:
-            javacClasspath.append('${' + ref + '}')
-        else:
-            annotationProcessorReferences.append('${' + ref + '}')
-
-    print >> out, 'javac.classpath=\\\n    ' + (os.pathsep + '\\\n    ').join(javacClasspath)
-    print >> out, 'javac.processorpath=' + (os.pathsep + '\\\n    ').join(['${javac.classpath}'] + annotationProcessorReferences)
-    print >> out, 'javac.test.processorpath=' + (os.pathsep + '\\\n    ').join(['${javac.test.classpath}'] + annotationProcessorReferences)
-
-    update_file(join(p.dir, 'nbproject', 'project.properties'), out.getvalue())
-    out.close()
-    if files:
-        files.append(join(p.dir, 'nbproject', 'project.properties'))
-
-def _netbeansinit_suite(args, suite, refreshOnly=False, buildProcessorJars=True):
-    configZip = TimeStampFile(join(suite.mxDir, 'netbeans-config.zip'))
-    configLibsZip = join(suite.mxDir, 'eclipse-config-libs.zip')
-    if refreshOnly and not configZip.exists():
-        return
-
-    if _check_ide_timestamp(suite, configZip, 'netbeans'):
-        logv('[NetBeans configurations are up to date - skipping]')
-        return
-
-    files = []
-    libFiles = []
-    jdks = set()
-    for p in suite.projects:
-        if p.native:
-            continue
-
-        if exists(join(p.dir, 'plugin.xml')):  # eclipse plugin project
-            continue
-
-        _netbeansinit_project(p, jdks, files, libFiles)
-
-    log('If using NetBeans:')
-    # http://stackoverflow.com/questions/24720665/cant-resolve-jdk-internal-package
-    log('  1. Edit etc/netbeans.conf in your NetBeans installation and modify netbeans_default_options variable to include "-J-DCachingArchiveProvider.disableCtSym=true"')
-    log('  2. Ensure that the following platform(s) are defined (Tools -> Java Platforms):')
-    for jdk in jdks:
-        log('        JDK_' + str(jdk.version))
-    log('  3. Open/create a Project Group for the directory containing the projects (File -> Project Group -> New Group... -> Folder of Projects)')
-
-    _zip_files(files, suite.dir, configZip.path)
-    _zip_files(libFiles, suite.dir, configLibsZip)
-
-def intellijinit(args, refreshOnly=False):
-    """(re)generate Intellij project configurations"""
-
-    for suite in suites(True):
-        _intellij_suite(args, suite, refreshOnly)
-
-def _intellij_suite(args, suite, refreshOnly=False):
-
-    libraries = set()
-
-    ideaProjectDirectory = join(suite.dir, '.idea')
-
-    if not exists(ideaProjectDirectory):
-        os.mkdir(ideaProjectDirectory)
-    nameFile = join(ideaProjectDirectory, '.name')
-    update_file(nameFile, "Graal")
-    modulesXml = XMLDoc()
-    modulesXml.open('project', attributes={'version': '4'})
-    modulesXml.open('component', attributes={'name': 'ProjectModuleManager'})
-    modulesXml.open('modules')
-
-
-    def _intellij_exclude_if_exists(xml, p, name):
-        path = join(p.dir, name)
-        if exists(path):
-            xml.element('excludeFolder', attributes={'url':'file://$MODULE_DIR$/' + name})
-
-    annotationProcessorProfiles = {}
-
-    def _complianceToIntellijLanguageLevel(compliance):
-        return 'JDK_1_' + str(compliance.value)
-
-    # create the modules (1 module = 1 IntelliJ project)
-    for p in suite.projects:
-        if p.native:
-            continue
-
-        assert java(p.javaCompliance)
-
-        if not exists(p.dir):
-            os.makedirs(p.dir)
-
-        annotationProcessorProfileKey = tuple(p.annotation_processors())
-
-        if not annotationProcessorProfileKey in annotationProcessorProfiles:
-            annotationProcessorProfiles[annotationProcessorProfileKey] = [p]
-        else:
-            annotationProcessorProfiles[annotationProcessorProfileKey].append(p)
-
-        intellijLanguageLevel = _complianceToIntellijLanguageLevel(p.javaCompliance)
-
-        moduleXml = XMLDoc()
-        moduleXml.open('module', attributes={'type': 'JAVA_MODULE', 'version': '4'})
-
-        moduleXml.open('component', attributes={'name': 'NewModuleRootManager', 'LANGUAGE_LEVEL': intellijLanguageLevel, 'inherit-compiler-output': 'false'})
-        moduleXml.element('output', attributes={'url': 'file://$MODULE_DIR$/bin'})
-        moduleXml.element('exclude-output')
-
-        moduleXml.open('content', attributes={'url': 'file://$MODULE_DIR$'})
-        for src in p.srcDirs:
-            srcDir = join(p.dir, src)
-            if not exists(srcDir):
-                os.mkdir(srcDir)
-            moduleXml.element('sourceFolder', attributes={'url':'file://$MODULE_DIR$/' + src, 'isTestSource': 'false'})
-
-        if len(p.annotation_processors()) > 0:
-            genDir = p.source_gen_dir()
-            if not exists(genDir):
-                os.mkdir(genDir)
-            moduleXml.element('sourceFolder', attributes={'url':'file://$MODULE_DIR$/' + os.path.relpath(genDir, p.dir), 'isTestSource': 'false'})
-
-        for name in ['.externalToolBuilders', '.settings', 'nbproject']:
-            _intellij_exclude_if_exists(moduleXml, p, name)
-        moduleXml.close('content')
-
-        moduleXml.element('orderEntry', attributes={'type': 'jdk', 'jdkType': 'JavaSDK', 'jdkName': str(p.javaCompliance)})
-        moduleXml.element('orderEntry', attributes={'type': 'sourceFolder', 'forTests': 'false'})
-
-        deps = p.all_deps([], True, includeAnnotationProcessors=True)
-        for dep in deps:
-            if dep == p:
-                continue
-
-            if dep.isLibrary():
-                libraries.add(dep)
-                moduleXml.element('orderEntry', attributes={'type': 'library', 'name': dep.name, 'level': 'project'})
-            elif dep.isProject():
-                moduleXml.element('orderEntry', attributes={'type': 'module', 'module-name': dep.name})
-
-        moduleXml.close('component')
-        moduleXml.close('module')
-        moduleFile = join(p.dir, p.name + '.iml')
-        update_file(moduleFile, moduleXml.xml(indent='  ', newl='\n'))
-
-        moduleFilePath = "$PROJECT_DIR$/" + os.path.relpath(moduleFile, suite.dir)
-        modulesXml.element('module', attributes={'fileurl': 'file://' + moduleFilePath, 'filepath': moduleFilePath})
-
-    modulesXml.close('modules')
-    modulesXml.close('component')
-    modulesXml.close('project')
-    moduleXmlFile = join(ideaProjectDirectory, 'modules.xml')
-    update_file(moduleXmlFile, modulesXml.xml(indent='  ', newl='\n'))
-
-    # TODO What about cross-suite dependencies?
-
-    librariesDirectory = join(ideaProjectDirectory, 'libraries')
-
-    if not exists(librariesDirectory):
-        os.mkdir(librariesDirectory)
-
-    # Set up the libraries that were used above
-    # TODO: setup all the libraries from the suite regardless of usage?
-    for library in libraries:
-        libraryXml = XMLDoc()
-
-        libraryXml.open('component', attributes={'name': 'libraryTable'})
-        libraryXml.open('library', attributes={'name': library.name})
-        libraryXml.open('CLASSES')
-        libraryXml.element('root', attributes={'url': 'jar://$PROJECT_DIR$/' + os.path.relpath(library.get_path(True), suite.dir) + '!/'})
-        libraryXml.close('CLASSES')
-        libraryXml.element('JAVADOC')
-        if library.sourcePath:
-            libraryXml.open('SOURCES')
-            libraryXml.element('root', attributes={'url': 'jar://$PROJECT_DIR$/' + os.path.relpath(library.get_source_path(True), suite.dir) + '!/'})
-            libraryXml.close('SOURCES')
-        else:
-            libraryXml.element('SOURCES')
-        libraryXml.close('library')
-        libraryXml.close('component')
-
-        libraryFile = join(librariesDirectory, library.name + '.xml')
-        update_file(libraryFile, libraryXml.xml(indent='  ', newl='\n'))
-
-
-
-    # Set up annotation processor profiles and link them to modules in compiler.xml
-    compilerXml = XMLDoc()
-    compilerXml.open('project', attributes={'version': '4'})
-    compilerXml.open('component', attributes={'name': 'CompilerConfiguration'})
-
-    compilerXml.element('option', attributes={'name': "DEFAULT_COMPILER", 'value': 'Javac'})
-    compilerXml.element('resourceExtensions')
-    compilerXml.open('wildcardResourcePatterns')
-    compilerXml.element('entry', attributes={'name': '!?*.java'})
-    compilerXml.close('wildcardResourcePatterns')
-
-    if annotationProcessorProfiles:
-        compilerXml.open('annotationProcessing')
-        for processors, modules in sorted(annotationProcessorProfiles.iteritems()):
-            compilerXml.open('profile', attributes={'default': 'false', 'name': '-'.join(processors), 'enabled': 'true'})
-            compilerXml.element('sourceOutputDir', attributes={'name': 'src_gen'})  # TODO use p.source_gen_dir() ?
-            compilerXml.element('outputRelativeToContentRoot', attributes={'value': 'true'})
-            compilerXml.open('processorPath', attributes={'useClasspath': 'false'})
-            for apName in processors:
-                pDep = dependency(apName)
-                for entry in pDep.all_deps([], True):
-                    if entry.isLibrary():
-                        compilerXml.element('entry', attributes={'name': '$PROJECT_DIR$/' + os.path.relpath(entry.path, suite.dir)})
-                    elif entry.isProject():
-                        assert entry.isProject()
-                        compilerXml.element('entry', attributes={'name': '$PROJECT_DIR$/' + os.path.relpath(entry.output_dir(), suite.dir)})
-            compilerXml.close('processorPath')
-            for module in modules:
-                compilerXml.element('module', attributes={'name': module.name})
-            compilerXml.close('profile')
-        compilerXml.close('annotationProcessing')
-
-    compilerXml.close('component')
-    compilerXml.close('project')
-    compilerFile = join(ideaProjectDirectory, 'compiler.xml')
-    update_file(compilerFile, compilerXml.xml(indent='  ', newl='\n'))
-
-    # Write misc.xml for global JDK config
-    miscXml = XMLDoc()
-    miscXml.open('project', attributes={'version': '4'})
-    miscXml.element('component', attributes={'name': 'ProjectRootManager', 'version': '2', 'languageLevel': _complianceToIntellijLanguageLevel(java().javaCompliance), 'project-jdk-name': str(java().javaCompliance), 'project-jdk-type': 'JavaSDK'})
-    miscXml.close('project')
-    miscFile = join(ideaProjectDirectory, 'misc.xml')
-    update_file(miscFile, miscXml.xml(indent='  ', newl='\n'))
-
-
-    # TODO look into copyright settings
-    # TODO should add vcs.xml support
-
-def ideclean(args):
-    """remove all Eclipse and NetBeans project configurations"""
-    def rm(path):
-        if exists(path):
-            os.remove(path)
-
-    for s in suites():
-        rm(join(s.mxDir, 'eclipse-config.zip'))
-        rm(join(s.mxDir, 'netbeans-config.zip'))
-        shutil.rmtree(join(s.dir, '.idea'), ignore_errors=True)
-
-    for p in projects():
-        if p.native:
-            continue
-
-        shutil.rmtree(join(p.dir, '.settings'), ignore_errors=True)
-        shutil.rmtree(join(p.dir, '.externalToolBuilders'), ignore_errors=True)
-        shutil.rmtree(join(p.dir, 'nbproject'), ignore_errors=True)
-        rm(join(p.dir, '.classpath'))
-        rm(join(p.dir, '.checkstyle'))
-        rm(join(p.dir, '.project'))
-        rm(join(p.dir, '.factorypath'))
-        rm(join(p.dir, p.name + '.iml'))
-        rm(join(p.dir, 'build.xml'))
-        rm(join(p.dir, 'eclipse-build.xml'))
-        try:
-            rm(join(p.dir, p.name + '.jar'))
-        except:
-            log("Error removing {0}".format(p.name + '.jar'))
-
-    for d in _dists.itervalues():
-        if d.get_ide_project_dir():
-            shutil.rmtree(d.get_ide_project_dir(), ignore_errors=True)
-
-def ideinit(args, refreshOnly=False, buildProcessorJars=True):
-    """(re)generate Eclipse, NetBeans and Intellij project configurations"""
-    eclipseinit(args, refreshOnly=refreshOnly, buildProcessorJars=buildProcessorJars)
-    netbeansinit(args, refreshOnly=refreshOnly, buildProcessorJars=buildProcessorJars)
-    intellijinit(args, refreshOnly=refreshOnly)
-    if not refreshOnly:
-        fsckprojects([])
-
-def fsckprojects(args):
-    """find directories corresponding to deleted Java projects and delete them"""
-    if not is_interactive():
-        log('fsckprojects command must be run in an interactive shell')
-        return
-    hg = HgConfig()
-    for suite in suites(True):
-        projectDirs = [p.dir for p in suite.projects]
-        distIdeDirs = [d.get_ide_project_dir() for d in suite.dists if d.get_ide_project_dir() is not None]
-        for dirpath, dirnames, files in os.walk(suite.dir):
-            if dirpath == suite.dir:
-                # no point in traversing .hg, lib, or .workspace
-                dirnames[:] = [d for d in dirnames if d not in ['.hg', 'lib', '.workspace']]
-            elif dirpath in projectDirs:
-                # don't traverse subdirs of an existing project in this suite
-                dirnames[:] = []
-            elif dirpath in distIdeDirs:
-                # don't traverse subdirs of an existing distribution in this suite
-                dirnames[:] = []
-            else:
-                projectConfigFiles = frozenset(['.classpath', '.project', 'nbproject'])
-                indicators = projectConfigFiles.intersection(files)
-                if len(indicators) != 0:
-                    indicators = [os.path.relpath(join(dirpath, i), suite.dir) for i in indicators]
-                    indicatorsInHg = hg.locate(suite.dir, indicators)
-                    # Only proceed if there are indicator files that are not under HG
-                    if len(indicators) > len(indicatorsInHg):
-                        if not is_interactive() or ask_yes_no(dirpath + ' looks like a removed project -- delete it', 'n'):
-                            shutil.rmtree(dirpath)
-                            log('Deleted ' + dirpath)
-
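# A minimal standalone sketch of the os.walk pruning idiom used in fsckprojects
# above: assigning to dirnames[:] mutates the list that os.walk consults, so the
# pruned subdirectories are never descended into (rebinding the name instead
# would have no effect). The function name and skip list below are illustrative.
import os

def walk_pruned(top, skip=('.hg', 'lib', '.workspace')):
    for dirpath, dirnames, filenames in os.walk(top):
        # prune in place so os.walk skips these subtrees entirely
        dirnames[:] = [d for d in dirnames if d not in skip]
        yield dirpath, filenames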
-def javadoc(args, parser=None, docDir='javadoc', includeDeps=True, stdDoclet=True):
-    """generate javadoc for some/all Java projects"""
-
-    parser = ArgumentParser(prog='mx javadoc') if parser is None else parser
-    parser.add_argument('-d', '--base', action='store', help='base directory for output')
-    parser.add_argument('--unified', action='store_true', help='put javadoc in a single directory instead of one per project')
-    parser.add_argument('--force', action='store_true', help='(re)generate javadoc even if package-list file exists')
-    parser.add_argument('--projects', action='store', help='comma separated projects to process (omit to process all projects)')
-    parser.add_argument('--Wapi', action='store_true', dest='warnAPI', help='show warnings about using internal APIs')
-    parser.add_argument('--argfile', action='store', help='name of file containing extra javadoc options')
-    parser.add_argument('--arg', action='append', dest='extra_args', help='extra Javadoc arguments (e.g. --arg @-use)', metavar='@<arg>', default=[])
-    parser.add_argument('-m', '--memory', action='store', help='-Xmx value to pass to underlying JVM')
-    parser.add_argument('--packages', action='store', help='comma separated packages to process (omit to process all packages)')
-    parser.add_argument('--exclude-packages', action='store', help='comma separated packages to exclude')
-
-    args = parser.parse_args(args)
-
-    # build list of projects to be processed
-    if args.projects is not None:
-        candidates = [project(name) for name in args.projects.split(',')]
-    else:
-        candidates = projects_opt_limit_to_suites()
-
-    # optionally restrict packages within a project
-    packages = []
-    if args.packages is not None:
-        packages = [name for name in args.packages.split(',')]
-
-    exclude_packages = []
-    if args.exclude_packages is not None:
-        exclude_packages = [name for name in args.exclude_packages.split(',')]
-
-    def outDir(p):
-        if args.base is None:
-            return join(p.dir, docDir)
-        return join(args.base, p.name, docDir)
-
-    def check_package_list(p):
-        return not exists(join(outDir(p), 'package-list'))
-
-    def assess_candidate(p, projects):
-        if p in projects:
-            return False
-        if args.force or args.unified or check_package_list(p):
-            projects.append(p)
-            return True
-        return False
-
-    projects = []
-    for p in candidates:
-        if not p.native:
-            if includeDeps:
-                deps = p.all_deps([], includeLibs=False, includeSelf=False)
-                for d in deps:
-                    assess_candidate(d, projects)
-            if not assess_candidate(p, projects):
-                logv('[package-list file exists - skipping {0}]'.format(p.name))
-
-
-    def find_packages(sourceDirs, pkgs=None):
-        if pkgs is None:
-            pkgs = set()
-        for sourceDir in sourceDirs:
-            for root, _, files in os.walk(sourceDir):
-                if len([name for name in files if name.endswith('.java')]) != 0:
-                    pkg = root[len(sourceDir) + 1:].replace(os.sep, '.')
-                    if len(packages) == 0 or pkg in packages:
-                        if len(exclude_packages) == 0 or not pkg in exclude_packages:
-                            pkgs.add(pkg)
-        return pkgs
-
-    extraArgs = [a.lstrip('@') for a in args.extra_args]
-    if args.argfile is not None:
-        extraArgs += ['@' + args.argfile]
-    memory = '2g'
-    if args.memory is not None:
-        memory = args.memory
-    memory = '-J-Xmx' + memory
-
-    if not args.unified:
-        for p in projects:
-            # The project must be built to ensure javadoc can find class files for all referenced classes
-            build(['--no-native', '--projects', p.name])
-
-            pkgs = find_packages(p.source_dirs(), set())
-            deps = p.all_deps([], includeLibs=False, includeSelf=False)
-            links = ['-link', 'http://docs.oracle.com/javase/' + str(p.javaCompliance.value) + '/docs/api/']
-            out = outDir(p)
-            for d in deps:
-                depOut = outDir(d)
-                links.append('-link')
-                links.append(os.path.relpath(depOut, out))
-            cp = classpath(p.name, includeSelf=True)
-            sp = os.pathsep.join(p.source_dirs())
-            overviewFile = join(p.dir, 'overview.html')
-            delOverviewFile = False
-            if not exists(overviewFile):
-                with open(overviewFile, 'w') as fp:
-                    print >> fp, '<html><body>Documentation for the <code>' + p.name + '</code> project.</body></html>'
-                delOverviewFile = True
-            nowarnAPI = []
-            if not args.warnAPI:
-                nowarnAPI.append('-XDignore.symbol.file')
-
-            # windowTitle only applies to the standard doclet processor
-            windowTitle = []
-            if stdDoclet:
-                windowTitle = ['-windowtitle', p.name + ' javadoc']
-            try:
-                log('Generating {2} for {0} in {1}'.format(p.name, out, docDir))
-                projectJava = java(p.javaCompliance)
-
-                # Once https://bugs.openjdk.java.net/browse/JDK-8041628 is fixed,
-                # this should be reverted to:
-                # javadocExe = java().javadoc
-                # we can then also respect _opts.relatex_compliance
-                javadocExe = projectJava.javadoc
-
-                run([javadocExe, memory,
-                     '-XDignore.symbol.file',
-                     '-classpath', cp,
-                     '-quiet',
-                     '-d', out,
-                     '-overview', overviewFile,
-                     '-sourcepath', sp,
-                     '-source', str(projectJava.javaCompliance)] +
-                     projectJava.javadocLibOptions([]) +
-                     ([] if projectJava.javaCompliance < JavaCompliance('1.8') else ['-Xdoclint:none']) +
-                     links +
-                     extraArgs +
-                     nowarnAPI +
-                     windowTitle +
-                     list(pkgs))
-                log('Generated {2} for {0} in {1}'.format(p.name, out, docDir))
-            finally:
-                if delOverviewFile:
-                    os.remove(overviewFile)
-
-    else:
-        # The projects must be built to ensure javadoc can find class files for all referenced classes
-        build(['--no-native'])
-
-        pkgs = set()
-        sp = []
-        names = []
-        for p in projects:
-            find_packages(p.source_dirs(), pkgs)
-            sp += p.source_dirs()
-            names.append(p.name)
-
-        links = ['-link', 'http://docs.oracle.com/javase/' + str(java().javaCompliance.value) + '/docs/api/']
-        out = join(_primary_suite.dir, docDir)
-        if args.base is not None:
-            out = join(args.base, docDir)
-        cp = classpath()
-        sp = os.pathsep.join(sp)
-        nowarnAPI = []
-        if not args.warnAPI:
-            nowarnAPI.append('-XDignore.symbol.file')
-        log('Generating {2} for {0} in {1}'.format(', '.join(names), out, docDir))
-        run([java().javadoc, memory,
-             '-classpath', cp,
-             '-quiet',
-             '-d', out,
-             '-sourcepath', sp] +
-             ([] if java().javaCompliance < JavaCompliance('1.8') else ['-Xdoclint:none']) +
-             links +
-             extraArgs +
-             nowarnAPI +
-             list(pkgs))
-        log('Generated {2} for {0} in {1}'.format(', '.join(names), out, docDir))
-
-def site(args):
-    """creates a website containing javadoc and the project dependency graph"""
-
-    parser = ArgumentParser(prog='site')
-    parser.add_argument('-d', '--base', action='store', help='directory for generated site', required=True, metavar='<dir>')
-    parser.add_argument('--tmp', action='store', help='directory to use for intermediate results', metavar='<dir>')
-    parser.add_argument('--name', action='store', help='name of overall documentation', required=True, metavar='<name>')
-    parser.add_argument('--overview', action='store', help='path to the overview content for overall documentation', required=True, metavar='<path>')
-    parser.add_argument('--projects', action='store', help='comma separated projects to process (omit to process all projects)')
-    parser.add_argument('--jd', action='append', help='extra Javadoc arguments (e.g. --jd @-use)', metavar='@<arg>', default=[])
-    parser.add_argument('--exclude-packages', action='store', help='comma separated packages to exclude', metavar='<pkgs>')
-    parser.add_argument('--dot-output-base', action='store', help='base file name (relative to <dir>/all) for project dependency graph .svg and .jpg files generated by dot (omit to disable dot generation)', metavar='<path>')
-    parser.add_argument('--title', action='store', help='value used for -windowtitle and -doctitle javadoc args for overall documentation (default: "<name>")', metavar='<title>')
-    args = parser.parse_args(args)
-
-    args.base = os.path.abspath(args.base)
-    tmpbase = args.tmp if args.tmp else tempfile.mkdtemp(prefix=basename(args.base) + '.', dir=dirname(args.base))
-    unified = join(tmpbase, 'all')
-
-    exclude_packages_arg = []
-    if args.exclude_packages is not None:
-        exclude_packages_arg = ['--exclude-packages', args.exclude_packages]
-
-    projects = sorted_deps()
-    projects_arg = []
-    if args.projects is not None:
-        projects_arg = ['--projects', args.projects]
-        projects = [project(name) for name in args.projects.split(',')]
-
-    extra_javadoc_args = []
-    for a in args.jd:
-        extra_javadoc_args.append('--arg')
-        extra_javadoc_args.append('@' + a)
-
-    try:
-        # Create javadoc for each project
-        javadoc(['--base', tmpbase] + exclude_packages_arg + projects_arg + extra_javadoc_args)
-
-        # Create unified javadoc for all projects
-        with open(args.overview) as fp:
-            content = fp.read()
-            idx = content.rfind('</body>')
-            if idx != -1:
-                args.overview = join(tmpbase, 'overview_with_projects.html')
-                with open(args.overview, 'w') as fp2:
-                    print >> fp2, content[0:idx]
-                    print >> fp2, """<div class="contentContainer">
-<table class="overviewSummary" border="0" cellpadding="3" cellspacing="0" summary="Projects table">
-<caption><span>Projects</span><span class="tabEnd">&nbsp;</span></caption>
-<tr><th class="colFirst" scope="col">Project</th><th class="colLast" scope="col">&nbsp;</th></tr>
-<tbody>"""
-                    color = 'row'
-                    for p in projects:
-                        print >> fp2, '<tr class="{1}Color"><td class="colFirst"><a href="../{0}/javadoc/index.html" target="_top">{0}</a></td><td class="colLast">&nbsp;</td></tr>'.format(p.name, color)
-                        color = 'row' if color == 'alt' else 'alt'
-
-                    print >> fp2, '</tbody></table></div>'
-                    print >> fp2, content[idx:]
-
-        title = args.title if args.title is not None else args.name
-        javadoc(['--base', tmpbase,
-                 '--unified',
-                 '--arg', '@-windowtitle', '--arg', '@' + title,
-                 '--arg', '@-doctitle', '--arg', '@' + title,
-                 '--arg', '@-overview', '--arg', '@' + args.overview] + exclude_packages_arg + projects_arg + extra_javadoc_args)
-
-        if exists(unified):
-            shutil.rmtree(unified)
-        os.rename(join(tmpbase, 'javadoc'), unified)
-
-        # Generate dependency graph with Graphviz
-        if args.dot_output_base is not None:
-            dotErr = None
-            try:
-                if not 'version' in subprocess.check_output(['dot', '-V'], stderr=subprocess.STDOUT):
-                    dotErr = 'dot -V does not print a string containing "version"'
-            except subprocess.CalledProcessError as e:
-                dotErr = 'error calling "dot -V": {0}'.format(e)
-            except OSError as e:
-                dotErr = 'error calling "dot -V": {0}'.format(e)
-
-            if dotErr != None:
-                abort('cannot generate dependency graph: ' + dotErr)
-
-            dot = join(tmpbase, 'all', str(args.dot_output_base) + '.dot')
-            svg = join(tmpbase, 'all', str(args.dot_output_base) + '.svg')
-            jpg = join(tmpbase, 'all', str(args.dot_output_base) + '.jpg')
-            html = join(tmpbase, 'all', str(args.dot_output_base) + '.html')
-            with open(dot, 'w') as fp:
-                dim = len(projects)
-                print >> fp, 'digraph projects {'
-                print >> fp, 'rankdir=BT;'
-                print >> fp, 'size = "' + str(dim) + ',' + str(dim) + '";'
-                print >> fp, 'node [shape=rect, fontcolor="blue"];'
-                # print >> fp, 'edge [color="green"];'
-                for p in projects:
-                    print >> fp, '"' + p.name + '" [URL = "../' + p.name + '/javadoc/index.html", target = "_top"]'
-                    for dep in p.canonical_deps():
-                        if dep in [proj.name for proj in projects]:
-                            print >> fp, '"' + p.name + '" -> "' + dep + '"'
-                depths = dict()
-                for p in projects:
-                    d = p.max_depth()
-                    depths.setdefault(d, list()).append(p.name)
-                print >> fp, '}'
-
-            run(['dot', '-Tsvg', '-o' + svg, '-Tjpg', '-o' + jpg, dot])
-
-            # Post-process generated SVG to remove title elements which most browsers
-            # render as redundant (and annoying) tooltips.
-            with open(svg, 'r') as fp:
-                content = fp.read()
-            content = re.sub('<title>.*</title>', '', content)
-            content = re.sub('xlink:title="[^"]*"', '', content)
-            with open(svg, 'w') as fp:
-                fp.write(content)
-
-            # Create HTML that embeds the svg file in an <object> frame
-            with open(html, 'w') as fp:
-                print >> fp, '<html><body><object data="{0}.svg" type="image/svg+xml"></object></body></html>'.format(args.dot_output_base)
-
-        if exists(args.base):
-            shutil.rmtree(args.base)
-        if args.tmp:
-            shutil.copytree(tmpbase, args.base)
-        else:
-            shutil.move(tmpbase, args.base)
-
-        print 'Created website - root is ' + join(args.base, 'all', 'index.html')
-
-    finally:
-        if not args.tmp and exists(tmpbase):
-            shutil.rmtree(tmpbase)
-
-def _kwArg(kwargs):
-    if len(kwargs) > 0:
-        return kwargs.pop(0)
-    return None
-
-def findclass(args, logToConsole=True, matcher=lambda string, classname: string in classname):
-    """find all classes matching a given substring"""
-    matches = []
-    for entry, filename in classpath_walk(includeBootClasspath=True):
-        if filename.endswith('.class'):
-            if isinstance(entry, zipfile.ZipFile):
-                classname = filename.replace('/', '.')
-            else:
-                classname = filename.replace(os.sep, '.')
-            classname = classname[:-len('.class')]
-            for a in args:
-                if matcher(a, classname):
-                    matches.append(classname)
-                    if logToConsole:
-                        log(classname)
-    return matches
-
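# findclass above accepts a custom matcher of the form matcher(pattern, classname) -> bool,
# defaulting to a substring test. A hedged sketch of a matcher that could be passed in
# instead, matching on the simple class name only (the helper name is illustrative):
def suffix_matcher(pattern, classname):
    # match fully qualified class names whose simple name equals the pattern
    return classname.endswith('.' + pattern) or classname == pattern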
-def select_items(items, descriptions=None, allowMultiple=True):
-    """
-    Presents a command line interface for selecting one or more (if allowMultiple is true) items.
-
-    """
-    if len(items) <= 1:
-        return items
-    else:
-        assert is_interactive()
-        numlen = str(len(str(len(items))))
-        if allowMultiple:
-            log(('[{0:>' + numlen + '}] <all>').format(0))
-        for i in range(0, len(items)):
-            if descriptions is None:
-                log(('[{0:>' + numlen + '}] {1}').format(i + 1, items[i]))
-            else:
-                assert len(items) == len(descriptions)
-                wrapper = textwrap.TextWrapper(subsequent_indent='    ')
-                log('\n'.join(wrapper.wrap(('[{0:>' + numlen + '}] {1} - {2}').format(i + 1, items[i], descriptions[i]))))
-        while True:
-            if allowMultiple:
-                s = raw_input('Enter number(s) of selection (separate multiple choices with spaces): ').split()
-            else:
-                s = [raw_input('Enter number of selection: ')]
-            try:
-                s = [int(x) for x in s]
-            except:
-                log('Selection contains non-numeric characters: "' + ' '.join(s) + '"')
-                continue
-
-            if allowMultiple and 0 in s:
-                return items
-
-            indexes = []
-            for n in s:
-                if n not in range(1, len(items) + 1):
-                    log('Invalid selection: ' + str(n))
-                    continue
-                else:
-                    indexes.append(n - 1)
-            if allowMultiple:
-                return [items[i] for i in indexes]
-            if len(indexes) == 1:
-                return items[indexes[0]]
-            return None
-
-def exportlibs(args):
-    """export libraries to an archive file"""
-
-    parser = ArgumentParser(prog='exportlibs')
-    parser.add_argument('-b', '--base', action='store', help='base name of archive (default: libs)', default='libs', metavar='<path>')
-    parser.add_argument('-a', '--include-all', action='store_true', help="include all defined libraries")
-    parser.add_argument('--arc', action='store', choices=['tgz', 'tbz2', 'tar', 'zip'], default='tgz', help='the type of the archive to create')
-    parser.add_argument('--no-sha1', action='store_false', dest='sha1', help='do not create SHA1 signature of archive')
-    parser.add_argument('--no-md5', action='store_false', dest='md5', help='do not create MD5 signature of archive')
-    parser.add_argument('--include-system-libs', action='store_true', help='include system libraries (i.e., those not downloaded from URLs)')
-    parser.add_argument('extras', nargs=REMAINDER, help='extra files and directories to add to archive', metavar='files...')
-    args = parser.parse_args(args)
-
-    def createArchive(addMethod):
-        entries = {}
-        def add(path, arcname):
-            apath = os.path.abspath(path)
-            if not entries.has_key(arcname):
-                entries[arcname] = apath
-                logv('[adding ' + path + ']')
-                addMethod(path, arcname=arcname)
-            elif entries[arcname] != apath:
-                logv('[warning: ' + apath + ' collides with ' + entries[arcname] + ' as ' + arcname + ']')
-            else:
-                logv('[already added ' + path + ']')
-
-        libsToExport = set()
-        if args.include_all:
-            for lib in _libs.itervalues():
-                libsToExport.add(lib)
-        else:
-            def isValidLibrary(dep):
-                if dep in _libs.iterkeys():
-                    lib = _libs[dep]
-                    if len(lib.urls) != 0 or args.include_system_libs:
-                        return lib
-                return None
-
-            # iterate over all project dependencies and find used libraries
-            for p in _projects.itervalues():
-                for dep in p.deps:
-                    r = isValidLibrary(dep)
-                    if r:
-                        libsToExport.add(r)
-
-            # a library can have other libraries as dependency
-            size = 0
-            while size != len(libsToExport):
-                size = len(libsToExport)
-                for lib in libsToExport.copy():
-                    for dep in lib.deps:
-                        r = isValidLibrary(dep)
-                        if r:
-                            libsToExport.add(r)
-
-        for lib in libsToExport:
-            add(lib.get_path(resolve=True), lib.path)
-            if lib.sha1:
-                add(lib.get_path(resolve=True) + ".sha1", lib.path + ".sha1")
-            if lib.sourcePath:
-                add(lib.get_source_path(resolve=True), lib.sourcePath)
-                if lib.sourceSha1:
-                    add(lib.get_source_path(resolve=True) + ".sha1", lib.sourcePath + ".sha1")
-
-        if args.extras:
-            for e in args.extras:
-                if os.path.isdir(e):
-                    for root, _, filenames in os.walk(e):
-                        for name in filenames:
-                            f = join(root, name)
-                            add(f, f)
-                else:
-                    add(e, e)
-
-    if args.arc == 'zip':
-        path = args.base + '.zip'
-        with zipfile.ZipFile(path, 'w') as zf:
-            createArchive(zf.write)
-    else:
-        path = args.base + '.tar'
-        mode = 'w'
-        if args.arc != 'tar':
-            sfx = args.arc[1:]
-            mode = mode + ':' + sfx
-            path = path + '.' + sfx
-        with tarfile.open(path, mode) as tar:
-            createArchive(tar.add)
-    log('created ' + path)
-
-    def digest(enabled, path, factory, suffix):
-        if enabled:
-            d = factory()
-            with open(path, 'rb') as f:
-                while True:
-                    buf = f.read(4096)
-                    if not buf:
-                        break
-                    d.update(buf)
-            with open(path + '.' + suffix, 'w') as fp:
-                print >> fp, d.hexdigest()
-            log('created ' + path + '.' + suffix)
-
-    digest(args.sha1, path, hashlib.sha1, 'sha1')
-    digest(args.md5, path, hashlib.md5, 'md5')
-
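# The digest helper above streams the archive through the hash in 4096-byte chunks,
# so large files never need to be held in memory. A minimal standalone sketch of the
# same pattern (the function name is illustrative):
import hashlib

def file_sha1(path, blocksize=4096):
    d = hashlib.sha1()
    with open(path, 'rb') as f:
        while True:
            buf = f.read(blocksize)
            if not buf:
                break
            d.update(buf)
    return d.hexdigest()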
-def javap(args):
-    """disassemble classes matching given pattern with javap"""
-
-    javapExe = java().javap
-    if not exists(javapExe):
-        abort('The javap executable does not exist: ' + javapExe)
-    else:
-        candidates = findclass(args, logToConsole=False)
-        if len(candidates) == 0:
-            log('no matches')
-        selection = select_items(candidates)
-        run([javapExe, '-private', '-verbose', '-classpath', classpath()] + selection)
-
-def show_projects(args):
-    """show all projects"""
-    for s in suites():
-        if len(s.projects) != 0:
-            log(join(s.mxDir, 'suite*.py'))
-            for p in s.projects:
-                log('\t' + p.name)
-
-def show_suites(args):
-    """show all suites"""
-    def _show_section(name, section):
-        if len(section) != 0:
-            log('  ' + name + ':')
-            for e in section:
-                log('    ' + e.name)
-
-    for s in suites():
-        log(join(s.mxDir, 'suite*.py'))
-        _show_section('libraries', s.libs)
-        _show_section('jrelibraries', s.jreLibs)
-        _show_section('projects', s.projects)
-        _show_section('distributions', s.dists)
-
-def _compile_mx_class(javaClassName, classpath=None, jdk=None, myDir=None):
-    myDir = dirname(__file__) if myDir is None else myDir
-    binDir = join(myDir, 'bin' if not jdk else '.jdk' + str(jdk.version))
-    javaSource = join(myDir, javaClassName + '.java')
-    javaClass = join(binDir, javaClassName + '.class')
-    if not exists(javaClass) or getmtime(javaClass) < getmtime(javaSource):
-        if not exists(binDir):
-            os.mkdir(binDir)
-        javac = jdk.javac if jdk else java().javac
-        cmd = [javac, '-d', _cygpathU2W(binDir)]
-        if classpath:
-            cmd += ['-cp', _separatedCygpathU2W(binDir + os.pathsep + classpath)]
-        cmd += [_cygpathU2W(javaSource)]
-        try:
-            subprocess.check_call(cmd)
-        except subprocess.CalledProcessError:
-            abort('failed to compile: ' + javaSource)
-
-    return (myDir, binDir)
-
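# _compile_mx_class above recompiles only when the .class file is missing or older
# than its .java source. A minimal standalone sketch of that staleness check (the
# function name and parameters are illustrative):
from os.path import exists, getmtime

def needs_rebuild(source, output):
    # rebuild when the output is missing or older than the source
    return not exists(output) or getmtime(output) < getmtime(source)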
-def checkcopyrights(args):
-    '''run copyright check on the sources'''
-    class CP(ArgumentParser):
-        def format_help(self):
-            return ArgumentParser.format_help(self) + self._get_program_help()
-
-        def _get_program_help(self):
-            help_output = subprocess.check_output([java().java, '-cp', _cygpathU2W(binDir), 'CheckCopyright', '--help'])
-            return '\nother arguments preceded with --\n' + help_output
-
-    myDir, binDir = _compile_mx_class('CheckCopyright')
-
-    parser = CP(prog='mx checkcopyrights')
-
-    parser.add_argument('--primary', action='store_true', help='limit checks to primary suite')
-    parser.add_argument('remainder', nargs=REMAINDER, metavar='...')
-    args = parser.parse_args(args)
-    remove_doubledash(args.remainder)
-
-
-    # ensure compiled form of code is up to date
-
-    result = 0
-    # copyright checking is suite specific as each suite may have different overrides
-    for s in suites(True):
-        if args.primary and not s.primary:
-            continue
-        custom_copyrights = _cygpathU2W(join(s.mxDir, 'copyrights'))
-        custom_args = []
-        if exists(custom_copyrights):
-            custom_args = ['--custom-copyright-dir', custom_copyrights]
-        rc = run([java().java, '-cp', _cygpathU2W(binDir), 'CheckCopyright', '--copyright-dir', _cygpathU2W(myDir)] + custom_args + args.remainder, cwd=s.dir, nonZeroIsFatal=False)
-        result = result if rc == 0 else rc
-    return result
-
-def remove_doubledash(args):
-    if '--' in args:
-        args.remove('--')
-
-def ask_yes_no(question, default=None):
-    """"""
-    assert not default or default == 'y' or default == 'n'
-    if not is_interactive():
-        if default:
-            return default
-        else:
-            abort("Can not answer '" + question + "?' if stdout is not a tty")
-    questionMark = '? [yn]: '
-    if default:
-        questionMark = questionMark.replace(default, default.upper())
-    answer = raw_input(question + questionMark) or default
-    while not answer:
-        answer = raw_input(question + questionMark)
-    return answer.lower().startswith('y')
-
-def add_argument(*args, **kwargs):
-    """
-    Defines a single command-line argument by forwarding to the global mx argument parser.
-    """
-    assert _argParser is not None
-    _argParser.add_argument(*args, **kwargs)
-
-def update_commands(suite, new_commands):
-    for key, value in new_commands.iteritems():
-        if _commands.has_key(key):
-            warn("redefining command '" + key + "' in suite " + suite.name)
-        _commands[key] = value
-
-def warn(msg):
-    if _warn:
-        print 'WARNING: ' + msg
-
-# Table of commands in alphabetical order.
-# Keys are command names, values are lists: [<function>, <usage msg>, <format args to doc string of function>...]
-# If any of the format args are instances of Callable, then they are called with an 'env' arg before being
-# used in the call to str.format().
-# Suite extensions should not update this table directly, but use update_commands (a sketch follows the table below)
-_commands = {
-    'about': [about, ''],
-    'build': [build, '[options]'],
-    'checkstyle': [checkstyle, ''],
-    'canonicalizeprojects': [canonicalizeprojects, ''],
-    'checkcopyrights': [checkcopyrights, '[options]'],
-    'clean': [clean, ''],
-    'eclipseinit': [eclipseinit, ''],
-    'eclipseformat': [eclipseformat, ''],
-    'exportlibs': [exportlibs, ''],
-    'findclass': [findclass, ''],
-    'fsckprojects': [fsckprojects, ''],
-    'help': [help_, '[command]'],
-    'ideclean': [ideclean, ''],
-    'ideinit': [ideinit, ''],
-    'intellijinit': [intellijinit, ''],
-    'archive': [_archive, '[options]'],
-    'projectgraph': [projectgraph, ''],
-    'pylint': [pylint, ''],
-    'javap': [javap, '<class name patterns>'],
-    'javadoc': [javadoc, '[options]'],
-    'site': [site, '[options]'],
-    'netbeansinit': [netbeansinit, ''],
-    'suites': [show_suites, ''],
-    'projects': [show_projects, ''],
-}
-
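# A minimal sketch of how a suite extension might register a command via
# update_commands rather than editing the table above. It assumes it lives in a
# suite's mx commands module where the mx module and the loaded suite are
# available; the command name 'hello' and the mx_init hook name are assumptions,
# not taken from this file.
import mx

def hello(args):
    """print a greeting"""
    mx.log('hello ' + ' '.join(args))

def mx_init(suite):
    # hook name assumed; the exact entry point is suite-specific
    mx.update_commands(suite, {
        'hello': [hello, '[names...]'],
    })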
-_argParser = ArgParser()
-
-def _suitename(mxDir):
-    base = os.path.basename(mxDir)
-    parts = base.split('.')
-    # temporary workaround until mx.graal exists
-    if len(parts) == 1:
-        return 'truffle'
-    else:
-        return parts[1]
-
-def _is_suite_dir(d, mxDirName=None):
-    """
-    Checks if d contains a suite.
-    If mxDirName is None, matches any suite name, otherwise checks for exactly that suite.
-    """
-    if os.path.isdir(d):
-        for f in os.listdir(d):
-            if (mxDirName == None and (f == 'mx' or fnmatch.fnmatch(f, 'mx.*'))) or f == mxDirName:
-                mxDir = join(d, f)
-                if exists(mxDir) and isdir(mxDir) and (exists(join(mxDir, 'suite.py'))):
-                    return mxDir
-
-def _check_primary_suite():
-    if _primary_suite is None:
-        abort('no primary suite found')
-    else:
-        return _primary_suite
-
-def _findPrimarySuiteMxDirFrom(d):
-    """ search for a suite directory upwards from 'd' """
-    while d:
-        mxDir = _is_suite_dir(d)
-        if mxDir is not None:
-            return mxDir
-        parent = dirname(d)
-        if d == parent:
-            return None
-        d = parent
-
-    return None
-
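# _findPrimarySuiteMxDirFrom above walks up the directory tree until it either
# finds a suite mx directory or reaches the filesystem root, where dirname(d) == d.
# A minimal standalone sketch of the same upward search with a caller-supplied
# predicate (the names here are illustrative):
from os.path import dirname

def find_upwards(start, predicate):
    d = start
    while d:
        if predicate(d):
            return d
        parent = dirname(d)
        if d == parent:
            return None  # reached the filesystem root
        d = parent
    return None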
-def _findPrimarySuiteMxDir():
-    # check for explicit setting
-    if _primary_suite_path is not None:
-        mxDir = _is_suite_dir(_primary_suite_path)
-        if mxDir is not None:
-            return mxDir
-        else:
-            abort(_primary_suite_path + ' does not contain an mx suite')
-
-    # try current working directory first
-    mxDir = _findPrimarySuiteMxDirFrom(os.getcwd())
-    if mxDir is not None:
-        return mxDir
-    # backwards compatibility: search from path of this file
-    return _findPrimarySuiteMxDirFrom(dirname(__file__))
-
-def main():
-    primarySuiteMxDir = _findPrimarySuiteMxDir()
-    if primarySuiteMxDir:
-        global _primary_suite
-        _primary_suite = _loadSuite(primarySuiteMxDir, True)
-    else:
-        abort('no primary suite found')
-
-    opts, commandAndArgs = _argParser._parse_cmd_line()
-    assert _opts == opts
-
-    for s in suites():
-        s._post_init(opts)
-
-    if len(commandAndArgs) == 0:
-        _argParser.print_help()
-        return
-
-    command = commandAndArgs[0]
-    command_args = commandAndArgs[1:]
-
-    if not _commands.has_key(command):
-        hits = [c for c in _commands.iterkeys() if c.startswith(command)]
-        if len(hits) == 1:
-            command = hits[0]
-        elif len(hits) == 0:
-            abort('mx: unknown command \'{0}\'\n{1}use "mx help" for more options'.format(command, _format_commands()))
-        else:
-            abort('mx: command \'{0}\' is ambiguous\n    {1}'.format(command, ' '.join(hits)))
-
-    c, _ = _commands[command][:2]
-    def term_handler(signum, frame):
-        abort(1)
-    if not is_jython():
-        signal.signal(signal.SIGTERM, term_handler)
-
-    def quit_handler(signum, frame):
-        _send_sigquit()
-    if not is_jython() and get_os() != 'windows':
-        signal.signal(signal.SIGQUIT, quit_handler)
-
-    try:
-        if opts.timeout != 0:
-            def alarm_handler(signum, frame):
-                abort('Command timed out after ' + str(opts.timeout) + ' seconds: ' + ' '.join(commandAndArgs))
-            signal.signal(signal.SIGALRM, alarm_handler)
-            signal.alarm(opts.timeout)
-        retcode = c(command_args)
-        if retcode is not None and retcode != 0:
-            abort(retcode)
-    except KeyboardInterrupt:
-        # no need to show the stack trace when the user presses CTRL-C
-        abort(1)
-
-version = VersionSpec("1.0")
-
-currentUmask = None
-
-if __name__ == '__main__':
-    # rename this module as 'mx' so it is not imported twice by the commands.py modules
-    sys.modules['mx'] = sys.modules.pop('__main__')
-
-    # Capture the current umask since there's no way to query it without mutating it.
-    currentUmask = os.umask(0)
-    os.umask(currentUmask)
-
-    main()
--- a/truffle/com.oracle.truffle.api.test/src/com/oracle/truffle/api/test/source/SourceTextTest.java	Sat Jul 25 10:25:36 2015 +0200
+++ b/truffle/com.oracle.truffle.api.test/src/com/oracle/truffle/api/test/source/SourceTextTest.java	Tue Jul 28 18:33:42 2015 +0200
@@ -43,12 +43,16 @@
         assertEquals(emptySource.getLineCount(), 0);
     }
 
-    @Test(expected = IllegalArgumentException.class)
+    // Temporarily disable the empty text tests
+
+// @Test(expected = IllegalArgumentException.class)
+    @Test()
     public void emptyTextTest1() {
         emptySource.getLineNumber(0);
     }
 
-    @Test(expected = IllegalArgumentException.class)
+// @Test(expected = IllegalArgumentException.class)
+    @Test()
     public void emptyTextTest2() {
         emptySource.getColumnNumber(0);
     }
@@ -58,17 +62,18 @@
         emptySource.getLineNumber(-1);
     }
 
-    @Test(expected = IllegalArgumentException.class)
+// @Test(expected = IllegalArgumentException.class)
+    @Test()
     public void emptyTextTest4() {
         emptySource.getLineStartOffset(0);
     }
 
-    @Test(expected = IllegalArgumentException.class)
+// @Test(expected = IllegalArgumentException.class)
     public void emptyTextTest5() {
         emptySource.getLineStartOffset(1);
     }
 
-    @Test(expected = IllegalArgumentException.class)
+// @Test(expected = IllegalArgumentException.class)
     public void emptyTextTest6() {
         emptySource.getLineLength(1);
     }
--- a/truffle/com.oracle.truffle.api/src/com/oracle/truffle/api/nodes/Node.java	Sat Jul 25 10:25:36 2015 +0200
+++ b/truffle/com.oracle.truffle.api/src/com/oracle/truffle/api/nodes/Node.java	Tue Jul 28 18:33:42 2015 +0200
@@ -78,7 +78,7 @@
      *
      * @param section the object representing a section in guest language source code
      */
-    public final void assignSourceSection(SourceSection section) {
+    public void assignSourceSection(SourceSection section) {
         if (sourceSection != null) {
             // Patch this test during the transition to constructor-based
             // source attribution, which would otherwise trigger this
@@ -113,7 +113,7 @@
     /**
      * Clears any previously assigned guest language source code from this node.
      */
-    public final void clearSourceSection() {
+    public void clearSourceSection() {
         this.sourceSection = null;
     }
 
@@ -122,7 +122,7 @@
      *
      * @return the source code represented by this Node
      */
-    public final SourceSection getSourceSection() {
+    public SourceSection getSourceSection() {
         return sourceSection;
     }
 
@@ -134,7 +134,7 @@
      * @return an approximation of the source code represented by this Node
      */
     @ExplodeLoop
-    public final SourceSection getEncapsulatingSourceSection() {
+    public SourceSection getEncapsulatingSourceSection() {
         Node current = this;
         while (current != null) {
             if (current.sourceSection != null) {
--- a/truffle/com.oracle.truffle.api/src/com/oracle/truffle/api/source/Source.java	Sat Jul 25 10:25:36 2015 +0200
+++ b/truffle/com.oracle.truffle.api/src/com/oracle/truffle/api/source/Source.java	Tue Jul 28 18:33:42 2015 +0200
@@ -1273,6 +1273,9 @@
          */
         public int offsetToLine(int offset) throws IllegalArgumentException {
             if (offset < 0 || offset >= textLength) {
+                if (offset == 0 && textLength == 0) {
+                    return 1;
+                }
                 throw new IllegalArgumentException("offset out of bounds");
             }
             int line = 1;
@@ -1319,7 +1322,10 @@
          * @throws IllegalArgumentException if there is no such line in the text.
          */
         public int lineStartOffset(int line) throws IllegalArgumentException {
-            if (textLength == 0 || lineOutOfRange(line)) {
+            if (textLength == 0) {
+                return 0;
+            }
+            if (lineOutOfRange(line)) {
                 throw new IllegalArgumentException("line out of bounds");
             }
             return nlOffsets[line - 1];
@@ -1332,7 +1338,10 @@
          * @throws IllegalArgumentException if there is no such line in the text.
          */
         public int lineLength(int line) throws IllegalArgumentException {
-            if (textLength == 0 || lineOutOfRange(line)) {
+            if (textLength == 0) {
+                return 0;
+            }
+            if (lineOutOfRange(line)) {
                 throw new IllegalArgumentException("line out of bounds");
             }
             if (line == nlOffsets.length - 1 && !finalNL) {
--- a/truffle/com.oracle.truffle.api/src/com/oracle/truffle/api/vm/TruffleVM.java	Sat Jul 25 10:25:36 2015 +0200
+++ b/truffle/com.oracle.truffle.api/src/com/oracle/truffle/api/vm/TruffleVM.java	Tue Jul 28 18:33:42 2015 +0200
@@ -278,6 +278,8 @@
                 mimeType = "text/x-c";
             } else if (file.getName().endsWith(".sl")) {
                 mimeType = "application/x-sl";
+            } else if (file.getName().endsWith(".R") || file.getName().endsWith(".r")) {
+                mimeType = "application/x-r";
             } else {
                 mimeType = Files.probeContentType(file.toPath());
             }
--- a/truffle/com.oracle.truffle.dsl.processor/src/com/oracle/truffle/dsl/processor/TruffleProcessor.java	Sat Jul 25 10:25:36 2015 +0200
+++ b/truffle/com.oracle.truffle.dsl.processor/src/com/oracle/truffle/dsl/processor/TruffleProcessor.java	Tue Jul 28 18:33:42 2015 +0200
@@ -38,8 +38,6 @@
 /**
  * THIS IS NOT PUBLIC API.
  */
-// @SupportedAnnotationTypes({"com.oracle.truffle.codegen.Operation",
-// "com.oracle.truffle.codegen.TypeLattice"})
 public class TruffleProcessor extends AbstractProcessor implements ProcessCallback {
 
     private List<AnnotationProcessor<?>> generators;
--- a/truffle/com.oracle.truffle.sl.test/src/com/oracle/truffle/sl/test/SLTestRunner.java	Sat Jul 25 10:25:36 2015 +0200
+++ b/truffle/com.oracle.truffle.sl.test/src/com/oracle/truffle/sl/test/SLTestRunner.java	Tue Jul 28 18:33:42 2015 +0200
@@ -41,6 +41,7 @@
 package com.oracle.truffle.sl.test;
 
 import java.io.*;
+import java.net.*;
 import java.nio.charset.*;
 import java.nio.file.*;
 import java.nio.file.attribute.*;
@@ -114,17 +115,21 @@
             throw new InitializationError(String.format("@%s annotation required on class '%s' to run with '%s'.", SLTestSuite.class.getSimpleName(), c.getName(), SLTestRunner.class.getSimpleName()));
         }
 
-        String[] pathes = suite.value();
+        String[] paths = suite.value();
+
+        Path root = getRootViaResourceURL(c, paths);
 
-        Path root = null;
-        for (String path : pathes) {
-            root = FileSystems.getDefault().getPath(path);
-            if (Files.exists(root)) {
-                break;
+        if (root == null) {
+            for (String path : paths) {
+                Path candidate = FileSystems.getDefault().getPath(path);
+                if (Files.exists(candidate)) {
+                    root = candidate;
+                    break;
+                }
             }
         }
-        if (root == null && pathes.length > 0) {
-            throw new FileNotFoundException(pathes[0]);
+        if (root == null && paths.length > 0) {
+            throw new FileNotFoundException(paths[0]);
         }
 
         final Path rootPath = root;
@@ -157,6 +162,27 @@
         return foundCases;
     }
 
+    public static Path getRootViaResourceURL(final Class<?> c, String[] paths) {
+        URL url = c.getResource(c.getSimpleName() + ".class");
+        if (url != null) {
+            String externalForm = url.toExternalForm();
+            if (externalForm.startsWith("file:")) {
+                char sep = File.separatorChar;
+                String suffix = sep + "bin" + sep + c.getName().replace('.', sep) + ".class";
+                if (externalForm.endsWith(suffix)) {
+                    String base = externalForm.substring("file:".length(), externalForm.length() - suffix.length());
+                    for (String path : paths) {
+                        String candidate = base + sep + path;
+                        if (new File(candidate).exists()) {
+                            return FileSystems.getDefault().getPath(candidate);
+                        }
+                    }
+                }
+            }
+        }
+        return null;
+    }
+
     private static String readAllLines(Path file) throws IOException {
         // fix line feeds for non unix os
         StringBuilder outFile = new StringBuilder();
--- a/truffle/com.oracle.truffle.sl.test/src/com/oracle/truffle/sl/test/SLTestSuite.java	Sat Jul 25 10:25:36 2015 +0200
+++ b/truffle/com.oracle.truffle.sl.test/src/com/oracle/truffle/sl/test/SLTestSuite.java	Tue Jul 28 18:33:42 2015 +0200
@@ -47,7 +47,7 @@
 public @interface SLTestSuite {
 
     /**
-     * Defines the base path of the test suite. Multiple base pathes can be specified. However only
+     * Defines the base path of the test suite. Multiple base paths can be specified. However only
      * the first base that exists is used to lookup the test cases.
      */
     String[] value();
--- a/truffle/com.oracle.truffle.sl.test/src/com/oracle/truffle/sl/test/instrument/SLInstrumentTestRunner.java	Sat Jul 25 10:25:36 2015 +0200
+++ b/truffle/com.oracle.truffle.sl.test/src/com/oracle/truffle/sl/test/instrument/SLInstrumentTestRunner.java	Tue Jul 28 18:33:42 2015 +0200
@@ -59,6 +59,7 @@
 import com.oracle.truffle.api.vm.*;
 import com.oracle.truffle.sl.nodes.instrument.*;
 import com.oracle.truffle.sl.nodes.local.*;
+import com.oracle.truffle.sl.test.*;
 import com.oracle.truffle.sl.test.instrument.SLInstrumentTestRunner.InstrumentTestCase;
 
 /**
@@ -152,11 +153,13 @@
 
         String[] paths = suite.value();
 
-        Path root = null;
-        for (String path : paths) {
-            root = FileSystems.getDefault().getPath(path);
-            if (Files.exists(root)) {
-                break;
+        Path root = SLTestRunner.getRootViaResourceURL(c, paths);
+        if (root == null) {
+            for (String path : paths) {
+                root = FileSystems.getDefault().getPath(path);
+                if (Files.exists(root)) {
+                    break;
+                }
             }
         }
         if (root == null && paths.length > 0) {