# HG changeset patch
# User Doug Simon
# Date 1417619242 -3600
# Node ID 7d8270532cd9c456a53b6d79d60195798d29d3c6
# Parent 676f1800077ca4ce1ec077116529750c87cc4682
mx: changes towards supporting python 2.6 for call to mx from make/Makefile

diff -r 676f1800077c -r 7d8270532cd9 make/Makefile
--- a/make/Makefile	Wed Dec 03 16:02:36 2014 +0100
+++ b/make/Makefile	Wed Dec 03 16:07:22 2014 +0100
@@ -308,7 +308,7 @@
 
 # Builds code that can be shared among different build flavors
 buildshared:
-	python2.7 -u $(GAMMADIR)/mxtool/mx.py build --no-native --export-dir $(SHARED_DIR)
+	python2 -u $(GAMMADIR)/mxtool/mx.py build --no-native --export-dir $(SHARED_DIR)
 
 # Export file rule
 generic_export: $(EXPORT_LIST)
diff -r 676f1800077c -r 7d8270532cd9 mx/mx_graal.py
--- a/mx/mx_graal.py	Wed Dec 03 16:02:36 2014 +0100
+++ b/mx/mx_graal.py	Wed Dec 03 16:07:22 2014 +0100
@@ -721,7 +721,7 @@
             major, minor = map(int, most_recent_tag_version.split('.'))
             cached_graal_version = str(major) + '.' + str(minor + 1) + '-' + dev_suffix
         else:
-            cached_graal_version = 'unknown-{}-{}'.format(platform.node(), time.strftime('%Y-%m-%d_%H-%M-%S_%Z'))
+            cached_graal_version = 'unknown-{0}-{1}'.format(platform.node(), time.strftime('%Y-%m-%d_%H-%M-%S_%Z'))
     return cached_graal_version
diff -r 676f1800077c -r 7d8270532cd9 mxtool/mx.py
--- a/mxtool/mx.py	Wed Dec 03 16:02:36 2014 +0100
+++ b/mxtool/mx.py	Wed Dec 03 16:07:22 2014 +0100
@@ -48,6 +48,28 @@
 from argparse import ArgumentParser, REMAINDER
 from os.path import join, basename, dirname, exists, getmtime, isabs, expandvars, isdir, isfile
 
+# Support for Python 2.6
+def check_output(*popenargs, **kwargs):
+    process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs)
+    output, _ = process.communicate()
+    retcode = process.poll()
+    if retcode:
+        cmd = kwargs.get("args")
+        if cmd is None:
+            cmd = popenargs[0]
+        error = subprocess.CalledProcessError(retcode, cmd)
+        error.output = output
+        raise error
+    return output
+
+try: subprocess.check_output
+except: subprocess.check_output = check_output
+
+try: zipfile.ZipFile.__enter__
+except:
+    zipfile.ZipFile.__enter__ = lambda self: self
+    zipfile.ZipFile.__exit__ = lambda self, t, value, traceback: self.close()
+
 _projects = dict()
 _libs = dict()
 _jreLibs = dict()
@@ -115,103 +137,104 @@
         # are sources combined into main archive?
         unified = self.path == self.sourcesPath
 
-        with Archiver(self.path) as arc, Archiver(None if unified else self.sourcesPath) as srcArcRaw:
-            srcArc = arc if unified else srcArcRaw
-            services = {}
-            def overwriteCheck(zf, arcname, source):
-                if not hasattr(zf, '_provenance'):
-                    zf._provenance = {}
-                existingSource = zf._provenance.get(arcname, None)
-                isOverwrite = False
-                if existingSource and existingSource != source:
-                    if arcname[-1] != os.path.sep:
-                        logv('warning: ' + self.path + ': avoid overwrite of ' + arcname + '\n new: ' + source + '\n old: ' + existingSource)
-                    isOverwrite = True
-                zf._provenance[arcname] = source
-                return isOverwrite
-
-            if self.mainClass:
-                manifest = "Manifest-Version: 1.0\nMain-Class: %s\n\n" % (self.mainClass)
-                if not overwriteCheck(arc.zf, "META-INF/MANIFEST.MF", "project files"):
-                    arc.zf.writestr("META-INF/MANIFEST.MF", manifest)
-
-            for dep in self.sorted_deps(includeLibs=True):
-                isCoveredByDependecy = False
-                for d in self.distDependencies:
-                    if dep in _dists[d].sorted_deps(includeLibs=True, transitive=True):
-                        logv("Excluding {0} from {1} because it's provided by the dependency {2}".format(dep.name, self.path, d))
-                        isCoveredByDependecy = True
-                        break
-
-                if isCoveredByDependecy:
-                    continue
-
-                if dep.isLibrary():
-                    l = dep
-                    # merge library jar into distribution jar
-                    logv('[' + self.path + ': adding library ' + l.name + ']')
-                    lpath = l.get_path(resolve=True)
-                    libSourcePath = l.get_source_path(resolve=True)
-                    if lpath:
-                        with zipfile.ZipFile(lpath, 'r') as lp:
-                            for arcname in lp.namelist():
-                                if arcname.startswith('META-INF/services/') and not arcname == 'META-INF/services/':
-                                    service = arcname[len('META-INF/services/'):]
-                                    assert '/' not in service
-                                    services.setdefault(service, []).extend(lp.read(arcname).splitlines())
-                                else:
-                                    if not overwriteCheck(arc.zf, arcname, lpath + '!' + arcname):
-                                        arc.zf.writestr(arcname, lp.read(arcname))
-                    if srcArc.zf and libSourcePath:
-                        with zipfile.ZipFile(libSourcePath, 'r') as lp:
-                            for arcname in lp.namelist():
-                                if not overwriteCheck(srcArc.zf, arcname, lpath + '!' + arcname):
-                                    srcArc.zf.writestr(arcname, lp.read(arcname))
-                elif dep.isProject():
-                    p = dep
-
-                    if self.javaCompliance:
-                        if p.javaCompliance > self.javaCompliance:
-                            abort("Compliance level doesn't match: Distribution {0} requires {1}, but {2} is {3}.".format(self.name, self.javaCompliance, p.name, p.javaCompliance))
-
-                    # skip a Java project if its Java compliance level is "higher" than the configured JDK
-                    jdk = java(p.javaCompliance)
-                    assert jdk
-
-                    logv('[' + self.path + ': adding project ' + p.name + ']')
-                    outputDir = p.output_dir()
-                    for root, _, files in os.walk(outputDir):
-                        relpath = root[len(outputDir) + 1:]
-                        if relpath == join('META-INF', 'services'):
-                            for service in files:
-                                with open(join(root, service), 'r') as fp:
-                                    services.setdefault(service, []).extend([provider.strip() for provider in fp.readlines()])
-                        elif relpath == join('META-INF', 'providers'):
-                            for provider in files:
-                                with open(join(root, provider), 'r') as fp:
-                                    for service in fp:
-                                        services.setdefault(service.strip(), []).append(provider)
-                        else:
-                            for f in files:
-                                arcname = join(relpath, f).replace(os.sep, '/')
-                                if not overwriteCheck(arc.zf, arcname, join(root, f)):
-                                    arc.zf.write(join(root, f), arcname)
-                    if srcArc.zf:
-                        sourceDirs = p.source_dirs()
-                        if p.source_gen_dir():
-                            sourceDirs.append(p.source_gen_dir())
-                        for srcDir in sourceDirs:
-                            for root, _, files in os.walk(srcDir):
-                                relpath = root[len(srcDir) + 1:]
+        with Archiver(self.path) as arc:
+            with Archiver(None if unified else self.sourcesPath) as srcArcRaw:
+                srcArc = arc if unified else srcArcRaw
+                services = {}
+                def overwriteCheck(zf, arcname, source):
+                    if not hasattr(zf, '_provenance'):
+                        zf._provenance = {}
+                    existingSource = zf._provenance.get(arcname, None)
+                    isOverwrite = False
+                    if existingSource and existingSource != source:
+                        if arcname[-1] != os.path.sep:
+                            logv('warning: ' + self.path + ': avoid overwrite of ' + arcname + '\n new: ' + source + '\n old: ' + existingSource)
+                        isOverwrite = True
+                    zf._provenance[arcname] = source
+                    return isOverwrite
+
+                if self.mainClass:
+                    manifest = "Manifest-Version: 1.0\nMain-Class: %s\n\n" % (self.mainClass)
+                    if not overwriteCheck(arc.zf, "META-INF/MANIFEST.MF", "project files"):
+                        arc.zf.writestr("META-INF/MANIFEST.MF", manifest)
+
+                for dep in self.sorted_deps(includeLibs=True):
+                    isCoveredByDependecy = False
+                    for d in self.distDependencies:
+                        if dep in _dists[d].sorted_deps(includeLibs=True, transitive=True):
+                            logv("Excluding {0} from {1} because it's provided by the dependency {2}".format(dep.name, self.path, d))
+                            isCoveredByDependecy = True
+                            break
+
+                    if isCoveredByDependecy:
+                        continue
+
+                    if dep.isLibrary():
+                        l = dep
+                        # merge library jar into distribution jar
+                        logv('[' + self.path + ': adding library ' + l.name + ']')
+                        lpath = l.get_path(resolve=True)
+                        libSourcePath = l.get_source_path(resolve=True)
+                        if lpath:
+                            with zipfile.ZipFile(lpath, 'r') as lp:
+                                for arcname in lp.namelist():
+                                    if arcname.startswith('META-INF/services/') and not arcname == 'META-INF/services/':
+                                        service = arcname[len('META-INF/services/'):]
+                                        assert '/' not in service
+                                        services.setdefault(service, []).extend(lp.read(arcname).splitlines())
+                                    else:
+                                        if not overwriteCheck(arc.zf, arcname, lpath + '!' + arcname):
+                                            arc.zf.writestr(arcname, lp.read(arcname))
+                        if srcArc.zf and libSourcePath:
+                            with zipfile.ZipFile(libSourcePath, 'r') as lp:
+                                for arcname in lp.namelist():
+                                    if not overwriteCheck(srcArc.zf, arcname, lpath + '!' + arcname):
+                                        srcArc.zf.writestr(arcname, lp.read(arcname))
+                    elif dep.isProject():
+                        p = dep
+
+                        if self.javaCompliance:
+                            if p.javaCompliance > self.javaCompliance:
+                                abort("Compliance level doesn't match: Distribution {0} requires {1}, but {2} is {3}.".format(self.name, self.javaCompliance, p.name, p.javaCompliance))
+
+                        # skip a Java project if its Java compliance level is "higher" than the configured JDK
+                        jdk = java(p.javaCompliance)
+                        assert jdk
+
+                        logv('[' + self.path + ': adding project ' + p.name + ']')
+                        outputDir = p.output_dir()
+                        for root, _, files in os.walk(outputDir):
+                            relpath = root[len(outputDir) + 1:]
+                            if relpath == join('META-INF', 'services'):
+                                for service in files:
+                                    with open(join(root, service), 'r') as fp:
+                                        services.setdefault(service, []).extend([provider.strip() for provider in fp.readlines()])
+                            elif relpath == join('META-INF', 'providers'):
+                                for provider in files:
+                                    with open(join(root, provider), 'r') as fp:
+                                        for service in fp:
+                                            services.setdefault(service.strip(), []).append(provider)
+                            else:
                                 for f in files:
-                                    if f.endswith('.java'):
-                                        arcname = join(relpath, f).replace(os.sep, '/')
-                                        if not overwriteCheck(srcArc.zf, arcname, join(root, f)):
-                                            srcArc.zf.write(join(root, f), arcname)
-
-            for service, providers in services.iteritems():
-                arcname = 'META-INF/services/' + service
-                arc.zf.writestr(arcname, '\n'.join(providers))
+                                    arcname = join(relpath, f).replace(os.sep, '/')
+                                    if not overwriteCheck(arc.zf, arcname, join(root, f)):
+                                        arc.zf.write(join(root, f), arcname)
+                        if srcArc.zf:
+                            sourceDirs = p.source_dirs()
+                            if p.source_gen_dir():
+                                sourceDirs.append(p.source_gen_dir())
+                            for srcDir in sourceDirs:
+                                for root, _, files in os.walk(srcDir):
+                                    relpath = root[len(srcDir) + 1:]
+                                    for f in files:
+                                        if f.endswith('.java'):
+                                            arcname = join(relpath, f).replace(os.sep, '/')
+                                            if not overwriteCheck(srcArc.zf, arcname, join(root, f)):
+                                                srcArc.zf.write(join(root, f), arcname)
+
+                for service, providers in services.iteritems():
+                    arcname = 'META-INF/services/' + service
+                    arc.zf.writestr(arcname, '\n'.join(providers))
 
         self.notify_updated()
 
@@ -820,7 +843,7 @@
 
         # temporarily extend the Python path
         sys.path.insert(0, mxDir)
-        snapshot = frozenset(sys.modules.viewkeys())
+        snapshot = frozenset(sys.modules.keys())
         module = __import__(moduleName)
 
         if savedModule:
@@ -833,7 +856,7 @@
         # For now fail fast if extra modules were loaded.
        # This can later be relaxed to simply remove the extra modules
        # from the sys.modules name space if necessary.
-        extraModules = sys.modules.viewkeys() - snapshot
+        extraModules = frozenset(sys.modules.keys()) - snapshot
         assert len(extraModules) == 0, 'loading ' + modulePath + ' caused extra modules to be loaded: ' + ', '.join([m for m in extraModules])
 
         # revert the Python path
@@ -843,7 +866,7 @@
         abort(modulePath + ' must define a variable named "' + dictName + '"')
 
     d = expand(getattr(module, dictName), [dictName])
     sections = ['projects', 'libraries', 'jrelibraries', 'distributions'] + (['distribution_extensions'] if suite else ['name', 'mxversion'])
-    unknown = d.viewkeys() - sections
+    unknown = frozenset(d.keys()) - frozenset(sections)
     if unknown:
         abort(modulePath + ' defines unsupported suite sections: ' + ', '.join(unknown))
@@ -857,7 +880,7 @@
         if not existing:
             suite[s] = additional
         else:
-            conflicting = additional.viewkeys() & existing.viewkeys()
+            conflicting = additional.keys() & existing.keys()
             if conflicting:
                 abort(modulePath + ' redefines: ' + ', '.join(conflicting))
             existing.update(additional)
@@ -1039,12 +1062,13 @@
                 currentAps = zf.read(config).split()
                 if currentAps != aps:
                     logv('[updating ' + config + ' in ' + apsJar + ']')
-                    with Archiver(apsJar) as arc, zipfile.ZipFile(apsJar, 'r') as lp:
-                        for arcname in lp.namelist():
-                            if arcname == config:
-                                arc.zf.writestr(arcname, '\n'.join(aps))
-                            else:
-                                arc.zf.writestr(arcname, lp.read(arcname))
+                    with Archiver(apsJar) as arc:
+                        with zipfile.ZipFile(apsJar, 'r') as lp:
+                            for arcname in lp.namelist():
+                                if arcname == config:
+                                    arc.zf.writestr(arcname, '\n'.join(aps))
+                                else:
+                                    arc.zf.writestr(arcname, lp.read(arcname))
 
         d.add_update_listener(_refineAnnotationProcessorServiceConfig)
         self.dists.append(d)
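
For reference, the mxtool/mx.py shims above follow a common pattern for straddling Python 2.6 and 2.7: probe for the newer API and install a fallback only when it is missing. The standalone sketch below is not part of the patch; it illustrates the same idea using hasattr() guards instead of the patch's bare try/except probes, and the 'echo' command in the final usage check is only an assumption about the host system.

import subprocess
import zipfile

def check_output(*popenargs, **kwargs):
    # Fallback for Python 2.6, which lacks subprocess.check_output.
    process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs)
    output, _ = process.communicate()
    retcode = process.poll()
    if retcode:
        cmd = kwargs.get("args")
        if cmd is None:
            cmd = popenargs[0]
        error = subprocess.CalledProcessError(retcode, cmd)
        error.output = output
        raise error
    return output

# Install the fallback only where the real API is absent (Python 2.6).
if not hasattr(subprocess, 'check_output'):
    subprocess.check_output = check_output

# Python 2.6's ZipFile is not a context manager; add the protocol so
# 'with zipfile.ZipFile(...) as zf:' works there as well.
if not hasattr(zipfile.ZipFile, '__enter__'):
    zipfile.ZipFile.__enter__ = lambda self: self
    zipfile.ZipFile.__exit__ = lambda self, t, value, traceback: self.close()

if __name__ == '__main__':
    # Assumes an 'echo' executable on PATH; prints its captured output.
    print(subprocess.check_output(['echo', 'hello']))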