diff mx/sanitycheck.py @ 7685:7d66682cc901

Merge.
author Christian Haeubl <haeubl@ssw.jku.at>
date Fri, 01 Feb 2013 17:06:26 +0100
parents 641a4c6ac1ce
children 01aeaf194641
--- a/mx/sanitycheck.py	Fri Feb 01 16:57:40 2013 +0100
+++ b/mx/sanitycheck.py	Fri Feb 01 17:06:26 2013 +0100
@@ -23,10 +23,8 @@
 #
 # ----------------------------------------------------------------------------------------------------
 
-from outputparser import OutputParser, Matcher
-import re
-import mx
-import os
+from outputparser import OutputParser, ValuesMatcher
+import re, mx, commands, os, sys, StringIO, subprocess
 from os.path import isfile, join, exists
 
 dacapoSanityWarmup = {
@@ -103,12 +101,25 @@
     if specjbb2005 is None or not exists(join(specjbb2005, 'jbb.jar')):
         mx.abort('Please set the SPECJBB2005 environment variable to a SPECjbb2005 directory')
     
-    score = re.compile(r"^Valid run, Score is  (?P<score>[0-9]+)$")
+    score = re.compile(r"^Valid run, Score is  (?P<score>[0-9]+)$", re.MULTILINE)
     error = re.compile(r"VALIDATION ERROR")
-    success = re.compile(r"^Valid run, Score is  [0-9]+$")
-    matcher = Matcher(score, {'const:group' : "const:SPECjbb2005", 'const:name' : 'const:score', 'const:score' : 'score'})
+    success = re.compile(r"^Valid run, Score is  [0-9]+$", re.MULTILINE)
+    matcher = ValuesMatcher(score, {'group' : 'SPECjbb2005', 'name' : 'score', 'score' : '<score>'})
     classpath = ['jbb.jar', 'check.jar']
     return Test("SPECjbb2005", ['spec.jbb.JBBmain', '-propfile', 'SPECjbb.props'] + benchArgs, [success], [error], [matcher], vmOpts=['-Xms3g', '-XX:+UseSerialGC', '-XX:-UseCompressedOops', '-cp', os.pathsep.join(classpath)], defaultCwd=specjbb2005)
+
+def getSPECjbb2013(benchArgs = []):
+    
+    specjbb2013 = mx.get_env('SPECJBB2013')
+    if specjbb2013 is None or not exists(join(specjbb2013, 'specjbb2013.jar')):
+        mx.abort('Please set the SPECJBB2013 environment variable to a SPECjbb2013 directory')
+    
+    jops = re.compile(r"^RUN RESULT: hbIR \(max attempted\) = [0-9]+, hbIR \(settled\) = [0-9]+, max-jOPS = (?P<max>[0-9]+), critical-jOPS = (?P<critical>[0-9]+)$", re.MULTILINE)
+    # TODO: no failure pattern for SPECjbb2013 is known yet, so failureREs below stays empty
+    success = re.compile(r"org.spec.jbb.controller: Run finished", re.MULTILINE)
+    matcherMax = ValuesMatcher(jops, {'group' : 'SPECjbb2013', 'name' : 'max', 'score' : '<max>'})
+    matcherCritical = ValuesMatcher(jops, {'group' : 'SPECjbb2013', 'name' : 'critical', 'score' : '<critical>'})
+    return Test("SPECjbb2013", ['-jar', 'specjbb2013.jar', '-m', 'composite'] + benchArgs, [success], [], [matcherCritical, matcherMax], vmOpts=['-Xms7g', '-XX:+UseSerialGC', '-XX:-UseCompressedOops', '-XX:CompileCommand=exclude,*.FastMath::slowLog'], defaultCwd=specjbb2013)
     
 def getSPECjvm2008(benchArgs = [], skipCheck=False, skipKitValidation=False, warmupTime=None, iterationTime=None):
     
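
The matchers above use the new ValuesMatcher convention that replaces the old 'const:'-prefixed Matcher keys: dictionary keys are plain strings, and any '<group>' token inside a value is substituted from the correspondingly named group of the regex match, while everything else is taken literally (the two SPECjbb2013 matchers, for instance, share the single RUN RESULT regex and pull out its 'max' and 'critical' groups separately). ValuesMatcher itself is defined in outputparser.py, which is not part of this changeset, so the following is only a sketch of the assumed contract, with a hypothetical class name:

import re

class ValuesMatcherSketch(object):
    # Hypothetical stand-in for outputparser.ValuesMatcher (not in this diff).
    def __init__(self, regex, valuesTemplate):
        self.regex = regex
        self.valuesTemplate = valuesTemplate

    def parse(self, text):
        records = []
        for match in self.regex.finditer(text):
            record = {}
            for key, value in self.valuesTemplate.items():
                # every '<groupname>' token is replaced by the regex group of
                # that name; everything else is kept as a literal
                for groupName, groupValue in match.groupdict().items():
                    if groupValue is not None:
                        value = value.replace('<' + groupName + '>', groupValue)
                record[key] = value
            records.append(record)
        return records

score = re.compile(r"^Valid run, Score is  (?P<score>[0-9]+)$", re.MULTILINE)
matcher = ValuesMatcherSketch(score, {'group' : 'SPECjbb2005', 'name' : 'score', 'score' : '<score>'})
print(matcher.parse("Valid run, Score is  12345"))
# [{'group': 'SPECjbb2005', 'name': 'score', 'score': '12345'}]
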
@@ -116,11 +127,11 @@
     if specjvm2008 is None or not exists(join(specjvm2008, 'SPECjvm2008.jar')):
         mx.abort('Please set the SPECJVM2008 environment variable to a SPECjvm2008 directory')
     
-    score = re.compile(r"^(Score on|Noncompliant) (?P<benchmark>[a-zA-Z0-9\._]+)( result)?: (?P<score>[0-9]+((,|\.)[0-9]+)?)( SPECjvm2008 Base)? ops/m$")
-    error = re.compile(r"^Errors in benchmark: ")
+    score = re.compile(r"^(Score on|Noncompliant) (?P<benchmark>[a-zA-Z0-9\._]+)( result)?: (?P<score>[0-9]+((,|\.)[0-9]+)?)( SPECjvm2008 Base)? ops/m$", re.MULTILINE)
+    error = re.compile(r"^Errors in benchmark: ", re.MULTILINE)
     # The ' ops/m' at the end of the success string is important : it's how you can tell valid and invalid runs apart
-    success = re.compile(r"^(Noncompliant c|C)omposite result: [0-9]+((,|\.)[0-9]+)?( SPECjvm2008 (Base|Peak))? ops/m$")
-    matcher = Matcher(score, {'const:group' : "const:SPECjvm2008", 'const:name' : 'benchmark', 'const:score' : 'score'}, startNewLine=True)
+    success = re.compile(r"^(Noncompliant c|C)omposite result: [0-9]+((,|\.)[0-9]+)?( SPECjvm2008 (Base|Peak))? ops/m$", re.MULTILINE)
+    matcher = ValuesMatcher(score, {'group' : 'SPECjvm2008', 'name' : '<benchmark>', 'score' : '<score>'})
     
     opts = []
     if warmupTime is not None:
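
The re.MULTILINE flag added to the anchored '^...$' patterns in these hunks is needed because the parser now runs each regex over the whole captured output in one pass (see the Tee/commands.vm change further down) instead of being fed one line at a time, so '^' and '$' must anchor at every line boundary. A minimal illustration with a fabricated two-line output:

import re

success = r"^(Noncompliant c|C)omposite result: [0-9]+((,|\.)[0-9]+)?( SPECjvm2008 (Base|Peak))? ops/m$"
output = "Score on compress: 1234.5 ops/m\nComposite result: 678.9 SPECjvm2008 Base ops/m\n"
print(re.search(success, output))                # None: '^' only anchors at the start of the string
print(re.search(success, output, re.MULTILINE))  # matches the 'Composite result' line
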
@@ -156,13 +167,13 @@
     if not isfile(dacapo) or not dacapo.endswith('.jar'):
         mx.abort('Specified DaCapo jar file does not exist or is not a jar file: ' + dacapo)
     
-    dacapoSuccess = re.compile(r"^===== DaCapo 9\.12 ([a-zA-Z0-9_]+) PASSED in ([0-9]+) msec =====$")
-    dacapoFail = re.compile(r"^===== DaCapo 9\.12 ([a-zA-Z0-9_]+) FAILED (warmup|) =====$")
+    dacapoSuccess = re.compile(r"^===== DaCapo 9\.12 ([a-zA-Z0-9_]+) PASSED in ([0-9]+) msec =====$", re.MULTILINE)
+    dacapoFail = re.compile(r"^===== DaCapo 9\.12 ([a-zA-Z0-9_]+) FAILED (warmup|) =====$", re.MULTILINE)
     dacapoTime = re.compile(r"===== DaCapo 9\.12 (?P<benchmark>[a-zA-Z0-9_]+) PASSED in (?P<time>[0-9]+) msec =====")
     dacapoTime1 = re.compile(r"===== DaCapo 9\.12 (?P<benchmark>[a-zA-Z0-9_]+) completed warmup 1 in (?P<time>[0-9]+) msec =====")
     
-    dacapoMatcher = Matcher(dacapoTime, {'const:group' : "const:DaCapo", 'const:name' : 'benchmark', 'const:score' : 'time'}, startNewLine=True)
-    dacapoMatcher1 = Matcher(dacapoTime1, {'const:group' : "const:DaCapo-1stRun", 'const:name' : 'benchmark', 'const:score' : 'time'})
+    dacapoMatcher = ValuesMatcher(dacapoTime, {'group' : 'DaCapo', 'name' : '<benchmark>', 'score' : '<time>'})
+    dacapoMatcher1 = ValuesMatcher(dacapoTime1, {'group' : 'DaCapo-1stRun', 'name' : '<benchmark>', 'score' : '<time>'})
     
     return Test("DaCapo-" + name, ['-jar', dacapo, name, '-n', str(n), ] + dacapoArgs, [dacapoSuccess], [dacapoFail], [dacapoMatcher, dacapoMatcher1], ['-Xms2g', '-XX:+UseSerialGC', '-XX:-UseCompressedOops'])
 
@@ -188,38 +199,48 @@
     if not isfile(dacapo) or not dacapo.endswith('.jar'):
         mx.abort('Specified Scala DaCapo jar file does not exist or is not a jar file: ' + dacapo)
     
-    dacapoSuccess = re.compile(r"^===== DaCapo 0\.1\.0(-SNAPSHOT)? ([a-zA-Z0-9_]+) PASSED in ([0-9]+) msec =====$")
-    dacapoFail = re.compile(r"^===== DaCapo 0\.1\.0(-SNAPSHOT)? ([a-zA-Z0-9_]+) FAILED (warmup|) =====$")
+    dacapoSuccess = re.compile(r"^===== DaCapo 0\.1\.0(-SNAPSHOT)? ([a-zA-Z0-9_]+) PASSED in ([0-9]+) msec =====$", re.MULTILINE)
+    dacapoFail = re.compile(r"^===== DaCapo 0\.1\.0(-SNAPSHOT)? ([a-zA-Z0-9_]+) FAILED (warmup|) =====$", re.MULTILINE)
     dacapoTime = re.compile(r"===== DaCapo 0\.1\.0(-SNAPSHOT)? (?P<benchmark>[a-zA-Z0-9_]+) PASSED in (?P<time>[0-9]+) msec =====")
     
-    dacapoMatcher = Matcher(dacapoTime, {'const:group' : "const:Scala-DaCapo", 'const:name' : 'benchmark', 'const:score' : 'time'})
+    dacapoMatcher = ValuesMatcher(dacapoTime, {'group' : "Scala-DaCapo", 'name' : '<benchmark>', 'score' : '<time>'})
     
     return Test("Scala-DaCapo-" + name, ['-jar', dacapo, name, '-n', str(n), ] + dacapoArgs, [dacapoSuccess], [dacapoFail], [dacapoMatcher], ['-Xms2g', '-XX:+UseSerialGC', '-XX:-UseCompressedOops'])
 
 def getBootstraps():
     time = re.compile(r"Bootstrapping Graal\.+ in (?P<time>[0-9]+) ms")
-    scoreMatcher = Matcher(time, {'const:group' : 'const:Bootstrap', 'const:name' : 'const:BootstrapTime', 'const:score' : 'time'})
-    scoreMatcherBig = Matcher(time, {'const:group' : 'const:Bootstrap-bigHeap', 'const:name' : 'const:BootstrapTime', 'const:score' : 'time'})
+    scoreMatcher = ValuesMatcher(time, {'group' : 'Bootstrap', 'name' : 'BootstrapTime', 'score' : '<time>'})
+    scoreMatcherBig = ValuesMatcher(time, {'group' : 'Bootstrap-bigHeap', 'name' : 'BootstrapTime', 'score' : '<time>'})
     
     tests = []
-    tests.append(Test("Bootstrap", ['-version'], successREs=[time], scoreMatchers=[scoreMatcher], ingoreVms=['client', 'server']))
-    tests.append(Test("Bootstrap-bigHeap", ['-version'], successREs=[time], scoreMatchers=[scoreMatcherBig], vmOpts=['-Xms2g'], ingoreVms=['client', 'server']))
+    tests.append(Test("Bootstrap", ['-version'], successREs=[time], scoreMatchers=[scoreMatcher], ignoredVMs=['client', 'server'], benchmarkCompilationRate=False))
+    tests.append(Test("Bootstrap-bigHeap", ['-version'], successREs=[time], scoreMatchers=[scoreMatcherBig], vmOpts=['-Xms2g'], ignoredVMs=['client', 'server'], benchmarkCompilationRate=False))
     return tests
 
+class Tee:
+    def __init__(self):
+        self.output = StringIO.StringIO()
+    def eat(self, line):
+        self.output.write(line)
+        sys.stdout.write(line)
+
 """
 Encapsulates a single program that is a sanity test and/or a benchmark.
 """
 class Test:
-    def __init__(self, name, cmd, successREs=[], failureREs=[], scoreMatchers=[], vmOpts=[], defaultCwd=None, ingoreVms=[]):
+    def __init__(self, name, cmd, successREs=[], failureREs=[], scoreMatchers=[], vmOpts=[], defaultCwd=None, ignoredVMs=[], benchmarkCompilationRate=True):
+
         self.name = name
         self.successREs = successREs
-        self.failureREs = failureREs + [re.compile(r"Exception occured in scope: ")]
+        self.failureREs = failureREs + [re.compile(r"Exception occurred in scope: ")]
         self.scoreMatchers = scoreMatchers
         self.vmOpts = vmOpts
         self.cmd = cmd
         self.defaultCwd = defaultCwd
-        self.ingoreVms = ingoreVms;
-        
+        self.ignoredVMs = ignoredVMs
+        self.benchmarkCompilationRate = benchmarkCompilationRate
+        if benchmarkCompilationRate:
+            self.vmOpts = self.vmOpts + ['-XX:+CITime']
         
     def __str__(self):
         return self.name
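
The Tee helper introduced above lets run() and bench() echo the VM's output to the console while keeping a copy for the parser: commands.vm (defined in commands.py, outside this diff) calls the function passed as 'out' for each piece of child-process output, with stderr folded into stdout via err=subprocess.STDOUT. The benchmarkCompilationRate flag added to Test.__init__ appends -XX:+CITime so that the compilation-speed patterns registered in the next hunk have something to match. A small usage sketch of the callback (the log line is made up):

import StringIO, sys

class Tee:
    def __init__(self):
        self.output = StringIO.StringIO()
    def eat(self, line):
        self.output.write(line)
        sys.stdout.write(line)

tee = Tee()
# In the real code tee.eat is passed to commands.vm as out=tee.eat; here we
# just invoke it directly with a fabricated output line.
tee.eat('Bootstrapping Graal....... in 4242 ms\n')
assert tee.output.getvalue() == 'Bootstrapping Graal....... in 4242 ms\n'
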
@@ -228,82 +249,112 @@
         """
         Run this program as a sanity test.
         """
-        if (vm in self.ingoreVms):
-            return True;
+        if (vm in self.ignoredVMs):
+            return True
         if cwd is None:
             cwd = self.defaultCwd
-        parser = OutputParser(nonZeroIsFatal = False)
+        parser = OutputParser()
         jvmError = re.compile(r"(?P<jvmerror>([A-Z]:|/).*[/\\]hs_err_pid[0-9]+\.log)")
-        parser.addMatcher(Matcher(jvmError, {'const:jvmError' : 'jvmerror'}))
+        parser.addMatcher(ValuesMatcher(jvmError, {'jvmError' : '<jvmerror>'}))
         
         for successRE in self.successREs:
-            parser.addMatcher(Matcher(successRE, {'const:passed' : 'const:1'}))
+            parser.addMatcher(ValuesMatcher(successRE, {'passed' : '1'}))
         for failureRE in self.failureREs:
-            parser.addMatcher(Matcher(failureRE, {'const:failed' : 'const:1'}))
-        
-        result = parser.parse(vm, self.vmOpts + opts + self.cmd, cwd, vmbuild)
-        
-        parsedLines = result['parsed']
-        if len(parsedLines) == 0:
+            parser.addMatcher(ValuesMatcher(failureRE, {'failed' : '1'}))
+
+        tee = Tee()
+        retcode = commands.vm(self.vmOpts + opts + self.cmd, vm, nonZeroIsFatal=False, out=tee.eat, err=subprocess.STDOUT, cwd=cwd, vmbuild=vmbuild)
+        output = tee.output.getvalue()
+        valueMaps = parser.parse(output)
+
+        if len(valueMaps) == 0:
             return False
         
-        assert len(parsedLines) == 1, 'Test matchers should not return more than one line'
+        assert len(valueMaps) == 1, 'Test matchers should not return more than one record'
         
-        parsed = parsedLines[0]
+        record = valueMaps[0]
         
-        if parsed.has_key('jvmError'):
+        jvmErrorFile = record.get('jvmError')
+        if jvmErrorFile:
             mx.log('/!\\JVM Error : dumping error log...')
-            f = open(parsed['jvmError'], 'rb');
-            for line in iter(f.readline, ''):
-                mx.log(line.rstrip())
-            f.close()
-            os.unlink(parsed['jvmError'])
+            with open(jvmErrorFile, 'rb') as fp:
+                mx.log(fp.read())
+            os.unlink(jvmErrorFile)
             return False
         
-        if parsed.has_key('failed') and parsed['failed'] is '1':
+        if record.get('failed') == '1':
             return False
         
-        return result['retcode'] is 0 and parsed.has_key('passed') and parsed['passed'] is '1'
+        return retcode == 0 and record.get('passed') == '1'
     
     def bench(self, vm, cwd=None, opts=[], vmbuild=None):
         """
         Run this program as a benchmark.
         """
-        if (vm in self.ingoreVms):
-            return {};
+        if (vm in self.ignoredVMs):
+            return {}
         if cwd is None:
             cwd = self.defaultCwd
-        parser = OutputParser(nonZeroIsFatal = False)
+        parser = OutputParser()
         
         for successRE in self.successREs:
-            parser.addMatcher(Matcher(successRE, {'const:passed' : 'const:1'}))
+            parser.addMatcher(ValuesMatcher(successRE, {'passed' : '1'}))
         for failureRE in self.failureREs:
-            parser.addMatcher(Matcher(failureRE, {'const:failed' : 'const:1'}))
+            parser.addMatcher(ValuesMatcher(failureRE, {'failed' : '1'}))
         for scoreMatcher in self.scoreMatchers:
             parser.addMatcher(scoreMatcher)
+
+        if self.benchmarkCompilationRate:
+            if vm == 'graal':
+                bps = re.compile(r"ParsedBytecodesPerSecond@final: (?P<rate>[0-9]+)")
+                ibps = re.compile(r"InlinedBytecodesPerSecond@final: (?P<rate>[0-9]+)")
+                parser.addMatcher(ValuesMatcher(bps, {'group' : 'ParsedBytecodesPerSecond', 'name' : self.name, 'score' : '<rate>'}))
+                parser.addMatcher(ValuesMatcher(ibps, {'group' : 'InlinedBytecodesPerSecond', 'name' : self.name, 'score' : '<rate>'}))
+            else:
+                ibps = re.compile(r"(?P<compiler>[\w]+) compilation speed: +(?P<rate>[0-9]+) bytes/s {standard")
+                parser.addMatcher(ValuesMatcher(ibps, {'group' : 'InlinedBytecodesPerSecond', 'name' : '<compiler>:' + self.name, 'score' : '<rate>'}))
             
-        result = parser.parse(vm, self.vmOpts + opts + self.cmd, cwd, vmbuild)
-        if result['retcode'] is not 0:
-            mx.abort("Benchmark failed (non-zero retcode)")
-        
-        parsed = result['parsed']
-        
-        ret = {}
-        
-        passed = False;
+        startDelim = 'START: ' + self.name
+        endDelim = 'END: ' + self.name
         
-        for line in parsed:
-            assert (line.has_key('name') and line.has_key('score') and line.has_key('group')) or line.has_key('passed') or line.has_key('failed')
-            if line.has_key('failed') and line['failed'] is '1':
+        outputfile = os.environ.get('BENCH_OUTPUT', None)
+        if outputfile:
+            # Used only to debug output parsing
+            with open(outputfile) as fp:
+                output = fp.read()
+                start = output.find(startDelim)
+                end = output.find(endDelim, start)
+                if start == -1 and end == -1:
+                    return {}
+                output = output[start + len(startDelim + os.linesep): end]
+                mx.log(startDelim)
+                mx.log(output)
+                mx.log(endDelim)
+        else:
+            tee = Tee()
+            mx.log(startDelim)
+            if commands.vm(self.vmOpts + opts + self.cmd, vm, nonZeroIsFatal=False, out=tee.eat, err=subprocess.STDOUT, cwd=cwd, vmbuild=vmbuild) != 0:
+                mx.abort("Benchmark failed (non-zero retcode)")
+            mx.log(endDelim)
+            output = tee.output.getvalue()
+
+        groups = {}
+        passed = False
+        for valueMap in parser.parse(output):
+            assert (valueMap.has_key('name') and valueMap.has_key('score') and valueMap.has_key('group')) or valueMap.has_key('passed') or valueMap.has_key('failed'), valueMap
+            if valueMap.get('failed') == '1':
                 mx.abort("Benchmark failed")
-            if line.has_key('passed') and line['passed'] is '1':
+            if valueMap.get('passed') == '1':
                 passed = True
-            if line.has_key('name') and line.has_key('score') and line.has_key('group'):
-                if not ret.has_key(line['group']):
-                    ret[line['group']] = {};
-                ret[line['group']][line['name']] = line['score']
+            groupName = valueMap.get('group')
+            if groupName:
+                group = groups.setdefault(groupName, {})
+                name = valueMap.get('name')
+                score = valueMap.get('score')
+                if name and score:
+                    group[name] = score
         
         if not passed:
             mx.abort("Benchmark failed (not passed)")
         
-        return ret
+        return groups
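
bench() now returns its scores as a nested dictionary keyed by group and then by benchmark name, built from the value maps the parser extracts between the START/END delimiters (or, purely for debugging the parsing, from a previously captured log named by the BENCH_OUTPUT environment variable). A worked example of the aggregation loop with invented scores:

valueMaps = [
    {'passed' : '1'},
    {'group' : 'DaCapo', 'name' : 'fop', 'score' : '512'},
    {'group' : 'DaCapo-1stRun', 'name' : 'fop', 'score' : '834'},
]
groups = {}
for valueMap in valueMaps:
    groupName = valueMap.get('group')
    if groupName:
        group = groups.setdefault(groupName, {})
        name = valueMap.get('name')
        score = valueMap.get('score')
        if name and score:
            group[name] = score
print(groups)
# {'DaCapo': {'fop': '512'}, 'DaCapo-1stRun': {'fop': '834'}}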