changeset 11490:78d96d411965

Merge.
author Doug Simon <doug.simon@oracle.com>
date Thu, 29 Aug 2013 21:32:54 +0200
parents 0cb481a62384 (current diff) 93c63975217e (diff)
children a03452edfc4d
files
diffstat 19 files changed, 175 insertions(+), 92 deletions(-)
--- a/graal/com.oracle.graal.alloc/src/com/oracle/graal/alloc/ComputeBlockOrder.java	Thu Aug 29 21:32:00 2013 +0200
+++ b/graal/com.oracle.graal.alloc/src/com/oracle/graal/alloc/ComputeBlockOrder.java	Thu Aug 29 21:32:54 2013 +0200
@@ -152,39 +152,40 @@
     /**
      * Add a linear path to the code emission order greedily following the most likely successor.
      */
-    private static void addPathToCodeEmittingOrder(Block block, List<Block> order, PriorityQueue<Block> worklist, BitSet visitedBlocks, NodesToDoubles nodeProbabilities) {
-
-        // Skip loop headers if there is only a single loop end block to make the backward jump be a
-        // conditional jump.
-        if (!skipLoopHeader(block)) {
+    private static void addPathToCodeEmittingOrder(Block initialBlock, List<Block> order, PriorityQueue<Block> worklist, BitSet visitedBlocks, NodesToDoubles nodeProbabilities) {
+        Block block = initialBlock;
+        while (block != null) {
+            // Skip loop headers if there is only a single loop end block so
+            // that the backward jump can be a conditional jump.
+            if (!skipLoopHeader(block)) {
 
-            // Align unskipped loop headers as they are the target of the backward jump.
-            if (block.isLoopHeader()) {
-                block.setAlign(true);
+                // Align unskipped loop headers as they are the target of the backward jump.
+                if (block.isLoopHeader()) {
+                    block.setAlign(true);
+                }
+                addBlock(block, order);
             }
-            addBlock(block, order);
-        }
 
-        Loop loop = block.getLoop();
-        if (block.isLoopEnd() && skipLoopHeader(loop.header)) {
+            Loop loop = block.getLoop();
+            if (block.isLoopEnd() && skipLoopHeader(loop.header)) {
+
+                // This is the only loop end of a skipped loop header.
+                // Add the header immediately afterwards.
+                addBlock(loop.header, order);
 
-            // This is the only loop end of a skipped loop header. Add the header immediately
-            // afterwards.
-            addBlock(loop.header, order);
-
-            // Make sure the loop successors of the loop header are aligned as they are the target
-            // of the backward jump.
-            for (Block successor : loop.header.getSuccessors()) {
-                if (successor.getLoopDepth() == block.getLoopDepth()) {
-                    successor.setAlign(true);
+                // Make sure the loop successors of the loop header are aligned
+                // as they are the target of the backward jump.
+                for (Block successor : loop.header.getSuccessors()) {
+                    if (successor.getLoopDepth() == block.getLoopDepth()) {
+                        successor.setAlign(true);
+                    }
                 }
             }
-        }
 
-        Block mostLikelySuccessor = findAndMarkMostLikelySuccessor(block, visitedBlocks, nodeProbabilities);
-        enqueueSuccessors(block, worklist, visitedBlocks);
-        if (mostLikelySuccessor != null) {
-            addPathToCodeEmittingOrder(mostLikelySuccessor, order, worklist, visitedBlocks, nodeProbabilities);
+            Block mostLikelySuccessor = findAndMarkMostLikelySuccessor(block, visitedBlocks, nodeProbabilities);
+            enqueueSuccessors(block, worklist, visitedBlocks);
+            block = mostLikelySuccessor;
         }
     }
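
The rewrite above converts the tail-recursive call on the most likely successor into a loop, so a long trace no longer grows the call stack. A minimal standalone sketch of the pattern, with hypothetical Node/mostLikelySuccessor stand-ins rather than the Graal API:

    class PathSketch {
        interface Node {
            Node mostLikelySuccessor(); // null when the trace ends
        }

        static void addPath(Node initial, java.util.List<Node> order) {
            Node node = initial;
            while (node != null) {                  // was: one stack frame per block
                order.add(node);                    // per-node work (addBlock etc.)
                node = node.mostLikelySuccessor();  // was: the tail-recursive call
            }
        }
    }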
 
--- a/graal/com.oracle.graal.api.code/src/com/oracle/graal/api/code/CodeCacheProvider.java	Thu Aug 29 21:32:00 2013 +0200
+++ b/graal/com.oracle.graal.api.code/src/com/oracle/graal/api/code/CodeCacheProvider.java	Thu Aug 29 21:32:54 2013 +0200
@@ -89,4 +89,12 @@
      * Gets a description of the target architecture.
      */
     TargetDescription getTarget();
+
+    /**
+     * Returns the register the runtime uses for maintaining the heap base address. This is mainly
+     * used by runtimes that support compressed pointers.
+     * 
+     * @return the register that holds the heap base address
+     */
+    Register heapBaseRegister();
 }
--- a/graal/com.oracle.graal.compiler.test/src/com/oracle/graal/compiler/test/ea/EscapeAnalysisTest.java	Thu Aug 29 21:32:00 2013 +0200
+++ b/graal/com.oracle.graal.compiler.test/src/com/oracle/graal/compiler/test/ea/EscapeAnalysisTest.java	Thu Aug 29 21:32:54 2013 +0200
@@ -236,6 +236,7 @@
                 HighTierContext context = new HighTierContext(runtime(), assumptions, replacements, null, getDefaultPhasePlan(), OptimisticOptimizations.ALL);
                 new InliningPhase().apply(graph, context);
                 new DeadCodeEliminationPhase().apply(graph);
+                new CanonicalizerPhase(true).apply(graph, context);
                 new PartialEscapePhase(iterativeEscapeAnalysis).apply(graph, context);
                 Assert.assertEquals(1, graph.getNodes(ReturnNode.class).count());
                 ReturnNode returnNode = graph.getNodes(ReturnNode.class).first();
--- a/graal/com.oracle.graal.compiler.test/src/com/oracle/graal/compiler/test/ea/PartialEscapeAnalysisTest.java	Thu Aug 29 21:32:00 2013 +0200
+++ b/graal/com.oracle.graal.compiler.test/src/com/oracle/graal/compiler/test/ea/PartialEscapeAnalysisTest.java	Thu Aug 29 21:32:54 2013 +0200
@@ -124,6 +124,45 @@
         }
     }
 
+    @Test
+    public void testCache() {
+        testMaterialize("testCacheSnippet", 0.5, 1);
+    }
+
+    public static class CacheKey {
+
+        private final int idx;
+        private final Object ref;
+
+        public CacheKey(int idx, Object ref) {
+            this.idx = idx;
+            this.ref = ref;
+        }
+
+        @Override
+        public int hashCode() {
+            return 31 * idx + ref.hashCode();
+        }
+
+        public synchronized boolean equals(CacheKey other) {
+            return idx == other.idx && ref == other.ref;
+        }
+    }
+
+    public static CacheKey cacheKey = null;
+    public static Object value = null;
+
+    private static native Object createValue(CacheKey key);
+
+    public static Object testCacheSnippet(int idx, Object ref) {
+        CacheKey key = new CacheKey(idx, ref);
+        if (!key.equals(cacheKey)) {
+            cacheKey = key;
+            value = createValue(key);
+        }
+        return value;
+    }
+
     @SafeVarargs
     final void testMaterialize(final String snippet, double expectedProbability, int expectedCount, Class<? extends Node>... invalidNodeClasses) {
         StructuredGraph result = processMethod(snippet);
@@ -162,15 +201,14 @@
                 HighTierContext context = new HighTierContext(runtime(), assumptions, replacements, null, getDefaultPhasePlan(), OptimisticOptimizations.ALL);
                 new InliningPhase().apply(graph, context);
                 new DeadCodeEliminationPhase().apply(graph);
-                CanonicalizerPhase canonicalizer = new CanonicalizerPhase(true);
-                canonicalizer.apply(graph, context);
+                new CanonicalizerPhase(true).apply(graph, context);
                 new PartialEscapePhase(false).apply(graph, context);
 
                 for (MergeNode merge : graph.getNodes(MergeNode.class)) {
                     merge.setStateAfter(null);
                 }
                 new DeadCodeEliminationPhase().apply(graph);
-                canonicalizer.apply(graph, context);
+                new CanonicalizerPhase(true).apply(graph, context);
                 return graph;
             }
         });
--- a/graal/com.oracle.graal.hotspot.amd64/src/com/oracle/graal/hotspot/amd64/AMD64HotSpotBackend.java	Thu Aug 29 21:32:00 2013 +0200
+++ b/graal/com.oracle.graal.hotspot.amd64/src/com/oracle/graal/hotspot/amd64/AMD64HotSpotBackend.java	Thu Aug 29 21:32:54 2013 +0200
@@ -247,7 +247,7 @@
             HotSpotRuntime hr = ((HotSpotRuntime) gen.getRuntime());
             if (hr.config.useCompressedKlassPointers) {
                 Register register = r10;
-                AMD64Move.decodeKlassPointer(asm, register, src, hr.config.narrowKlassBase, hr.config.narrowKlassShift, hr.config.logKlassAlignment);
+                AMD64Move.decodeKlassPointer(asm, register, hr.heapBaseRegister(), src, hr.config.narrowKlassBase, hr.config.narrowKlassShift, hr.config.logKlassAlignment);
                 asm.cmpq(inlineCacheKlass, register);
             } else {
                 asm.cmpq(inlineCacheKlass, src);
--- a/graal/com.oracle.graal.hotspot.amd64/src/com/oracle/graal/hotspot/amd64/AMD64HotSpotLIRGenerator.java	Thu Aug 29 21:32:00 2013 +0200
+++ b/graal/com.oracle.graal.hotspot.amd64/src/com/oracle/graal/hotspot/amd64/AMD64HotSpotLIRGenerator.java	Thu Aug 29 21:32:54 2013 +0200
@@ -404,6 +404,9 @@
         }
     }
 
+    /**
+     * Returns whether the input access is a (de)compression candidate.
+     */
     private static boolean isCompressCandidate(DeoptimizingNode access) {
         return access != null && ((HeapAccess) access).isCompressible();
     }
@@ -413,13 +416,23 @@
         AMD64AddressValue loadAddress = asAddressValue(address);
         Variable result = newVariable(kind);
         assert access == null || access instanceof HeapAccess;
+        /**
+         * Currently, pointer (de)compression applies conditionally to some objects (oops,
+         * kind==Object) and some addresses (klass pointers, kind==Long). The input operation is
+         * first checked to see whether it has been tagged as a potential compression candidate;
+         * depending on the kind, the specific (de)compression code is then emitted. Although the
+         * compression and decompression algorithms for oops and klass pointers are currently
+         * identical, HotSpot implements them as separate methods, so they may diverge in the
+         * future.
+         */
         if (isCompressCandidate(access)) {
             if (runtime().config.useCompressedOops && kind == Kind.Object) {
-                append(new LoadCompressedPointer(kind, result, loadAddress, access != null ? state(access) : null, runtime().config.narrowOopBase, runtime().config.narrowOopShift,
-                                runtime().config.logMinObjAlignment));
+                append(new LoadCompressedPointer(kind, result, runtime().heapBaseRegister().asValue(), loadAddress, access != null ? state(access) : null, runtime().config.narrowOopBase,
+                                runtime().config.narrowOopShift, runtime().config.logMinObjAlignment));
             } else if (runtime().config.useCompressedKlassPointers && kind == Kind.Long) {
-                append(new LoadCompressedPointer(kind, result, loadAddress, access != null ? state(access) : null, runtime().config.narrowKlassBase, runtime().config.narrowKlassShift,
-                                runtime().config.logKlassAlignment));
+                append(new LoadCompressedPointer(kind, result, runtime().heapBaseRegister().asValue(), loadAddress, access != null ? state(access) : null, runtime().config.narrowKlassBase,
+                                runtime().config.narrowKlassShift, runtime().config.logKlassAlignment));
             } else {
                 append(new LoadOp(kind, result, loadAddress, access != null ? state(access) : null));
             }
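
As a rough illustration of the arithmetic the comment above refers to (plain Java with hypothetical names, not part of this changeset; the real code emits AMD64 instructions via LoadCompressedPointer in AMD64Move): compression subtracts the heap base and shifts right by the alignment, decompression reverses this, and null maps to null in both directions.

    class CompressedPointerSketch {
        // narrow = (address - heapBase) >>> shift, with null -> null
        static long encode(long address, long heapBase, int shift) {
            return address == 0 ? 0 : (address - heapBase) >>> shift;
        }

        // address = (narrow << shift) + heapBase, with null -> null
        static long decode(long narrow, long heapBase, int shift) {
            return narrow == 0 ? 0 : (narrow << shift) + heapBase;
        }
    }
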
--- a/graal/com.oracle.graal.hotspot.amd64/src/com/oracle/graal/hotspot/amd64/AMD64HotSpotRuntime.java	Thu Aug 29 21:32:00 2013 +0200
+++ b/graal/com.oracle.graal.hotspot.amd64/src/com/oracle/graal/hotspot/amd64/AMD64HotSpotRuntime.java	Thu Aug 29 21:32:54 2013 +0200
@@ -96,6 +96,11 @@
     }
 
     @Override
+    public Register heapBaseRegister() {
+        return r12;
+    }
+
+    @Override
     protected RegisterConfig createRegisterConfig() {
         return new AMD64HotSpotRegisterConfig(graalRuntime.getTarget().arch, config);
     }
--- a/graal/com.oracle.graal.hotspot.sparc/src/com/oracle/graal/hotspot/sparc/SPARCHotSpotRuntime.java	Thu Aug 29 21:32:00 2013 +0200
+++ b/graal/com.oracle.graal.hotspot.sparc/src/com/oracle/graal/hotspot/sparc/SPARCHotSpotRuntime.java	Thu Aug 29 21:32:54 2013 +0200
@@ -83,6 +83,11 @@
     }
 
     @Override
+    public Register heapBaseRegister() {
+        return r12;
+    }
+
+    @Override
     protected RegisterConfig createRegisterConfig() {
         return new SPARCHotSpotRegisterConfig(graalRuntime.getTarget().arch, config);
     }
--- a/graal/com.oracle.graal.hotspot.test/src/com/oracle/graal/hotspot/test/WriteBarrierAdditionTest.java	Thu Aug 29 21:32:00 2013 +0200
+++ b/graal/com.oracle.graal.hotspot.test/src/com/oracle/graal/hotspot/test/WriteBarrierAdditionTest.java	Thu Aug 29 21:32:54 2013 +0200
@@ -195,7 +195,7 @@
      */
     @Test
     public void test10() throws Exception {
-        test2("testUnsafeLoad", wr, new Long(8), new Integer(8));
+        test2("testUnsafeLoad", wr, new Long(useCompressedOops() ? 6 : 8), new Integer(useCompressedOops() ? 6 : 8));
     }
 
     /**
--- a/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/meta/HotSpotRuntime.java	Thu Aug 29 21:32:00 2013 +0200
+++ b/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/meta/HotSpotRuntime.java	Thu Aug 29 21:32:54 2013 +0200
@@ -113,7 +113,7 @@
     private InstanceOfSnippets.Templates instanceofSnippets;
     private NewObjectSnippets.Templates newObjectSnippets;
     private MonitorSnippets.Templates monitorSnippets;
-    private WriteBarrierSnippets.Templates writeBarrierSnippets;
+    protected WriteBarrierSnippets.Templates writeBarrierSnippets;
     private BoxingSnippets.Templates boxingSnippets;
     private LoadExceptionObjectSnippets.Templates exceptionObjectSnippets;
     private UnsafeLoadSnippets.Templates unsafeLoadSnippets;
@@ -643,7 +643,7 @@
                 UnsafeLoadNode load = (UnsafeLoadNode) n;
                 assert load.kind() != Kind.Illegal;
                 boolean compressible = (!load.object().isNullConstant() && load.accessKind() == Kind.Object);
-                if (addReadBarrier(load)) {
+                if (addReadBarrier(load, tool)) {
                     unsafeLoadSnippets.lower(load, tool);
                 } else {
                     IndexedLocationNode location = IndexedLocationNode.create(ANY_LOCATION, load.accessKind(), load.displacement(), load.offset(), graph, 1);
@@ -852,13 +852,12 @@
         }
     }
 
-    private static boolean addReadBarrier(UnsafeLoadNode load) {
-        if (useG1GC()) {
-            if (load.object().kind() == Kind.Object && load.accessKind() == Kind.Object && !ObjectStamp.isObjectAlwaysNull(load.object())) {
-                ResolvedJavaType type = ObjectStamp.typeOrNull(load.object());
-                if (type != null && !type.isArray()) {
-                    return true;
-                }
+    private static boolean addReadBarrier(UnsafeLoadNode load, LoweringTool tool) {
+        if (useG1GC() && tool.getLoweringType() == LoweringType.AFTER_GUARDS && load.object().kind() == Kind.Object && load.accessKind() == Kind.Object &&
+                        !ObjectStamp.isObjectAlwaysNull(load.object())) {
+            ResolvedJavaType type = ObjectStamp.typeOrNull(load.object());
+            if (type != null && !type.isArray()) {
+                return true;
             }
         }
         return false;
--- a/graal/com.oracle.graal.lir.amd64/src/com/oracle/graal/lir/amd64/AMD64Move.java	Thu Aug 29 21:32:00 2013 +0200
+++ b/graal/com.oracle.graal.lir.amd64/src/com/oracle/graal/lir/amd64/AMD64Move.java	Thu Aug 29 21:32:54 2013 +0200
@@ -24,7 +24,6 @@
 
 import static com.oracle.graal.api.code.ValueUtil.*;
 import static com.oracle.graal.lir.LIRInstruction.OperandFlag.*;
-
 import static java.lang.Double.*;
 import static java.lang.Float.*;
 
@@ -123,12 +122,14 @@
         private long base;
         private int shift;
         private int alignment;
+        @Alive({REG}) protected AllocatableValue heapBaseRegister;
 
-        public LoadCompressedPointer(Kind kind, AllocatableValue result, AMD64AddressValue address, LIRFrameState state, long base, int shift, int alignment) {
+        public LoadCompressedPointer(Kind kind, AllocatableValue result, AllocatableValue heapBaseRegister, AMD64AddressValue address, LIRFrameState state, long base, int shift, int alignment) {
             super(kind, result, address, state);
             this.base = base;
             this.shift = shift;
             this.alignment = alignment;
+            this.heapBaseRegister = heapBaseRegister;
             assert kind == Kind.Object || kind == Kind.Long;
         }
 
@@ -137,9 +138,9 @@
             Register resRegister = asRegister(result);
             masm.movl(resRegister, address.toAddress());
             if (kind == Kind.Object) {
-                decodePointer(masm, resRegister, base, shift, alignment);
+                decodePointer(masm, resRegister, asRegister(heapBaseRegister), base, shift, alignment);
             } else {
-                decodeKlassPointer(masm, resRegister, base, shift, alignment);
+                decodeKlassPointer(masm, resRegister, asRegister(heapBaseRegister), base, shift, alignment);
             }
         }
     }
@@ -214,9 +215,9 @@
         public void emitCode(TargetMethodAssembler tasm, AMD64MacroAssembler masm) {
             masm.movq(asRegister(scratch), asRegister(input));
             if (kind == Kind.Object) {
-                encodePointer(masm, asRegister(scratch), base, shift, alignment);
+                encodePointer(masm, asRegister(scratch), tasm.runtime.heapBaseRegister(), base, shift, alignment);
             } else {
-                encodeKlassPointer(masm, asRegister(scratch), base, shift, alignment);
+                encodeKlassPointer(masm, asRegister(scratch), tasm.runtime.heapBaseRegister(), base, shift, alignment);
             }
             if (state != null) {
                 tasm.recordImplicitException(masm.codeBuffer.position(), state);
@@ -656,16 +657,16 @@
         final Register scratchRegister = asRegister(scratch);
         final Register cmpRegister = asRegister(cmpValue);
         final Register newRegister = asRegister(newValue);
-        encodePointer(masm, cmpRegister, base, shift, alignment);
+        encodePointer(masm, cmpRegister, tasm.runtime.heapBaseRegister(), base, shift, alignment);
         masm.movq(scratchRegister, newRegister);
-        encodePointer(masm, scratchRegister, base, shift, alignment);
+        encodePointer(masm, scratchRegister, tasm.runtime.heapBaseRegister(), base, shift, alignment);
         if (tasm.target.isMP) {
             masm.lock();
         }
         masm.cmpxchgl(scratchRegister, address.toAddress());
     }
 
-    private static void encodePointer(AMD64MacroAssembler masm, Register scratchRegister, long base, int shift, int alignment) {
+    private static void encodePointer(AMD64MacroAssembler masm, Register scratchRegister, Register heapBaseRegister, long base, int shift, int alignment) {
         // If the base is zero, the uncompressed address has to be shifted right
         // in order to be compressed.
         if (base == 0) {
@@ -679,13 +680,13 @@
             masm.testq(scratchRegister, scratchRegister);
             // If the stored reference is null, move the heap base into the scratch
             // register and then calculate the compressed oop value.
-            masm.cmovq(ConditionFlag.Equal, scratchRegister, AMD64.r12);
-            masm.subq(scratchRegister, AMD64.r12);
+            masm.cmovq(ConditionFlag.Equal, scratchRegister, heapBaseRegister);
+            masm.subq(scratchRegister, heapBaseRegister);
             masm.shrq(scratchRegister, alignment);
         }
     }
 
-    private static void decodePointer(AMD64MacroAssembler masm, Register resRegister, long base, int shift, int alignment) {
+    private static void decodePointer(AMD64MacroAssembler masm, Register resRegister, Register heapBaseRegister, long base, int shift, int alignment) {
         // If the base is zero, the compressed address has to be shifted left
         // in order to be uncompressed.
         if (base == 0) {
@@ -698,14 +699,14 @@
             masm.shlq(resRegister, alignment);
             masm.jccb(ConditionFlag.Equal, done);
             // Otherwise the heap base is added to the shifted address.
-            masm.addq(resRegister, AMD64.r12);
+            masm.addq(resRegister, heapBaseRegister);
             masm.bind(done);
         }
     }
 
-    private static void encodeKlassPointer(AMD64MacroAssembler masm, Register scratchRegister, long base, int shift, int alignment) {
+    private static void encodeKlassPointer(AMD64MacroAssembler masm, Register scratchRegister, Register heapBaseRegister, long base, int shift, int alignment) {
         if (base != 0) {
-            masm.subq(scratchRegister, AMD64.r12);
+            masm.subq(scratchRegister, heapBaseRegister);
         }
         if (shift != 0) {
             assert alignment == shift : "Encode algorithm is wrong";
@@ -713,20 +714,21 @@
         }
     }
 
-    private static void decodeKlassPointer(AMD64MacroAssembler masm, Register resRegister, long base, int shift, int alignment) {
+    private static void decodeKlassPointer(AMD64MacroAssembler masm, Register resRegister, Register heapBaseRegister, long base, int shift, int alignment) {
         if (shift != 0) {
             assert alignment == shift : "Decode algorithm is wrong";
             masm.shlq(resRegister, alignment);
             if (base != 0) {
-                masm.addq(resRegister, AMD64.r12);
+                masm.addq(resRegister, heapBaseRegister);
             }
         } else {
             assert base == 0 : "Sanity";
         }
     }
 
-    public static void decodeKlassPointer(AMD64MacroAssembler masm, Register register, AMD64Address address, long narrowKlassBase, int narrowKlassShift, int logKlassAlignment) {
+    public static void decodeKlassPointer(AMD64MacroAssembler masm, Register register, Register heapBaseRegister, AMD64Address address, long narrowKlassBase, int narrowKlassShift,
+                    int logKlassAlignment) {
         masm.movl(register, address);
-        decodeKlassPointer(masm, register, narrowKlassBase, narrowKlassShift, logKlassAlignment);
+        decodeKlassPointer(masm, register, heapBaseRegister, narrowKlassBase, narrowKlassShift, logKlassAlignment);
     }
 }
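
The cmovq/subq/shrq sequence in encodePointer handles null without a branch: when the reference is null, the heap base itself is moved into the scratch register so that the subtraction yields zero. A hedged Java rendering of the same idea (illustration only; the ternary stands in for the conditional move):

    class EncodeSketch {
        // Hypothetical names; the real code operates on machine registers.
        static long encodeOop(long oop, long heapBase, int alignment) {
            long value = (oop == 0) ? heapBase : oop;  // cmovq(Equal, scratch, heapBase)
            return (value - heapBase) >>> alignment;   // subq + shrq; null encodes to 0
        }
    }
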
--- a/graal/com.oracle.graal.nodes/src/com/oracle/graal/nodes/LoopBeginNode.java	Thu Aug 29 21:32:00 2013 +0200
+++ b/graal/com.oracle.graal.nodes/src/com/oracle/graal/nodes/LoopBeginNode.java	Thu Aug 29 21:32:54 2013 +0200
@@ -47,6 +47,7 @@
     }
 
     public void setLoopFrequency(double loopFrequency) {
+        assert loopFrequency >= 0;
         this.loopFrequency = loopFrequency;
     }
 
--- a/graal/com.oracle.graal.nodes/src/com/oracle/graal/nodes/util/NodesToDoubles.java	Thu Aug 29 21:32:00 2013 +0200
+++ b/graal/com.oracle.graal.nodes/src/com/oracle/graal/nodes/util/NodesToDoubles.java	Thu Aug 29 21:32:54 2013 +0200
@@ -35,7 +35,7 @@
     }
 
     public void put(FixedNode n, double value) {
-        assert value >= 0.0;
+        assert value >= 0.0 : value;
         nodeProbabilities.put(n, value);
     }
 
--- a/graal/com.oracle.graal.phases.common/src/com/oracle/graal/phases/common/ConvertDeoptimizeToGuardPhase.java	Thu Aug 29 21:32:00 2013 +0200
+++ b/graal/com.oracle.graal.phases.common/src/com/oracle/graal/phases/common/ConvertDeoptimizeToGuardPhase.java	Thu Aug 29 21:32:54 2013 +0200
@@ -34,7 +34,7 @@
  * This phase will find branches which always end with a {@link DeoptimizeNode} and replace their
  * {@link ControlSplitNode ControlSplitNodes} with {@link FixedGuardNode FixedGuardNodes}.
  * 
- * This is useful because {@link FixedGuardNode FixedGuardNodes FixedGuardNodess} will be lowered to
+ * This is useful because {@link FixedGuardNode FixedGuardNodes} will be lowered to
  * {@link GuardNode GuardNodes} which can later be optimized more aggressively than control-flow
  * constructs.
  * 
--- a/graal/com.oracle.graal.phases.common/src/com/oracle/graal/phases/common/LoweringPhase.java	Thu Aug 29 21:32:00 2013 +0200
+++ b/graal/com.oracle.graal.phases.common/src/com/oracle/graal/phases/common/LoweringPhase.java	Thu Aug 29 21:32:54 2013 +0200
@@ -128,7 +128,7 @@
         }
 
         private void setLastFixedNode(FixedWithNextNode n) {
-            assert n == null || n.isAlive() : n;
+            assert n.isAlive() : n;
             lastFixedNode = n;
         }
     }
@@ -220,7 +220,7 @@
 
             // Lower the instructions of this block.
             List<ScheduledNode> nodes = schedule.nodesFor(b);
-            loweringTool.setLastFixedNode(null);
+            loweringTool.setLastFixedNode(b.getBeginNode());
             for (Node node : nodes) {
 
                 if (node.isDeleted()) {
@@ -228,21 +228,11 @@
                     continue;
                 }
 
-                if (loweringTool.lastFixedNode() == null) {
-                    AbstractBeginNode beginNode = b.getBeginNode();
-                    if (node == beginNode) {
-                        loweringTool.setLastFixedNode(beginNode);
-                    } else {
-                        continue;
-                    }
-                }
-
                 // Cache the next node to be able to reconstruct the previous of the next node
                 // after lowering.
                 FixedNode nextNode = null;
                 if (node instanceof FixedWithNextNode) {
-                    FixedWithNextNode fixed = (FixedWithNextNode) node;
-                    nextNode = fixed.next();
+                    nextNode = ((FixedWithNextNode) node).next();
                 } else {
                     nextNode = loweringTool.lastFixedNode().next();
                 }
@@ -253,14 +243,19 @@
                 }
 
                 if (!nextNode.isAlive()) {
+                    // This can happen when the rest of the block is killed by lowering (e.g. by
+                    // an unconditional deopt).
                     break;
                 } else {
                     Node nextLastFixed = nextNode.predecessor();
-                    if (nextLastFixed instanceof FixedWithNextNode) {
-                        loweringTool.setLastFixedNode((FixedWithNextNode) nextLastFixed);
-                    } else {
-                        loweringTool.setLastFixedNode((FixedWithNextNode) nextNode);
+                    if (!(nextLastFixed instanceof FixedWithNextNode)) {
+                        // Insert a BeginNode so there is a valid last fixed node for the next lowerable node.
+                        BeginNode begin = node.graph().add(new BeginNode());
+                        nextLastFixed.replaceFirstSuccessor(nextNode, begin);
+                        begin.setNext(nextNode);
+                        nextLastFixed = begin;
                     }
+                    loweringTool.setLastFixedNode((FixedWithNextNode) nextLastFixed);
                 }
             }
         }
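
The restructured loop above keeps the lowering tool's last fixed node valid: when the predecessor of nextNode is no longer a FixedWithNextNode after lowering, a fresh BeginNode is spliced in front of nextNode to serve as the anchor. A schematic of that splice on a toy singly linked node model (hypothetical classes, not the Graal graph API):

    class SpliceSketch {
        static class Node {
            Node next;
        }

        // Mirrors graph.add(new BeginNode()), replaceFirstSuccessor and setNext.
        static Node spliceAnchor(Node pred, Node next) {
            Node begin = new Node();
            pred.next = begin;   // nextLastFixed.replaceFirstSuccessor(nextNode, begin)
            begin.next = next;   // begin.setNext(nextNode)
            return begin;        // becomes the new lastFixedNode
        }
    }
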
--- a/graal/com.oracle.graal.phases/src/com/oracle/graal/phases/graph/ComputeProbabilityClosure.java	Thu Aug 29 21:32:00 2013 +0200
+++ b/graal/com.oracle.graal.phases/src/com/oracle/graal/phases/graph/ComputeProbabilityClosure.java	Thu Aug 29 21:32:54 2013 +0200
@@ -187,7 +187,7 @@
                         if (t == -1) {
                             return -1;
                         }
-                        factor *= t;
+                        factor = multiplySaturate(factor, t);
                     }
                     backEdgeProb += nodeProbabilities.get(le) * factor;
                 }
@@ -203,6 +203,21 @@
         }
     }
 
+    /**
+     * Multiplies {@code a} and {@code b} and saturates the result to 1/{@link Double#MIN_NORMAL}.
+     * 
+     * @param a the first factor
+     * @param b the second factor
+     * @return a times b saturated to 1/{@link Double#MIN_NORMAL}
+     */
+    public static double multiplySaturate(double a, double b) {
+        double r = a * b;
+        if (r > 1 / Double.MIN_NORMAL) {
+            return 1 / Double.MIN_NORMAL;
+        }
+        return r;
+    }
+
     private class Probability extends MergeableState<Probability> {
 
         public double probability;
@@ -236,7 +251,7 @@
                         if (loopFrequency == -1) {
                             return false;
                         }
-                        probability *= loopFrequency;
+                        probability = multiplySaturate(probability, loopFrequency);
                         assert probability >= 0;
                     }
                 }
@@ -248,7 +263,7 @@
                             if (loopFrequency == -1) {
                                 return false;
                             }
-                            prob *= loopFrequency;
+                            prob = multiplySaturate(prob, loopFrequency);
                             assert prob >= 0;
                         }
                     }
@@ -335,7 +350,7 @@
                 assert loops != null;
                 double countProd = 1;
                 for (LoopInfo loop : loops) {
-                    countProd *= loop.loopFrequency(nodeProbabilities);
+                    countProd = multiplySaturate(countProd, loop.loopFrequency(nodeProbabilities));
                 }
                 count = countProd;
             }
@@ -344,7 +359,7 @@
 
         @Override
         public void loopBegin(LoopBeginNode loopBegin) {
-            count *= loopBegin.loopFrequency();
+            count = multiplySaturate(count, loopBegin.loopFrequency());
         }
     }
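
A quick worked check of the saturation bound (standalone copy of the helper, illustration only): 1/Double.MIN_NORMAL is roughly 4.49e307, so a product that would overflow to infinity is capped while ordinary products pass through unchanged.

    class SaturateDemo {
        static double multiplySaturate(double a, double b) {
            double r = a * b;
            return r > 1 / Double.MIN_NORMAL ? 1 / Double.MIN_NORMAL : r;
        }

        public static void main(String[] args) {
            double cap = 1 / Double.MIN_NORMAL;                         // ~4.49e307
            System.out.println(multiplySaturate(1e200, 1e200) == cap);  // true: 1e400 saturates
            System.out.println(multiplySaturate(2.0, 3.0));             // 6.0, unchanged
        }
    }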
 
--- a/graal/com.oracle.graal.phases/src/com/oracle/graal/phases/schedule/SchedulePhase.java	Thu Aug 29 21:32:00 2013 +0200
+++ b/graal/com.oracle.graal.phases/src/com/oracle/graal/phases/schedule/SchedulePhase.java	Thu Aug 29 21:32:54 2013 +0200
@@ -770,12 +770,12 @@
                     // If a FrameState is an outer FrameState this method behaves as if the inner
                     // FrameState was the actual usage, by recursing.
                     blocksForUsage(node, unscheduledUsage, closure, strategy);
-                } else if (unscheduledUsage instanceof MergeNode) {
-                    // Only FrameStates can be connected to MergeNodes.
+                } else if (unscheduledUsage instanceof AbstractBeginNode) {
+                    // Only FrameStates can be connected to BeginNodes.
                     if (!(usage instanceof FrameState)) {
                         throw new SchedulingError(usage.toString());
                     }
-                    // If a FrameState belongs to a MergeNode then it's inputs will be placed at the
+                    // If a FrameState belongs to a BeginNode then its inputs will be placed at the
                     // common dominator of all EndNodes.
                     for (Node pred : unscheduledUsage.cfgPredecessors()) {
                         closure.apply(cfg.getNodeToBlock().get(pred));
--- a/graal/com.oracle.graal.truffle/src/com/oracle/graal/truffle/nodes/frame/FrameAccessNode.java	Thu Aug 29 21:32:00 2013 +0200
+++ b/graal/com.oracle.graal.truffle/src/com/oracle/graal/truffle/nodes/frame/FrameAccessNode.java	Thu Aug 29 21:32:54 2013 +0200
@@ -133,6 +133,8 @@
                 return Kind.Long;
             case Int:
                 return Kind.Int;
+            case Byte:
+                return Kind.Byte;
             case Double:
                 return Kind.Double;
             case Float:
--- a/src/share/vm/runtime/arguments.cpp	Thu Aug 29 21:32:00 2013 +0200
+++ b/src/share/vm/runtime/arguments.cpp	Thu Aug 29 21:32:54 2013 +0200
@@ -2203,10 +2203,8 @@
 #ifdef GRAAL
   if (UseG1GC) {
       if (IgnoreUnrecognizedVMOptions) {
-        warning("UseG1GC is still experimental in Graal, use SerialGC instead ");
         FLAG_SET_CMDLINE(bool, UseG1GC, true);
       } else {
-        warning("UseG1GC is still experimental in Graal, use SerialGC instead ");
         status = true;
       }
   } else {