changeset 23064:a7c7205bd224

TraceRA: add TraceLinearScanWalker.findOptimalSpillPos().
author Josef Eisl <josef.eisl@jku.at>
date Fri, 20 Nov 2015 11:26:35 +0100
parents 7f4d76ddf97f
children cc80ff107273
files graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceInterval.java graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceLinearScan.java graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceLinearScanEliminateSpillMovePhase.java graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceLinearScanRegisterAllocationPhase.java graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceLinearScanWalker.java
diffstat 5 files changed, 202 insertions(+), 64 deletions(-)
--- a/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceInterval.java	Thu Nov 19 18:09:53 2015 +0100
+++ b/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceInterval.java	Fri Nov 20 11:26:35 2015 +0100
@@ -514,7 +514,9 @@
     }
 
     public void setSpillDefinitionPos(int pos) {
-        assert spillState() == SpillState.NoDefinitionFound || spillDefinitionPos() == -1 : "cannot set the position twice";
+        assert spillState() == SpillState.NoDefinitionFound || spillState() == SpillState.NoSpillStore || spillDefinitionPos() == -1 : "cannot set the position twice";
+        int to = to();
+        assert pos < to : String.format("Cannot spill %s at %d", this, pos);
         splitParent().spillDefinitionPos = pos;
     }
 
--- a/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceLinearScan.java	Thu Nov 19 18:09:53 2015 +0100
+++ b/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceLinearScan.java	Fri Nov 20 11:26:35 2015 +0100
@@ -61,6 +61,7 @@
 import com.oracle.graal.lir.LIRInstruction;
 import com.oracle.graal.lir.LIRInstruction.OperandFlag;
 import com.oracle.graal.lir.LIRInstruction.OperandMode;
+import com.oracle.graal.lir.StandardOp.BlockEndOp;
 import com.oracle.graal.lir.ValueConsumer;
 import com.oracle.graal.lir.Variable;
 import com.oracle.graal.lir.VirtualStackSlot;
@@ -488,6 +489,12 @@
         return opId == 0 || blockForId(opId) != blockForId(opId - 1);
     }
 
+    boolean isBlockEnd(int opId) {
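+        // Instruction ids are even and assigned in steps of 2, so opId + 2 is the id of the
+        // next instruction; opId marks a block end iff that next instruction begins a block.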
+        boolean isBlockBegin = isBlockBegin(opId + 2);
+        assert isBlockBegin == (instructionForId(opId & (~1)) instanceof BlockEndOp);
+        return isBlockBegin;
+    }
+
     boolean coversBlockBegin(int opId1, int opId2) {
         return blockForId(opId1) != blockForId(opId2);
     }
@@ -515,7 +522,7 @@
 
     // * Phase 5: actual register allocation
 
-    private static <T extends IntervalHint> boolean isSorted(T[] intervals) {
+    private static <T extends IntervalHint> boolean isSortedByFrom(T[] intervals) {
         int from = -1;
         for (T interval : intervals) {
             assert interval != null;
@@ -525,6 +532,16 @@
         return true;
     }
 
+    private static boolean isSortedBySpillPos(TraceInterval[] intervals) {
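+        // assertion-only helper: verifies that the intervals are ordered by increasing spillDefinitionPos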
+        int from = -1;
+        for (TraceInterval interval : intervals) {
+            assert interval != null;
+            assert from <= interval.spillDefinitionPos();
+            from = interval.spillDefinitionPos();
+        }
+        return true;
+    }
+
     private static TraceInterval addToList(TraceInterval first, TraceInterval prev, TraceInterval interval) {
         TraceInterval newFirst = first;
         if (prev != null) {
@@ -535,8 +552,17 @@
         return newFirst;
     }
 
-    TraceInterval createUnhandledList(IntervalPredicate isList1) {
-        assert isSorted(sortedIntervals) : "interval list is not sorted";
+    TraceInterval createUnhandledListByFrom(IntervalPredicate isList1) {
+        assert isSortedByFrom(sortedIntervals) : "interval list is not sorted";
+        return createUnhandledList(isList1);
+    }
+
+    TraceInterval createUnhandledListBySpillPos(IntervalPredicate isList1) {
+        assert isSortedBySpillPos(sortedIntervals) : "interval list is not sorted";
+        return createUnhandledList(isList1);
+    }
+
+    private TraceInterval createUnhandledList(IntervalPredicate isList1) {
 
         TraceInterval list1 = TraceInterval.EndMarker;
 
@@ -576,7 +602,7 @@
     }
 
     FixedInterval createFixedUnhandledList() {
-        assert isSorted(sortedFixedIntervals) : "interval list is not sorted";
+        assert isSortedByFrom(sortedFixedIntervals) : "interval list is not sorted";
 
         FixedInterval list1 = FixedInterval.EndMarker;
 
@@ -686,6 +712,12 @@
         sortedIntervals = combinedList;
     }
 
+    void sortIntervalsBySpillPos() {
+        // TODO (JE): better algorithm?
+        // conventional sort-algorithm for new intervals
+        Arrays.sort(sortedIntervals, (TraceInterval a, TraceInterval b) -> a.spillDefinitionPos() - b.spillDefinitionPos());
+    }
+
     // wrapper for Interval.splitChildAtOpId that performs a bailout in product mode
     // instead of returning null
     public TraceInterval splitChildAtOpId(TraceInterval interval, int opId, LIRInstruction.OperandMode mode) {
--- a/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceLinearScanEliminateSpillMovePhase.java	Thu Nov 19 18:09:53 2015 +0100
+++ b/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceLinearScanEliminateSpillMovePhase.java	Fri Nov 20 11:26:35 2015 +0100
@@ -39,6 +39,7 @@
 import com.oracle.graal.debug.Indent;
 import com.oracle.graal.lir.LIRInsertionBuffer;
 import com.oracle.graal.lir.LIRInstruction;
+import com.oracle.graal.lir.LIRInstruction.OperandMode;
 import com.oracle.graal.lir.StandardOp.LoadConstantOp;
 import com.oracle.graal.lir.StandardOp.MoveOp;
 import com.oracle.graal.lir.StandardOp.ValueMoveOp;
@@ -72,15 +73,23 @@
     @SuppressWarnings("try")
     private static void eliminateSpillMoves(TraceLinearScan allocator, boolean shouldEliminateSpillMoves, TraceBuilderResult<?> traceBuilderResult) {
         try (Indent indent = Debug.logAndIndent("Eliminating unnecessary spill moves: Trace%d", traceBuilderResult.getTraceForBlock(allocator.sortedBlocks().get(0)))) {
+            allocator.sortIntervalsBySpillPos();
 
             /*
              * collect all intervals that must be stored after their definition. The list is sorted
              * by Interval.spillDefinitionPos.
              */
-            TraceInterval interval = allocator.createUnhandledList(spilledIntervals);
+            TraceInterval interval = allocator.createUnhandledListBySpillPos(spilledIntervals);
             if (DetailedAsserts.getValue()) {
                 checkIntervals(interval);
             }
+            if (Debug.isLogEnabled()) {
+                try (Indent indent2 = Debug.logAndIndent("Sorted intervals")) {
+                    for (TraceInterval i = interval; i != null; i = i.next) {
+                        Debug.log("%5d: %s", i.spillDefinitionPos(), i);
+                    }
+                }
+            }
 
             LIRInsertionBuffer insertionBuffer = new LIRInsertionBuffer();
             for (AbstractBlockBase<?> block : allocator.sortedBlocks()) {
@@ -93,70 +102,75 @@
                     for (int j = 0; j < numInst; j++) {
                         LIRInstruction op = instructions.get(j);
                         int opId = op.id();
+                        try (Indent indent2 = Debug.logAndIndent("%5d %s", opId, op)) {
 
-                        if (opId == -1) {
-                            MoveOp move = (MoveOp) op;
-                            /*
-                             * Remove move from register to stack if the stack slot is guaranteed to
-                             * be correct. Only moves that have been inserted by LinearScan can be
-                             * removed.
-                             */
-                            if (shouldEliminateSpillMoves && canEliminateSpillMove(allocator, block, move, lastOpId)) {
+                            if (opId == -1) {
+                                MoveOp move = (MoveOp) op;
                                 /*
-                                 * Move target is a stack slot that is always correct, so eliminate
-                                 * instruction.
+                                 * Remove move from register to stack if the stack slot is
+                                 * guaranteed to be correct. Only moves that have been inserted by
+                                 * LinearScan can be removed.
                                  */
-                                if (Debug.isLogEnabled()) {
-                                    if (move instanceof ValueMoveOp) {
-                                        ValueMoveOp vmove = (ValueMoveOp) move;
-                                        Debug.log("eliminating move from interval %d (%s) to %d (%s) in block %s", allocator.operandNumber(vmove.getInput()), vmove.getInput(),
-                                                        allocator.operandNumber(vmove.getResult()), vmove.getResult(), block);
-                                    } else {
-                                        LoadConstantOp load = (LoadConstantOp) move;
-                                        Debug.log("eliminating constant load from %s to %d (%s) in block %s", load.getConstant(), allocator.operandNumber(load.getResult()), load.getResult(), block);
+                                if (shouldEliminateSpillMoves && canEliminateSpillMove(allocator, block, move, lastOpId)) {
+                                    /*
+                                     * Move target is a stack slot that is always correct, so
+                                     * eliminate instruction.
+                                     */
+                                    if (Debug.isLogEnabled()) {
+                                        if (move instanceof ValueMoveOp) {
+                                            ValueMoveOp vmove = (ValueMoveOp) move;
+                                            Debug.log("eliminating move from interval %d (%s) to %d (%s) in block %s", allocator.operandNumber(vmove.getInput()), vmove.getInput(),
+                                                            allocator.operandNumber(vmove.getResult()), vmove.getResult(), block);
+                                        } else {
+                                            LoadConstantOp load = (LoadConstantOp) move;
+                                            Debug.log("eliminating constant load from %s to %d (%s) in block %s", load.getConstant(), allocator.operandNumber(load.getResult()), load.getResult(),
+                                                            block);
+                                        }
                                     }
+
+                                    // null-instructions are deleted by assignRegNum
+                                    instructions.set(j, null);
                                 }
 
-                                // null-instructions are deleted by assignRegNum
-                                instructions.set(j, null);
-                            }
-
-                        } else {
-                            lastOpId = opId;
-                            /*
-                             * Insert move from register to stack just after the beginning of the
-                             * interval.
-                             */
-                            assert interval == TraceInterval.EndMarker || interval.spillDefinitionPos() >= opId : "invalid order";
-                            assert interval == TraceInterval.EndMarker || (interval.isSplitParent() && SpillState.IN_MEMORY.contains(interval.spillState())) : "invalid interval";
+                            } else {
+                                lastOpId = opId;
+                                /*
+                                 * Insert move from register to stack just after the beginning of
+                                 * the interval.
+                                 */
+                                // assert interval == TraceInterval.EndMarker ||
+                                // interval.spillDefinitionPos() >= opId : "invalid order";
+                                assert interval == TraceInterval.EndMarker || (interval.isSplitParent() && SpillState.IN_MEMORY.contains(interval.spillState())) : "invalid interval";
 
-                            while (interval != TraceInterval.EndMarker && interval.spillDefinitionPos() == opId) {
-                                if (!interval.canMaterialize()) {
-                                    if (!insertionBuffer.initialized()) {
-                                        /*
-                                         * prepare insertion buffer (appended when all instructions
-                                         * in the block are processed)
-                                         */
-                                        insertionBuffer.init(instructions);
-                                    }
+                                while (interval != TraceInterval.EndMarker && interval.spillDefinitionPos() == opId) {
+                                    Debug.log("handle %s", interval);
+                                    if (!interval.canMaterialize()) {
+                                        if (!insertionBuffer.initialized()) {
+                                            /*
+                                             * prepare insertion buffer (appended when all
+                                             * instructions in the block are processed)
+                                             */
+                                            insertionBuffer.init(instructions);
+                                        }
 
-                                    AllocatableValue fromLocation = interval.location();
-                                    AllocatableValue toLocation = TraceLinearScan.canonicalSpillOpr(interval);
-                                    if (!fromLocation.equals(toLocation)) {
+                                        AllocatableValue fromLocation = interval.getSplitChildAtOpId(opId, OperandMode.DEF, allocator).location();
+                                        AllocatableValue toLocation = TraceLinearScan.canonicalSpillOpr(interval);
+                                        if (!fromLocation.equals(toLocation)) {
 
-                                        assert isRegister(fromLocation) : "from operand must be a register but is: " + fromLocation + " toLocation=" + toLocation + " spillState=" +
-                                                        interval.spillState();
-                                        assert isStackSlotValue(toLocation) : "to operand must be a stack slot";
+                                            assert isRegister(fromLocation) : "from operand must be a register but is: " + fromLocation + " toLocation=" + toLocation + " spillState=" +
+                                                            interval.spillState();
+                                            assert isStackSlotValue(toLocation) : "to operand must be a stack slot";
 
-                                        LIRInstruction move = allocator.getSpillMoveFactory().createMove(toLocation, fromLocation);
-                                        insertionBuffer.append(j + 1, move);
+                                            LIRInstruction move = allocator.getSpillMoveFactory().createMove(toLocation, fromLocation);
+                                            insertionBuffer.append(j + 1, move);
 
-                                        if (Debug.isLogEnabled()) {
-                                            Debug.log("inserting move after definition of interval %d to stack slot %s at opId %d", interval.operandNumber, interval.spillSlot(), opId);
+                                            if (Debug.isLogEnabled()) {
+                                                Debug.log("inserting move after definition of interval %d to stack slot %s at opId %d", interval.operandNumber, interval.spillSlot(), opId);
+                                            }
                                         }
                                     }
+                                    interval = interval.next;
                                 }
-                                interval = interval.next;
                             }
                         }
                     } // end of instruction iteration
@@ -202,13 +216,14 @@
         while (temp != TraceInterval.EndMarker) {
             assert temp.spillDefinitionPos() >= 0 : "invalid spill definition pos";
             if (prev != null) {
-                assert temp.from() >= prev.from() : "intervals not sorted";
+                // assert temp.from() >= prev.from() : "intervals not sorted";
                 assert temp.spillDefinitionPos() >= prev.spillDefinitionPos() : "when intervals are sorted by from :  then they must also be sorted by spillDefinitionPos";
             }
 
             assert temp.spillSlot() != null || temp.canMaterialize() : "interval has no spill slot assigned";
             assert temp.spillDefinitionPos() >= temp.from() : "invalid order";
-            assert temp.spillDefinitionPos() <= temp.from() + 2 : "only intervals defined once at their start-pos can be optimized";
+            // assert temp.spillDefinitionPos() <= temp.from() + 2 :
+            // "only intervals defined once at their start-pos can be optimized";
 
             if (Debug.isLogEnabled()) {
                 Debug.log("interval %d (from %d to %d) must be stored at %d", temp.operandNumber, temp.from(), temp.to(), temp.spillDefinitionPos());
--- a/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceLinearScanRegisterAllocationPhase.java	Thu Nov 19 18:09:53 2015 +0100
+++ b/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceLinearScanRegisterAllocationPhase.java	Fri Nov 20 11:26:35 2015 +0100
@@ -48,7 +48,7 @@
     private static void allocateRegisters(TraceLinearScan allocator) {
         try (Indent indent = Debug.logAndIndent("allocate registers")) {
             FixedInterval precoloredIntervals = allocator.createFixedUnhandledList();
-            TraceInterval notPrecoloredIntervals = allocator.createUnhandledList(TraceLinearScan.IS_VARIABLE_INTERVAL);
+            TraceInterval notPrecoloredIntervals = allocator.createUnhandledListByFrom(TraceLinearScan.IS_VARIABLE_INTERVAL);
 
             // allocate cpu registers
             TraceLinearScanWalker lsw = new TraceLinearScanWalker(allocator, precoloredIntervals, notPrecoloredIntervals);
--- a/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceLinearScanWalker.java	Thu Nov 19 18:09:53 2015 +0100
+++ b/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceLinearScanWalker.java	Fri Nov 20 11:26:35 2015 +0100
@@ -319,16 +319,16 @@
         return optimalSplitPos;
     }
 
+    @SuppressWarnings({"unused"})
     private int findOptimalSplitPos(TraceInterval interval, int minSplitPos, int maxSplitPos, boolean doLoopOptimization) {
-        int optimalSplitPos = findOptimalSplitPos0(interval, minSplitPos, maxSplitPos, doLoopOptimization);
+        int optimalSplitPos = findOptimalSplitPos0(minSplitPos, maxSplitPos);
         if (Debug.isLogEnabled()) {
             Debug.log("optimal split position: %d", optimalSplitPos);
         }
         return optimalSplitPos;
     }
 
-    @SuppressWarnings({"unused"})
-    private int findOptimalSplitPos0(TraceInterval interval, int minSplitPos, int maxSplitPos, boolean doLoopOptimization) {
+    private int findOptimalSplitPos0(int minSplitPos, int maxSplitPos) {
         // TODO (je) implement
         if (minSplitPos == maxSplitPos) {
             // trivial case, no optimization of split position possible
@@ -562,12 +562,20 @@
      *
      * @param spillPos position of the spill
      */
-    @SuppressWarnings("static-method")
     private void changeSpillState(TraceInterval interval, int spillPos) {
         if (TraceLinearScan.Options.LIROptTraceRAEliminateSpillMoves.getValue()) {
             switch (interval.spillState()) {
                 case NoSpillStore:
-                    // It is better to store it to memory at the definition.
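+                    /*
+                     * Instead of storing at the definition, search for an optimal spill position
+                     * between the definition and the current spill position.
+                     */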
+                    final int minSpillPos = interval.spillDefinitionPos();
+                    final int maxSpillPos = spillPos;
+
+                    final int optimalSpillPos = findOptimalSpillPos(minSpillPos, maxSpillPos);
+
+                    // assert !allocator.isBlockBegin(optimalSpillPos);
+                    assert !allocator.isBlockEnd(optimalSpillPos);
+                    assert (optimalSpillPos & 1) == 0 : "Spill pos must be even";
+
+                    interval.setSpillDefinitionPos(optimalSpillPos);
                     interval.setSpillState(SpillState.SpillStore);
                     break;
                 case SpillStore:
@@ -586,6 +594,87 @@
     }
 
     /**
+     * @param minSpillPos minimal spill position
+     * @param maxSpillPos maximal spill position
+     */
+    private int findOptimalSpillPos(int minSpillPos, int maxSpillPos) {
+        // TODO (JE): implement
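+        // clear the lowest bit to round the result down to an even opId (spill positions must be even)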
+        int optimalSpillPos = findOptimalSpillPos0(minSpillPos, maxSpillPos) & (~1);
+        if (Debug.isLogEnabled()) {
+            Debug.log("optimal spill position: %d", optimalSpillPos);
+        }
+        return optimalSpillPos;
+    }
+
+    private int findOptimalSpillPos0(int minSpillPos, int maxSpillPos) {
+        // TODO (je) implement
+        if (minSpillPos == maxSpillPos) {
+            // trivial case, no optimization of split position possible
+            if (Debug.isLogEnabled()) {
+                Debug.log("min-pos and max-pos are equal, no optimization possible");
+            }
+            return minSpillPos;
+
+        }
+        assert minSpillPos < maxSpillPos : "must be true then";
+        assert minSpillPos >= 0 : "cannot access minSpillPos - 1 otherwise";
+
+        AbstractBlockBase<?> minBlock = allocator.blockForId(minSpillPos);
+        AbstractBlockBase<?> maxBlock = allocator.blockForId(maxSpillPos);
+
+        assert minBlock.getLinearScanNumber() <= maxBlock.getLinearScanNumber() : "invalid order";
+        if (minBlock == maxBlock) {
+            // split position cannot be moved to block boundary : so split as late as possible
+            if (Debug.isLogEnabled()) {
+                Debug.log("cannot move split pos to block boundary because minPos and maxPos are in same block");
+            }
+            return maxSpillPos;
+
+        }
+        // search optimal block boundary between minSpillPos and maxSpillPos
+        if (Debug.isLogEnabled()) {
+            Debug.log("moving split pos to optimal block boundary between block B%d and B%d", minBlock.getId(), maxBlock.getId());
+        }
+
+        // currently using the same heuristic as for splitting
+        return findOptimalSpillPos(minBlock, maxBlock, maxSpillPos);
+    }
+
+    private int findOptimalSpillPos(AbstractBlockBase<?> minBlock, AbstractBlockBase<?> maxBlock, int maxSplitPos) {
+        int fromBlockNr = minBlock.getLinearScanNumber();
+        int toBlockNr = maxBlock.getLinearScanNumber();
+
+        assert 0 <= fromBlockNr && fromBlockNr < blockCount() : "out of range";
+        assert 0 <= toBlockNr && toBlockNr < blockCount() : "out of range";
+        assert fromBlockNr < toBlockNr : "must cross block boundary";
+
+        /*
+         * Try to split at end of maxBlock. If this would be after maxSplitPos, then use the begin
+         * of maxBlock. We use last instruction -2 because we want to insert the move before the
+         * block end op.
+         */
+        int optimalSplitPos = allocator.getLastLirInstructionId(maxBlock) - 2;
+        if (optimalSplitPos > maxSplitPos) {
+            optimalSplitPos = allocator.getFirstLirInstructionId(maxBlock);
+        }
+
+        // minimal block probability
+        double minProbability = maxBlock.probability();
+        for (int i = toBlockNr - 1; i >= fromBlockNr; i--) {
+            AbstractBlockBase<?> cur = blockAt(i);
+
+            if (cur.probability() < minProbability) {
+                // Block with lower probability found. Split at the end of this block.
+                minProbability = cur.probability();
+                optimalSplitPos = allocator.getLastLirInstructionId(cur) - 2;
+            }
+        }
+        assert optimalSplitPos > allocator.maxOpId() || allocator.isBlockBegin(optimalSplitPos) || allocator.isBlockEnd(optimalSplitPos + 2) : "algorithm must move split pos to block boundary";
+
+        return optimalSplitPos;
+    }
+
+    /**
      * This is called for every interval that is assigned to a stack slot.
      */
     private static void handleSpillSlot(TraceInterval interval) {