/*
 * Copyright (c) 2015, 2015, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */
package com.oracle.graal.lir.alloc.lsra;

import static com.oracle.graal.compiler.common.GraalOptions.*;
import static com.oracle.graal.lir.LIRValueUtil.*;
import static jdk.internal.jvmci.code.ValueUtil.*;

import java.util.*;

import jdk.internal.jvmci.code.*;
import com.oracle.graal.debug.*;
import jdk.internal.jvmci.meta.*;

import com.oracle.graal.compiler.common.alloc.*;
import com.oracle.graal.compiler.common.cfg.*;
import com.oracle.graal.lir.*;
import com.oracle.graal.lir.StandardOp.MoveOp;
import com.oracle.graal.lir.alloc.lsra.Interval.SpillState;
import com.oracle.graal.lir.alloc.lsra.LinearScan.IntervalPredicate;
import com.oracle.graal.lir.gen.*;
import com.oracle.graal.lir.gen.LIRGeneratorTool.SpillMoveFactory;
import com.oracle.graal.lir.phases.*;

public class LinearScanEliminateSpillMovePhase extends AllocationPhase {

    private static final IntervalPredicate mustStoreAtDefinition = new LinearScan.IntervalPredicate() {

        @Override
        public boolean apply(Interval i) {
            return i.isSplitParent() && i.spillState() == SpillState.StoreAtDefinition;
        }
    };

    protected final LinearScan allocator;

    protected LinearScanEliminateSpillMovePhase(LinearScan allocator) {
        this.allocator = allocator;
    }

    @Override
    protected <B extends AbstractBlockBase<B>> void run(TargetDescription target, LIRGenerationResult lirGenRes, List<B> codeEmittingOrder, List<B> linearScanOrder, SpillMoveFactory spillMoveFactory,
                    RegisterAllocationConfig registerAllocationConfig) {
        eliminateSpillMoves();
    }

    /**
     * @return the index of the first instruction that is of interest for
     *         {@link #eliminateSpillMoves()}
     */
    protected int firstInstructionOfInterest() {
        // skip the first because it is always a label
        return 1;
    }

    // called once before assignment of register numbers
    void eliminateSpillMoves() {
        try (Indent indent = Debug.logAndIndent("Eliminating unnecessary spill moves")) {

            /*
             * collect all intervals that must be stored after their definition. The list is sorted
             * by Interval.spillDefinitionPos.
             */
            Interval interval;
            interval = allocator.createUnhandledLists(mustStoreAtDefinition, null).first;
            if (DetailedAsserts.getValue()) {
                checkIntervals(interval);
            }

            LIRInsertionBuffer insertionBuffer = new LIRInsertionBuffer();
            for (AbstractBlockBase<?> block : allocator.sortedBlocks()) {
                try (Indent indent1 = Debug.logAndIndent("Handle %s", block)) {
                    List<LIRInstruction> instructions = allocator.getLIR().getLIRforBlock(block);
                    int numInst = instructions.size();

                    // iterate all instructions of the block.
                    for (int j = firstInstructionOfInterest(); j < numInst; j++) {
                        LIRInstruction op = instructions.get(j);
                        int opId = op.id();

                        if (opId == -1) {
                            MoveOp move = (MoveOp) op;
                            /*
                             * Remove move from register to stack if the stack slot is guaranteed to
                             * be correct. Only moves that have been inserted by LinearScan can be
                             * removed.
                             */
                            if (canEliminateSpillMove(block, move)) {
                                /*
                                 * Move target is a stack slot that is always correct, so eliminate
                                 * instruction.
                                 */
                                if (Debug.isLogEnabled()) {
                                    Debug.log("eliminating move from interval %d (%s) to %d (%s) in block %s", allocator.operandNumber(move.getInput()), move.getInput(),
                                                    allocator.operandNumber(move.getResult()), move.getResult(), block);
                                }

                                // null-instructions are deleted by assignRegNum
                                instructions.set(j, null);
                            }

                        } else {
                            /*
                             * Insert move from register to stack just after the beginning of the
                             * interval.
                             */
                            assert interval == Interval.EndMarker || interval.spillDefinitionPos() >= opId : "invalid order";
                            assert interval == Interval.EndMarker || (interval.isSplitParent() && interval.spillState() == SpillState.StoreAtDefinition) : "invalid interval";

                            while (interval != Interval.EndMarker && interval.spillDefinitionPos() == opId) {
                                if (!interval.canMaterialize()) {
                                    if (!insertionBuffer.initialized()) {
                                        /*
                                         * prepare insertion buffer (appended when all instructions
                                         * in the block are processed)
                                         */
                                        insertionBuffer.init(instructions);
                                    }

                                    AllocatableValue fromLocation = interval.location();
                                    AllocatableValue toLocation = LinearScan.canonicalSpillOpr(interval);
                                    if (!fromLocation.equals(toLocation)) {

                                        assert isRegister(fromLocation) : "from operand must be a register but is: " + fromLocation + " toLocation=" + toLocation + " spillState=" +
                                                        interval.spillState();
                                        assert isStackSlotValue(toLocation) : "to operand must be a stack slot";

                                        LIRInstruction move = allocator.getSpillMoveFactory().createMove(toLocation, fromLocation);
                                        insertionBuffer.append(j + 1, move);

                                        if (Debug.isLogEnabled()) {
                                            Debug.log("inserting move after definition of interval %d to stack slot %s at opId %d", interval.operandNumber, interval.spillSlot(), opId);
                                        }
                                    }
                                }
                                interval = interval.next;
                            }
                        }
                    } // end of instruction iteration

                    if (insertionBuffer.initialized()) {
                        insertionBuffer.finish();
                    }
                }
            } // end of block iteration

            assert interval == Interval.EndMarker : "missed an interval";
        }
    }

    /**
     * @param block The block {@code move} is located in.
     * @param move Spill move.
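     * @return {@code true} if the spill {@code move} can be eliminated because its target stack
     *         slot is guaranteed to hold the correct value.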
     */
    protected boolean canEliminateSpillMove(AbstractBlockBase<?> block, MoveOp move) {
        assert isVariable(move.getResult()) : "LinearScan inserts only moves to variables: " + move;

        Interval curInterval = allocator.intervalFor(move.getResult());

        if (!isRegister(curInterval.location()) && curInterval.alwaysInMemory()) {
            assert isStackSlotValue(curInterval.location()) : "Not a stack slot: " + curInterval.location();
            return true;
        }
        return false;
    }

    private static void checkIntervals(Interval interval) {
        Interval prev = null;
        Interval temp = interval;
        while (temp != Interval.EndMarker) {
            assert temp.spillDefinitionPos() > 0 : "invalid spill definition pos";
            if (prev != null) {
                assert temp.from() >= prev.from() : "intervals not sorted";
                assert temp.spillDefinitionPos() >= prev.spillDefinitionPos() : "when intervals are sorted by from, they must also be sorted by spillDefinitionPos";
            }

            assert temp.spillSlot() != null || temp.canMaterialize() : "interval has no spill slot assigned";
            assert temp.spillDefinitionPos() >= temp.from() : "invalid order";
            assert temp.spillDefinitionPos() <= temp.from() + 2 : "only intervals defined once at their start-pos can be optimized";

            if (Debug.isLogEnabled()) {
                Debug.log("interval %d (from %d to %d) must be stored at %d", temp.operandNumber, temp.from(), temp.to(), temp.spillDefinitionPos());
            }

            prev = temp;
            temp = temp.next;
        }
    }

}