/*
 * Copyright (c) 2015, 2015, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */
package com.oracle.graal.lir.alloc.trace;

import static com.oracle.graal.compiler.common.GraalOptions.*;
import static com.oracle.graal.lir.LIRValueUtil.*;
import static com.oracle.graal.lir.alloc.trace.TraceRegisterAllocationPhase.Options.*;
import static jdk.internal.jvmci.code.ValueUtil.*;

import java.util.*;

import jdk.internal.jvmci.code.*;
import jdk.internal.jvmci.common.*;
import jdk.internal.jvmci.meta.*;

import com.oracle.graal.compiler.common.alloc.TraceBuilder.TraceBuilderResult;
import com.oracle.graal.compiler.common.cfg.*;
import com.oracle.graal.debug.*;
import com.oracle.graal.lir.*;
import com.oracle.graal.lir.StandardOp.BlockEndOp;
import com.oracle.graal.lir.StandardOp.LabelOp;
import com.oracle.graal.lir.StandardOp.MoveOp;
import com.oracle.graal.lir.alloc.lsra.*;
import com.oracle.graal.lir.alloc.lsra.Interval.RegisterPriority;
import com.oracle.graal.lir.alloc.lsra.Interval.SpillState;
import com.oracle.graal.lir.alloc.lsra.LinearScan.BlockData;
import com.oracle.graal.lir.ssi.*;

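/**
 * Trace-specific variant of the linear scan lifetime analysis. In addition to the interval
 * construction of {@link LinearScanLifetimeAnalysisPhase}, it fixes the spill state of intervals
 * that are only defined by a phi/sigma (label) and, if {@code TraceRAuseInterTraceHints} is
 * enabled, registers location hints across trace boundaries based on the SSI incoming/outgoing
 * values of adjacent traces.
 */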
public class TraceLinearScanLifetimeAnalysisPhase extends LinearScanLifetimeAnalysisPhase {

    private final TraceBuilderResult<?> traceBuilderResult;

    public TraceLinearScanLifetimeAnalysisPhase(LinearScan linearScan, TraceBuilderResult<?> traceBuilderResult) {
        super(linearScan);
        this.traceBuilderResult = traceBuilderResult;
    }

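    /**
     * Returns {@code true} if the two blocks belong to the same trace.
     */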
    private boolean sameTrace(AbstractBlockBase<?> a, AbstractBlockBase<?> b) {
        return traceBuilderResult.getTraceForBlock(b) == traceBuilderResult.getTraceForBlock(a);
    }

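    /**
     * Returns {@code true} if the trace containing {@code other} is already allocated or is the
     * trace currently being processed, i.e., its trace number is not larger than the trace number
     * of {@code currentBlock}.
     */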
    private boolean isAllocatedOrCurrent(AbstractBlockBase<?> currentBlock, AbstractBlockBase<?> other) {
        return traceBuilderResult.getTraceForBlock(other) <= traceBuilderResult.getTraceForBlock(currentBlock);
    }

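    /**
     * Sets {@code source} as the location hint of {@code target} if {@code target} has no hint
     * yet or if its current hint starts after {@code target}.
     */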
    static void setHint(final LIRInstruction op, Interval target, Interval source) {
        Interval currentHint = target.locationHint(false);
        if (currentHint == null || currentHint.from() > target.from()) {
            /*
             * Update hint if there was none or if the hint interval starts after the hinted
             * interval.
             */
            target.setLocationHint(source);
            if (Debug.isLogEnabled()) {
                Debug.log("operation at opId %d: added hint from interval %d (%s) to %d (%s)", op.id(), source.operandNumber, source, target.operandNumber, target);
            }
        }
    }

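    /**
     * Updates the spill state of {@code interval} for a definition at {@code defPos}. Definitions
     * by a {@link LabelOp} only record the position; their state is fixed up later in
     * {@link #postBuildIntervals()}.
     */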
    @Override
    protected void changeSpillDefinitionPos(LIRInstruction op, AllocatableValue operand, Interval interval, int defPos) {
        assert interval.isSplitParent() : "can only be called for split parents";

        switch (interval.spillState()) {
            case NoDefinitionFound:
                // assert interval.spillDefinitionPos() == -1 : "must not be set before";
                interval.setSpillDefinitionPos(defPos);
                if (!(op instanceof LabelOp)) {
                    // Do not update state for labels. This will be done afterwards.
                    interval.setSpillState(SpillState.NoSpillStore);
                }
                break;

            case NoSpillStore:
                assert defPos <= interval.spillDefinitionPos() : "positions are processed in reverse order when intervals are created";
                if (defPos < interval.spillDefinitionPos() - 2) {
                    // second definition found, so no spill optimization possible for this interval
                    interval.setSpillState(SpillState.NoOptimization);
                } else {
                    // two consecutive definitions (because of two-operand LIR form)
                    assert allocator.blockForId(defPos) == allocator.blockForId(interval.spillDefinitionPos()) : "block must be equal";
                }
                break;

            case NoOptimization:
                // nothing to do
                break;

            default:
                throw new BailoutException("other states not allowed at this time");
        }
    }

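    /**
     * Builds intervals as in the base class and then runs the trace-specific post-processing in
     * {@link #postBuildIntervals()}.
     */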
    @Override
    protected void buildIntervals() {
        super.buildIntervals();
        postBuildIntervals();
    }

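    /**
     * Fixes the spill state of intervals that were only defined by a phi/sigma (label) and, if
     * {@code TraceRAuseInterTraceHints} is enabled, adds inter-trace location hints.
     */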
    protected void postBuildIntervals() {
        // fix spill state for phi/sigma intervals
        for (Interval interval : allocator.intervals()) {
            if (interval != null && interval.spillState().equals(SpillState.NoDefinitionFound) && interval.spillDefinitionPos() != -1) {
                // there was a definition in a phi/sigma
                interval.setSpillState(SpillState.NoSpillStore);
            }
        }
        if (TraceRAuseInterTraceHints.getValue()) {
            addInterTraceHints();
        }
    }

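    /**
     * Registers location hints for values that flow into this trace: for every incoming value of
     * a block label, the corresponding outgoing value of a predecessor in an already allocated
     * (or the current) trace is used as a hint. Afterwards, the first range of fixed register
     * intervals is set to {@code [-1, 0]} so that it does not intersect with the incoming values.
     */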
    private void addInterTraceHints() {
        // set hints for phi/sigma intervals
        LIR lir = allocator.getLIR();
        for (AbstractBlockBase<?> block : allocator.sortedBlocks()) {
            LabelOp label = SSIUtil.incoming(lir, block);
            for (AbstractBlockBase<?> pred : block.getPredecessors()) {
                if (isAllocatedOrCurrent(block, pred)) {
                    BlockEndOp outgoing = SSIUtil.outgoing(lir, pred);
                    for (int i = 0; i < outgoing.getOutgoingSize(); i++) {
                        Value toValue = label.getIncomingValue(i);
                        if (!isIllegal(toValue)) {
                            Value fromValue = outgoing.getOutgoingValue(i);
                            assert sameTrace(block, pred) || !isVariable(fromValue) : "Unallocated variable: " + fromValue;

                            if (!isStackSlotValue(fromValue) && !isConstant(fromValue)) {
                                Interval from = allocator.getOrCreateInterval((AllocatableValue) fromValue);
                                Interval to = allocator.getOrCreateInterval((AllocatableValue) toValue);
                                setHint(label, to, from);
                            }
                        }
                    }
                }
            }
        }
        /*
         * Set the first range for fixed intervals to [-1, 0] to avoid intersection with incoming
         * values.
         */
        for (Interval interval : allocator.intervals()) {
            if (interval != null && isRegister(interval.operand)) {
                Range range = interval.first();
                if (range == Range.EndMarker) {
                    interval.addRange(-1, 0);
                } else if (range.from == 0 && range.to == 1) {
                    range.from = -1;
                    range.to = 0;
                }
            }
        }
    }

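    /**
     * Values defined by a {@link LabelOp} get {@link RegisterPriority#None}; all other operations
     * are handled by the base class.
     */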
    @Override
    protected RegisterPriority registerPriorityOfOutputOperand(LIRInstruction op) {
        if (op instanceof LabelOp) {
            // skip method header
            return RegisterPriority.None;
        }
        return super.registerPriorityOfOutputOperand(op);
    }

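    /**
     * Skips all {@link MoveOp}s so that method argument moves are not optimized; other operations
     * are handled by the base class.
     */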
    @Override
    protected void handleMethodArguments(LIRInstruction op) {
        if (op instanceof MoveOp) {
            // do not optimize method arguments
            return;
        }
        super.handleMethodArguments(op);
    }

    /**
     * Performs a backward dataflow analysis to compute global live sets (i.e.
     * {@link BlockData#liveIn} and {@link BlockData#liveOut}) for each block.
     */
    @Override
    protected void computeGlobalLiveSets() {
        try (Indent indent = Debug.logAndIndent("compute global live sets")) {
            int numBlocks = allocator.blockCount();
            boolean changeOccurred;
            boolean changeOccurredInBlock;
            int iterationCount = 0;
            BitSet liveOut = new BitSet(allocator.liveSetSize()); // scratch set for calculations

            /*
             * Perform a backward dataflow analysis to compute liveOut and liveIn for each block.
             * The loop is executed until a fixpoint is reached (no changes in an iteration).
             */
            do {
                changeOccurred = false;

                try (Indent indent2 = Debug.logAndIndent("new iteration %d", iterationCount)) {

                    // iterate all blocks in reverse order
                    for (int i = numBlocks - 1; i >= 0; i--) {
                        AbstractBlockBase<?> block = allocator.blockAt(i);
                        BlockData blockSets = allocator.getBlockData(block);

                        changeOccurredInBlock = false;

                        /* liveOut(block) is the union of liveIn(sux), for successors sux of block. */
                        int n = block.getSuccessorCount();
                        if (n > 0) {
                            // block has successors
                            liveOut.clear();
                            for (AbstractBlockBase<?> successor : block.getSuccessors()) {
                                if (allocator.sortedBlocks().contains(successor)) {
                                    liveOut.or(allocator.getBlockData(successor).liveIn);
                                }
                            }

                            if (!blockSets.liveOut.equals(liveOut)) {
                                /*
                                 * A change occurred. Swap the old and new live out sets to avoid
                                 * copying.
                                 */
                                BitSet temp = blockSets.liveOut;
                                blockSets.liveOut = liveOut;
                                liveOut = temp;

                                changeOccurred = true;
                                changeOccurredInBlock = true;
                            }
                        }

                        if (iterationCount == 0 || changeOccurredInBlock) {
                            /*
                             * liveIn(block) is the union of liveGen(block) with (liveOut(block) &
                             * !liveKill(block)).
                             *
                             * Note: liveIn has to be computed only in first iteration or if liveOut
                             * has changed!
                             */
                            BitSet liveIn = blockSets.liveIn;
                            liveIn.clear();
                            liveIn.or(blockSets.liveOut);
                            liveIn.andNot(blockSets.liveKill);
                            liveIn.or(blockSets.liveGen);

                            if (Debug.isLogEnabled()) {
                                Debug.log("block %d: livein = %s,  liveout = %s", block.getId(), liveIn, blockSets.liveOut);
                            }
                        }
                    }
                    iterationCount++;

                    if (changeOccurred && iterationCount > 50) {
                        throw new BailoutException("too many iterations in computeGlobalLiveSets");
                    }
                }
            } while (changeOccurred);

            if (DetailedAsserts.getValue()) {
                verifyLiveness();
            }

            // check that the liveIn set of the first block is empty
            AbstractBlockBase<?> startBlock = allocator.blockAt(0);
            if (allocator.getBlockData(startBlock).liveIn.cardinality() != 0) {
                if (DetailedAsserts.getValue()) {
                    reportFailure(numBlocks);
                }
                // bailout if this occurs in product mode.
                throw new JVMCIError("liveIn set of first block must be empty: " + allocator.getBlockData(startBlock).liveIn);
            }
        }
    }
}