changeset 23124:23f9a72eb037

TraceRA: move Trace Linear Scan implementation into sub-package.
author Josef Eisl <josef.eisl@jku.at>
date Mon, 30 Nov 2015 17:18:36 +0100
parents fa5100c27dac
children fe57bf1e3595
files        graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/FixedInterval.java
             graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/FixedRange.java
             graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/IntervalHint.java
             graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/RegisterVerifier.java
             graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceGlobalMoveResolver.java
             graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceInterval.java
             graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceIntervalDumper.java
             graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceIntervalWalker.java
             graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceLinearScan.java
             graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceLinearScanAllocationPhase.java
             graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceLinearScanAssignLocationsPhase.java
             graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceLinearScanEliminateSpillMovePhase.java
             graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceLinearScanLifetimeAnalysisPhase.java
             graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceLinearScanRegisterAllocationPhase.java
             graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceLinearScanResolveDataFlowPhase.java
             graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceLinearScanWalker.java
             graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceLocalMoveResolver.java
             graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceRegisterAllocationPhase.java
             graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceUtil.java
             graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/UsePosList.java
             graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/lsra/FixedInterval.java
             graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/lsra/FixedRange.java
             graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/lsra/IntervalHint.java
             graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/lsra/RegisterVerifier.java
             graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/lsra/TraceInterval.java
             graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/lsra/TraceIntervalDumper.java
             graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/lsra/TraceIntervalWalker.java
             graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/lsra/TraceLinearScan.java
             graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/lsra/TraceLinearScanAllocationPhase.java
             graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/lsra/TraceLinearScanAssignLocationsPhase.java
             graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/lsra/TraceLinearScanEliminateSpillMovePhase.java
             graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/lsra/TraceLinearScanLifetimeAnalysisPhase.java
             graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/lsra/TraceLinearScanRegisterAllocationPhase.java
             graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/lsra/TraceLinearScanResolveDataFlowPhase.java
             graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/lsra/TraceLinearScanWalker.java
             graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/lsra/TraceLocalMoveResolver.java
             graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/lsra/UsePosList.java
diffstat 37 files changed, 6631 insertions(+), 6628 deletions(-) [+]
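For code elsewhere in the tree that referred to these classes under their old package, the move amounts to an import change only. A hypothetical client import, before and after the move (the class name is taken from the file list above; whether such external references exist is not visible in this changeset):

    // hypothetical client code, before this changeset
    import com.oracle.graal.lir.alloc.trace.TraceLinearScan;

    // after this changeset: the linear-scan classes live in the new lsra sub-package
    import com.oracle.graal.lir.alloc.trace.lsra.TraceLinearScan;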
line diff
--- a/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/FixedInterval.java	Mon Nov 30 15:07:18 2015 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,310 +0,0 @@
-/*
- * Copyright (c) 2015, 2015, Oracle and/or its affiliates. All rights reserved.
- * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
- *
- * This code is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License version 2 only, as
- * published by the Free Software Foundation.
- *
- * This code is distributed in the hope that it will be useful, but WITHOUT
- * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
- * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
- * version 2 for more details (a copy is included in the LICENSE file that
- * accompanied this code).
- *
- * You should have received a copy of the GNU General Public License version
- * 2 along with this work; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
- *
- * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
- * or visit www.oracle.com if you need additional information or have any
- * questions.
- */
-package com.oracle.graal.lir.alloc.trace;
-
-import static jdk.vm.ci.code.ValueUtil.asRegister;
-import static jdk.vm.ci.code.ValueUtil.isRegister;
-import jdk.vm.ci.meta.AllocatableValue;
-import jdk.vm.ci.meta.Value;
-
-import com.oracle.graal.lir.LIRInstruction;
-
-/**
- * Represents a fixed interval.
- */
-public final class FixedInterval extends IntervalHint {
-
-    static final class FixedList {
-
-        public FixedInterval fixed;
-
-        public FixedList(FixedInterval fixed) {
-            this.fixed = fixed;
-        }
-
-        /**
-         * Gets the fixed list.
-         */
-        public FixedInterval getFixed() {
-            return fixed;
-        }
-
-        /**
-         * Sets the fixed list.
-         */
-        public void setFixed(FixedInterval list) {
-            fixed = list;
-        }
-
-        /**
-         * Adds an interval to a list sorted by {@linkplain FixedInterval#currentFrom() current
-         * from} positions.
-         *
-         * @param interval the interval to add
-         */
-        public void addToListSortedByCurrentFromPositions(FixedInterval interval) {
-            FixedInterval list = getFixed();
-            FixedInterval prev = null;
-            FixedInterval cur = list;
-            while (cur.currentFrom() < interval.currentFrom()) {
-                prev = cur;
-                cur = cur.next;
-            }
-            FixedInterval result = list;
-            if (prev == null) {
-                // add to head of list
-                result = interval;
-            } else {
-                // add before 'cur'
-                prev.next = interval;
-            }
-            interval.next = cur;
-            setFixed(result);
-        }
-
-    }
-
-    /**
-     * The fixed operand of this interval.
-     */
-    public final AllocatableValue operand;
-
-    /**
-     * The head of the list of ranges describing this interval. This list is sorted by
-     * {@linkplain LIRInstruction#id instruction ids}.
-     */
-    private FixedRange first;
-
-    /**
-     * Iterator used to traverse the ranges of an interval.
-     */
-    private FixedRange current;
-
-    /**
-     * Link to next interval in a sorted list of intervals that ends with {@link #EndMarker}.
-     */
-    FixedInterval next;
-
-    private int cachedTo; // cached value: to of last range (-1: not cached)
-
-    public FixedRange first() {
-        return first;
-    }
-
-    @Override
-    public int from() {
-        return first.from;
-    }
-
-    public int to() {
-        if (cachedTo == -1) {
-            cachedTo = calcTo();
-        }
-        assert cachedTo == calcTo() : "invalid cached value";
-        return cachedTo;
-    }
-
-    // test intersection
-    boolean intersects(TraceInterval i) {
-        return first.intersects(i);
-    }
-
-    int intersectsAt(TraceInterval i) {
-        return first.intersectsAt(i);
-    }
-
-    // range iteration
-    void rewindRange() {
-        current = first;
-    }
-
-    void nextRange() {
-        assert this != EndMarker : "not allowed on sentinel";
-        current = current.next;
-    }
-
-    int currentFrom() {
-        return current.from;
-    }
-
-    int currentTo() {
-        return current.to;
-    }
-
-    boolean currentAtEnd() {
-        return current == FixedRange.EndMarker;
-    }
-
-    boolean currentIntersects(TraceInterval it) {
-        return current.intersects(it);
-    }
-
-    int currentIntersectsAt(TraceInterval it) {
-        return current.intersectsAt(it);
-    }
-
-    // range creation
-    public void setFrom(int from) {
-        assert !isEmpty();
-        first().from = from;
-    }
-
-    private boolean isEmpty() {
-        return first() == FixedRange.EndMarker;
-    }
-
-    public void addRange(int from, int to) {
-        if (isEmpty()) {
-            first = new FixedRange(from, to, first());
-            return;
-        }
-        if (to <= to() && from >= from()) {
-            return;
-        }
-        if (from() == to) {
-            first().from = from;
-        } else {
-            first = new FixedRange(from, to, first());
-        }
-    }
-
-    @Override
-    public AllocatableValue location() {
-        return operand;
-    }
-
-    /**
-     * Sentinel interval to denote the end of an interval list.
-     */
-    static final FixedInterval EndMarker = new FixedInterval(Value.ILLEGAL);
-
-    FixedInterval(AllocatableValue operand) {
-        assert operand != null;
-        this.operand = operand;
-        this.first = FixedRange.EndMarker;
-        this.current = FixedRange.EndMarker;
-        this.next = FixedInterval.EndMarker;
-        this.cachedTo = -1;
-    }
-
-    int calcTo() {
-        assert first != FixedRange.EndMarker : "interval has no range";
-
-        FixedRange r = first;
-        while (r.next != FixedRange.EndMarker) {
-            r = r.next;
-        }
-        return r.to;
-    }
-
-    // returns true if the opId is inside the interval
-    boolean covers(int opId, LIRInstruction.OperandMode mode) {
-        FixedRange cur = first;
-
-        while (cur != FixedRange.EndMarker && cur.to < opId) {
-            cur = cur.next;
-        }
-        if (cur != FixedRange.EndMarker) {
-            assert cur.to != cur.next.from : "ranges not separated";
-
-            if (mode == LIRInstruction.OperandMode.DEF) {
-                return cur.from <= opId && opId < cur.to;
-            } else {
-                return cur.from <= opId && opId <= cur.to;
-            }
-        }
-        return false;
-    }
-
-    // returns true if the interval has any hole between holeFrom and holeTo
-    // (even if the hole has only the length 1)
-    boolean hasHoleBetween(int holeFrom, int holeTo) {
-        assert holeFrom < holeTo : "check";
-        assert from() <= holeFrom && holeTo <= to() : "index out of interval";
-
-        FixedRange cur = first;
-        while (cur != FixedRange.EndMarker) {
-            assert cur.to < cur.next.from : "no space between ranges";
-
-            // hole-range starts before this range -> hole
-            if (holeFrom < cur.from) {
-                return true;
-
-                // hole-range completely inside this range -> no hole
-            } else {
-                if (holeTo <= cur.to) {
-                    return false;
-
-                    // overlapping of hole-range with this range -> hole
-                } else {
-                    if (holeFrom <= cur.to) {
-                        return true;
-                    }
-                }
-            }
-
-            cur = cur.next;
-        }
-
-        return false;
-    }
-
-    @Override
-    public String toString() {
-        String from = "?";
-        String to = "?";
-        if (first != null && first != FixedRange.EndMarker) {
-            from = String.valueOf(from());
-            // to() may cache a computed value, modifying the current object, which is a bad idea
-            // for a printing function. Compute it directly instead.
-            to = String.valueOf(calcTo());
-        }
-        String locationString = "@" + this.operand;
-        return asRegister(operand).number + ":" + operand + (isRegister(operand) ? "" : locationString) + "[" + from + "," + to + "]";
-    }
-
-    /**
-     * Gets a single line string for logging the details of this interval to a log stream.
-     */
-    @Override
-    public String logString(TraceLinearScan allocator) {
-        StringBuilder buf = new StringBuilder(100);
-        buf.append("fix ").append(asRegister(operand).number).append(':').append(operand).append(' ');
-
-        buf.append(" ranges{");
-
-        // print ranges
-        FixedRange cur = first;
-        while (cur != FixedRange.EndMarker) {
-            if (cur != first) {
-                buf.append(", ");
-            }
-            buf.append(cur);
-            cur = cur.next;
-            assert cur != null : "range list not closed with range sentinel";
-        }
-        buf.append("}");
-        return buf.toString();
-    }
-
-}
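FixedInterval keeps its live ranges in a singly linked list that addRange() only ever widens or prepends at the head, so the list stays sorted by instruction id as long as ranges are added from back to front during lifetime analysis. The following self-contained sketch illustrates that head-coalescing behaviour; the names are hypothetical and this is only a minimal re-implementation of the idea, not the Graal class above:

    // RangeSketch: hypothetical stand-in for FixedRange/FixedInterval.addRange().
    // Ranges are half-open [from, to); add() either widens the head range (when the
    // new range abuts it) or prepends a new node, keeping the list sorted by 'from'.
    final class RangeSketch {
        int from;
        int to;
        RangeSketch next;

        RangeSketch(int from, int to, RangeSketch next) {
            this.from = from;
            this.to = to;
            this.next = next;
        }

        static RangeSketch add(RangeSketch head, int from, int to) {
            if (head == null) {
                return new RangeSketch(from, to, null);   // first range
            }
            if (head.from == to) {
                head.from = from;                          // abuts the head: coalesce
                return head;
            }
            return new RangeSketch(from, to, head);        // otherwise prepend
        }

        public static void main(String[] args) {
            // added back to front, as lifetime analysis would do
            RangeSketch head = add(add(add(null, 20, 30), 10, 16), 6, 10);
            for (RangeSketch cur = head; cur != null; cur = cur.next) {
                System.out.println("[" + cur.from + ", " + cur.to + ")");   // [6, 16) then [20, 30)
            }
        }
    }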
--- a/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/FixedRange.java	Mon Nov 30 15:07:18 2015 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,109 +0,0 @@
-/*
- * Copyright (c) 2015, 2015, Oracle and/or its affiliates. All rights reserved.
- * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
- *
- * This code is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License version 2 only, as
- * published by the Free Software Foundation.
- *
- * This code is distributed in the hope that it will be useful, but WITHOUT
- * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
- * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
- * version 2 for more details (a copy is included in the LICENSE file that
- * accompanied this code).
- *
- * You should have received a copy of the GNU General Public License version
- * 2 along with this work; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
- *
- * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
- * or visit www.oracle.com if you need additional information or have any
- * questions.
- */
-package com.oracle.graal.lir.alloc.trace;
-
-/**
- * Represents a range of integers from a start (inclusive) to an end (exclusive).
- */
-public final class FixedRange {
-
-    public static final FixedRange EndMarker = new FixedRange(Integer.MAX_VALUE, Integer.MAX_VALUE, null);
-
-    /**
-     * The start of the range, inclusive.
-     */
-    public int from;
-
-    /**
-     * The end of the range, exclusive.
-     */
-    public int to;
-
-    /**
-     * A link to allow the range to be put into a singly linked list.
-     */
-    public FixedRange next;
-
-    boolean intersects(TraceInterval i) {
-        return intersectsAt(i) != -1;
-    }
-
-    /**
-     * Creates a new range.
-     *
-     * @param from the start of the range, inclusive
-     * @param to the end of the range, exclusive
-     * @param next link to the next range in a linked list
-     */
-    FixedRange(int from, int to, FixedRange next) {
-        this.from = from;
-        this.to = to;
-        this.next = next;
-    }
-
-    int intersectsAt(TraceInterval other) {
-        FixedRange range = this;
-        assert other != null : "null ranges not allowed";
-        assert range != EndMarker && other != TraceInterval.EndMarker : "empty ranges not allowed";
-        int intervalFrom = other.from();
-        int intervalTo = other.to();
-
-        do {
-            if (range.from < intervalFrom) {
-                if (range.to <= intervalFrom) {
-                    range = range.next;
-                    if (range == EndMarker) {
-                        return -1;
-                    }
-                } else {
-                    return intervalFrom;
-                }
-            } else {
-                if (intervalFrom < range.from) {
-                    if (intervalTo <= range.from) {
-                        return -1;
-                    }
-                    return range.from;
-                } else {
-                    assert range.from == intervalFrom;
-                    if (range.from == range.to) {
-                        range = range.next;
-                        if (range == EndMarker) {
-                            return -1;
-                        }
-                    } else {
-                        if (intervalFrom == intervalTo) {
-                            return -1;
-                        }
-                        return range.from;
-                    }
-                }
-            }
-        } while (true);
-    }
-
-    @Override
-    public String toString() {
-        return "[" + from + ", " + to + "]";
-    }
-}
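intersectsAt() above walks the fixed interval's range list and reports the first position at which it overlaps the given TraceInterval, or -1 if the two are disjoint. For a single pair of non-empty half-open ranges the test it performs boils down to comparing max(from) with min(to); a minimal standalone sketch of that reduction (hypothetical names, not the Graal API):

    // OverlapSketch: first common position of two non-empty half-open ranges, or -1.
    final class OverlapSketch {

        static int overlapStart(int aFrom, int aTo, int bFrom, int bTo) {
            int start = Math.max(aFrom, bFrom);
            int end = Math.min(aTo, bTo);
            return start < end ? start : -1;
        }

        public static void main(String[] args) {
            System.out.println(overlapStart(2, 8, 6, 12));  // 6
            System.out.println(overlapStart(2, 6, 6, 12));  // -1: exclusive end, the ranges only touch
        }
    }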
--- a/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/IntervalHint.java	Mon Nov 30 15:07:18 2015 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,37 +0,0 @@
-/*
- * Copyright (c) 2015, 2015, Oracle and/or its affiliates. All rights reserved.
- * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
- *
- * This code is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License version 2 only, as
- * published by the Free Software Foundation.
- *
- * This code is distributed in the hope that it will be useful, but WITHOUT
- * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
- * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
- * version 2 for more details (a copy is included in the LICENSE file that
- * accompanied this code).
- *
- * You should have received a copy of the GNU General Public License version
- * 2 along with this work; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
- *
- * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
- * or visit www.oracle.com if you need additional information or have any
- * questions.
- */
-package com.oracle.graal.lir.alloc.trace;
-
-import jdk.vm.ci.meta.AllocatableValue;
-
-/**
- * An interval that is a hint for a {@code TraceInterval}.
- */
-abstract class IntervalHint {
-
-    public abstract AllocatableValue location();
-
-    public abstract int from();
-
-    public abstract String logString(TraceLinearScan allocator);
-}
--- a/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/RegisterVerifier.java	Mon Nov 30 15:07:18 2015 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,264 +0,0 @@
-/*
- * Copyright (c) 2009, 2012, Oracle and/or its affiliates. All rights reserved.
- * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
- *
- * This code is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License version 2 only, as
- * published by the Free Software Foundation.
- *
- * This code is distributed in the hope that it will be useful, but WITHOUT
- * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
- * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
- * version 2 for more details (a copy is included in the LICENSE file that
- * accompanied this code).
- *
- * You should have received a copy of the GNU General Public License version
- * 2 along with this work; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
- *
- * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
- * or visit www.oracle.com if you need additional information or have any
- * questions.
- */
-package com.oracle.graal.lir.alloc.trace;
-
-import static jdk.vm.ci.code.ValueUtil.asRegister;
-import static jdk.vm.ci.code.ValueUtil.isRegister;
-
-import java.util.ArrayList;
-import java.util.EnumSet;
-import java.util.List;
-
-import jdk.vm.ci.code.Register;
-import jdk.vm.ci.common.JVMCIError;
-import jdk.vm.ci.meta.Value;
-
-import com.oracle.graal.compiler.common.cfg.AbstractBlockBase;
-import com.oracle.graal.compiler.common.util.ArrayMap;
-import com.oracle.graal.debug.Debug;
-import com.oracle.graal.debug.Debug.Scope;
-import com.oracle.graal.debug.Indent;
-import com.oracle.graal.lir.InstructionValueConsumer;
-import com.oracle.graal.lir.LIRInstruction;
-import com.oracle.graal.lir.LIRInstruction.OperandFlag;
-import com.oracle.graal.lir.LIRInstruction.OperandMode;
-
-/**
- */
-final class RegisterVerifier {
-
-    TraceLinearScan allocator;
-    List<AbstractBlockBase<?>> workList; // all blocks that must be processed
-    ArrayMap<TraceInterval[]> savedStates; // saved information of previous check
-
-    // simplified access to methods of LinearScan
-    TraceInterval intervalAt(Value operand) {
-        return allocator.intervalFor(operand);
-    }
-
-    // currently, only registers are processed
-    int stateSize() {
-        return allocator.numRegisters();
-    }
-
-    // accessors
-    TraceInterval[] stateForBlock(AbstractBlockBase<?> block) {
-        return savedStates.get(block.getId());
-    }
-
-    void setStateForBlock(AbstractBlockBase<?> block, TraceInterval[] savedState) {
-        savedStates.put(block.getId(), savedState);
-    }
-
-    void addToWorkList(AbstractBlockBase<?> block) {
-        if (!workList.contains(block)) {
-            workList.add(block);
-        }
-    }
-
-    RegisterVerifier(TraceLinearScan allocator) {
-        this.allocator = allocator;
-        workList = new ArrayList<>(16);
-        this.savedStates = new ArrayMap<>();
-
-    }
-
-    @SuppressWarnings("try")
-    void verify(AbstractBlockBase<?> start) {
-        try (Scope s = Debug.scope("RegisterVerifier")) {
-            // setup input registers (method arguments) for first block
-            TraceInterval[] inputState = new TraceInterval[stateSize()];
-            setStateForBlock(start, inputState);
-            addToWorkList(start);
-
-            // main loop for verification
-            do {
-                AbstractBlockBase<?> block = workList.get(0);
-                workList.remove(0);
-
-                processBlock(block);
-            } while (!workList.isEmpty());
-        }
-    }
-
-    @SuppressWarnings("try")
-    private void processBlock(AbstractBlockBase<?> block) {
-        try (Indent indent = Debug.logAndIndent("processBlock B%d", block.getId())) {
-            // must copy state because it is modified
-            TraceInterval[] inputState = copy(stateForBlock(block));
-
-            try (Indent indent2 = Debug.logAndIndent("Input-State of intervals:")) {
-                printState(inputState);
-            }
-
-            // process all operations of the block
-            processOperations(block, inputState);
-
-            try (Indent indent2 = Debug.logAndIndent("Output-State of intervals:")) {
-                printState(inputState);
-            }
-
-            // iterate all successors
-            for (AbstractBlockBase<?> succ : block.getSuccessors()) {
-                processSuccessor(succ, inputState);
-            }
-        }
-    }
-
-    protected void printState(TraceInterval[] inputState) {
-        for (int i = 0; i < stateSize(); i++) {
-            Register reg = allocator.getRegisters()[i];
-            assert reg.number == i;
-            if (inputState[i] != null) {
-                Debug.log(" %6s %4d  --  %s", reg, inputState[i].operandNumber, inputState[i]);
-            } else {
-                Debug.log(" %6s   __", reg);
-            }
-        }
-    }
-
-    private void processSuccessor(AbstractBlockBase<?> block, TraceInterval[] inputState) {
-        TraceInterval[] savedState = stateForBlock(block);
-
-        if (savedState != null) {
-            // this block was already processed before.
-            // check if new inputState is consistent with savedState
-
-            boolean savedStateCorrect = true;
-            for (int i = 0; i < stateSize(); i++) {
-                if (inputState[i] != savedState[i]) {
-                    // current inputState and previous savedState assume a different
-                    // interval in this register -> assume that this register is invalid
-                    if (savedState[i] != null) {
-                        // invalidate old calculation only if it assumed that
-                        // register was valid. when the register was already invalid,
-                        // then the old calculation was correct.
-                        savedStateCorrect = false;
-                        savedState[i] = null;
-
-                        Debug.log("processSuccessor B%d: invalidating slot %d", block.getId(), i);
-                    }
-                }
-            }
-
-            if (savedStateCorrect) {
-                // already processed block with correct inputState
-                Debug.log("processSuccessor B%d: previous visit already correct", block.getId());
-            } else {
-                // must re-visit this block
-                Debug.log("processSuccessor B%d: must re-visit because input state changed", block.getId());
-                addToWorkList(block);
-            }
-
-        } else {
-            // block was not processed before, so set initial inputState
-            Debug.log("processSuccessor B%d: initial visit", block.getId());
-
-            setStateForBlock(block, copy(inputState));
-            addToWorkList(block);
-        }
-    }
-
-    static TraceInterval[] copy(TraceInterval[] inputState) {
-        return inputState.clone();
-    }
-
-    static void statePut(TraceInterval[] inputState, Value location, TraceInterval interval) {
-        if (location != null && isRegister(location)) {
-            Register reg = asRegister(location);
-            int regNum = reg.number;
-            if (interval != null) {
-                Debug.log("%s = %s", reg, interval.operand);
-            } else if (inputState[regNum] != null) {
-                Debug.log("%s = null", reg);
-            }
-
-            inputState[regNum] = interval;
-        }
-    }
-
-    static boolean checkState(AbstractBlockBase<?> block, LIRInstruction op, TraceInterval[] inputState, Value operand, Value reg, TraceInterval interval) {
-        if (reg != null && isRegister(reg)) {
-            if (inputState[asRegister(reg).number] != interval) {
-                throw new JVMCIError(
-                                "Error in register allocation: operation (%s) in block %s expected register %s (operand %s) to contain the value of interval %s but data-flow says it contains interval %s",
-                                op, block, reg, operand, interval, inputState[asRegister(reg).number]);
-            }
-        }
-        return true;
-    }
-
-    void processOperations(AbstractBlockBase<?> block, final TraceInterval[] inputState) {
-        List<LIRInstruction> ops = allocator.getLIR().getLIRforBlock(block);
-        InstructionValueConsumer useConsumer = new InstructionValueConsumer() {
-
-            @Override
-            public void visitValue(LIRInstruction op, Value operand, OperandMode mode, EnumSet<OperandFlag> flags) {
-                // we skip spill moves inserted by the spill position optimization
-                if (TraceLinearScan.isVariableOrRegister(operand) && allocator.isProcessed(operand) && op.id() != TraceLinearScan.DOMINATOR_SPILL_MOVE_ID) {
-                    TraceInterval interval = intervalAt(operand);
-                    if (op.id() != -1) {
-                        interval = interval.getSplitChildAtOpId(op.id(), mode, allocator);
-                    }
-
-                    assert checkState(block, op, inputState, interval.operand, interval.location(), interval.splitParent());
-                }
-            }
-        };
-
-        InstructionValueConsumer defConsumer = (op, operand, mode, flags) -> {
-            if (TraceLinearScan.isVariableOrRegister(operand) && allocator.isProcessed(operand)) {
-                TraceInterval interval = intervalAt(operand);
-                if (op.id() != -1) {
-                    interval = interval.getSplitChildAtOpId(op.id(), mode, allocator);
-                }
-
-                statePut(inputState, interval.location(), interval.splitParent());
-            }
-        };
-
-        // visit all instructions of the block
-        for (int i = 0; i < ops.size(); i++) {
-            final LIRInstruction op = ops.get(i);
-
-            if (Debug.isLogEnabled()) {
-                Debug.log("%s", op.toStringWithIdPrefix());
-            }
-
-            // check if input operands are correct
-            op.visitEachInput(useConsumer);
-            // invalidate all caller save registers at calls
-            if (op.destroysCallerSavedRegisters()) {
-                for (Register r : allocator.getRegisterAllocationConfig().getRegisterConfig().getCallerSaveRegisters()) {
-                    statePut(inputState, r.asValue(), null);
-                }
-            }
-            op.visitEachAlive(useConsumer);
-            // set temp operands (some operations use temp operands also as output operands, so
-            // can't set them null)
-            op.visitEachTemp(defConsumer);
-            // set output operands
-            op.visitEachOutput(defConsumer);
-        }
-    }
-}
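RegisterVerifier above is a worklist fixed-point pass: the register state saved for each block is compared with the state arriving from a predecessor, slots that disagree are invalidated, and any block whose saved state changed is queued again until nothing changes. A stripped-down skeleton of that control structure, with a hypothetical Block type and a no-op transfer function standing in for processOperations():

    import java.util.ArrayDeque;
    import java.util.Deque;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    // WorklistVerifierSketch: hypothetical skeleton, not the Graal classes. A state is an
    // int[] indexed by register number; 0 plays the role of "no interval / invalid".
    final class WorklistVerifierSketch {

        static final class Block {
            final int id;
            final List<Block> successors;

            Block(int id, List<Block> successors) {
                this.id = id;
                this.successors = successors;
            }
        }

        // Stand-in for processOperations(): simulate the block's effect on the state.
        static void transfer(Block block, int[] state) {
        }

        // Clear slots where saved and incoming state disagree; report whether anything changed.
        static boolean invalidateDisagreements(int[] saved, int[] incoming) {
            boolean changed = false;
            for (int i = 0; i < saved.length; i++) {
                if (saved[i] != incoming[i] && saved[i] != 0) {
                    saved[i] = 0;
                    changed = true;
                }
            }
            return changed;
        }

        static void verify(Block start, int stateSize) {
            Map<Integer, int[]> savedStates = new HashMap<>();
            Deque<Block> workList = new ArrayDeque<>();
            savedStates.put(start.id, new int[stateSize]);
            workList.add(start);
            while (!workList.isEmpty()) {
                Block block = workList.poll();
                int[] state = savedStates.get(block.id).clone(); // copy: transfer() mutates it
                transfer(block, state);
                for (Block succ : block.successors) {
                    int[] saved = savedStates.get(succ.id);
                    if (saved == null) {
                        savedStates.put(succ.id, state.clone()); // first visit
                        workList.add(succ);
                    } else if (invalidateDisagreements(saved, state)) {
                        workList.add(succ);                      // saved state changed: re-visit
                    }
                }
            }
        }

        public static void main(String[] args) {
            Block b1 = new Block(1, List.of());
            Block b0 = new Block(0, List.of(b1));
            verify(b0, 4);
            System.out.println("reached fixed point");
        }
    }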
--- a/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceGlobalMoveResolver.java	Mon Nov 30 15:07:18 2015 +0100
+++ b/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceGlobalMoveResolver.java	Mon Nov 30 17:18:36 2015 +0100
@@ -229,8 +229,7 @@
     }
 
     /**
-     * Checks if the {@linkplain TraceInterval#location() location} of {@code to} is not blocked or
-     * is only blocked by {@code from}.
+     * Checks if {@code to} is not blocked or is only blocked by {@code from}.
      */
     private boolean safeToProcessMove(Value fromLocation, Value toLocation) {
         if (mightBeBlocked(toLocation)) {
@@ -318,8 +317,8 @@
     }
 
     /**
-     * @param fromOpr {@link TraceInterval#operand operand} of the {@code from} interval
-     * @param toOpr {@link TraceInterval#operand operand} of the {@code to} interval
+     * @param fromOpr Operand of the {@code from} interval
+     * @param toOpr Operand of the {@code to} interval
      */
     private LIRInstruction createMove(Value fromOpr, AllocatableValue toOpr) {
         if (isStackSlotValue(toOpr) && isStackSlotValue(fromOpr)) {
--- a/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceInterval.java	Mon Nov 30 15:07:18 2015 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1091 +0,0 @@
-/*
- * Copyright (c) 2009, 2015, Oracle and/or its affiliates. All rights reserved.
- * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
- *
- * This code is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License version 2 only, as
- * published by the Free Software Foundation.
- *
- * This code is distributed in the hope that it will be useful, but WITHOUT
- * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
- * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
- * version 2 for more details (a copy is included in the LICENSE file that
- * accompanied this code).
- *
- * You should have received a copy of the GNU General Public License version
- * 2 along with this work; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
- *
- * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
- * or visit www.oracle.com if you need additional information or have any
- * questions.
- */
-package com.oracle.graal.lir.alloc.trace;
-
-import static com.oracle.graal.compiler.common.GraalOptions.DetailedAsserts;
-import static com.oracle.graal.lir.LIRValueUtil.isStackSlotValue;
-import static com.oracle.graal.lir.LIRValueUtil.isVariable;
-import static com.oracle.graal.lir.LIRValueUtil.isVirtualStackSlot;
-import static jdk.vm.ci.code.ValueUtil.asRegister;
-import static jdk.vm.ci.code.ValueUtil.isIllegal;
-import static jdk.vm.ci.code.ValueUtil.isRegister;
-import static jdk.vm.ci.code.ValueUtil.isStackSlot;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.EnumSet;
-import java.util.List;
-
-import jdk.vm.ci.code.BailoutException;
-import jdk.vm.ci.code.RegisterValue;
-import jdk.vm.ci.code.StackSlot;
-import jdk.vm.ci.common.JVMCIError;
-import jdk.vm.ci.meta.AllocatableValue;
-import jdk.vm.ci.meta.JavaConstant;
-import jdk.vm.ci.meta.LIRKind;
-import jdk.vm.ci.meta.Value;
-
-import com.oracle.graal.compiler.common.util.Util;
-import com.oracle.graal.debug.TTY;
-import com.oracle.graal.lir.LIRInstruction;
-import com.oracle.graal.lir.Variable;
-
-/**
- * Represents an interval in the {@linkplain TraceLinearScan linear scan register allocator}.
- */
-final class TraceInterval extends IntervalHint {
-
-    static final class AnyList {
-
-        /**
-         * List of intervals whose binding is currently {@link RegisterBinding#Any}.
-         */
-        public TraceInterval any;
-
-        public AnyList(TraceInterval any) {
-            this.any = any;
-        }
-
-        /**
-         * Gets the any list.
-         */
-        public TraceInterval getAny() {
-            return any;
-        }
-
-        /**
-         * Sets the any list.
-         */
-        public void setAny(TraceInterval list) {
-            any = list;
-        }
-
-        /**
-         * Adds an interval to a list sorted by {@linkplain TraceInterval#from() current from}
-         * positions.
-         *
-         * @param interval the interval to add
-         */
-        public void addToListSortedByFromPositions(TraceInterval interval) {
-            TraceInterval list = getAny();
-            TraceInterval prev = null;
-            TraceInterval cur = list;
-            while (cur.from() < interval.from()) {
-                prev = cur;
-                cur = cur.next;
-            }
-            TraceInterval result = list;
-            if (prev == null) {
-                // add to head of list
-                result = interval;
-            } else {
-                // add before 'cur'
-                prev.next = interval;
-            }
-            interval.next = cur;
-            setAny(result);
-        }
-
-        /**
-         * Adds an interval to a list sorted by {@linkplain TraceInterval#from() start} positions
-         * and {@linkplain TraceInterval#firstUsage(RegisterPriority) first usage} positions.
-         *
-         * @param interval the interval to add
-         */
-        public void addToListSortedByStartAndUsePositions(TraceInterval interval) {
-            TraceInterval list = getAny();
-            TraceInterval prev = null;
-            TraceInterval cur = list;
-            while (cur.from() < interval.from() || (cur.from() == interval.from() && cur.firstUsage(RegisterPriority.None) < interval.firstUsage(RegisterPriority.None))) {
-                prev = cur;
-                cur = cur.next;
-            }
-            if (prev == null) {
-                list = interval;
-            } else {
-                prev.next = interval;
-            }
-            interval.next = cur;
-            setAny(list);
-        }
-
-        /**
-         * Removes an interval from a list.
-         *
-         * @param i the interval to remove
-         */
-        public void removeAny(TraceInterval i) {
-            TraceInterval list = getAny();
-            TraceInterval prev = null;
-            TraceInterval cur = list;
-            while (cur != i) {
-                assert cur != null && cur != TraceInterval.EndMarker : "interval has not been found in list: " + i;
-                prev = cur;
-                cur = cur.next;
-            }
-            if (prev == null) {
-                setAny(cur.next);
-            } else {
-                prev.next = cur.next;
-            }
-        }
-    }
-
-    /**
-     * Constants denoting the register usage priority for an interval. The constants are declared in
-     * increasing order of priority and are used to optimize spilling when multiple overlapping
-     * intervals compete for limited registers.
-     */
-    public enum RegisterPriority {
-        /**
-         * No special reason for an interval to be allocated a register.
-         */
-        None,
-
-        /**
-         * Priority level for intervals live at the end of a loop.
-         */
-        LiveAtLoopEnd,
-
-        /**
-         * Priority level for intervals that should be allocated to a register.
-         */
-        ShouldHaveRegister,
-
-        /**
-         * Priority level for intervals that must be allocated to a register.
-         */
-        MustHaveRegister;
-
-        public static final RegisterPriority[] VALUES = values();
-
-        /**
-         * Determines if this priority is higher than or equal to a given priority.
-         */
-        public boolean greaterEqual(RegisterPriority other) {
-            return ordinal() >= other.ordinal();
-        }
-
-        /**
-         * Determines if this priority is lower than a given priority.
-         */
-        public boolean lessThan(RegisterPriority other) {
-            return ordinal() < other.ordinal();
-        }
-
-        public CharSequence shortName() {
-            return name().subSequence(0, 1);
-        }
-    }
-
-    /**
-     * Constants denoting whether an interval is bound to a specific register. This models platform
-     * dependencies on register usage for certain instructions.
-     */
-    enum RegisterBinding {
-        /**
-         * Interval is bound to a specific register as required by the platform.
-         */
-        Fixed,
-
-        /**
-         * Interval has no specific register requirements.
-         */
-        Any,
-
-        /**
-         * Interval is bound to a stack slot.
-         */
-        Stack;
-
-        public static final RegisterBinding[] VALUES = values();
-    }
-
-    /**
-     * Constants denoting the linear-scan states an interval may be in with respect to the
-     * {@linkplain TraceInterval#from() start} {@code position} of the interval being processed.
-     */
-    enum State {
-        /**
-         * An interval that starts after {@code position}.
-         */
-        Unhandled,
-
-        /**
-         * An interval that {@linkplain TraceInterval#covers covers} {@code position} and has an
-         * assigned register.
-         */
-        Active,
-
-        /**
-         * An interval that starts before and ends after {@code position} but does not
-         * {@linkplain TraceInterval#covers cover} it due to a lifetime hole.
-         */
-        Inactive,
-
-        /**
-         * An interval that ends before {@code position} or is spilled to memory.
-         */
-        Handled;
-    }
-
-    /**
-     * Constants used in optimization of spilling of an interval.
-     */
-    public enum SpillState {
-        /**
-         * Starting state of calculation: no definition found yet.
-         */
-        NoDefinitionFound,
-
-        /**
-         * One definition has already been found. Two consecutive definitions are treated as one
-         * (e.g. a consecutive move and add because of two-operand LIR form). The position of this
-         * definition is given by {@link TraceInterval#spillDefinitionPos()}.
-         */
-        NoSpillStore,
-
-        /**
-         * A spill move has already been inserted.
-         */
-        SpillStore,
-
-        /**
-         * The interval starts in memory (e.g. method parameter), so a store is never necessary.
-         */
-        StartInMemory,
-
-        /**
-         * The interval has more than one definition (e.g. resulting from phi moves), so stores to
-         * memory are not optimized.
-         */
-        NoOptimization;
-
-        public static final EnumSet<SpillState> IN_MEMORY = EnumSet.of(SpillStore, StartInMemory);
-    }
-
-    /**
-     * The {@linkplain RegisterValue register} or {@linkplain Variable variable} for this interval
-     * prior to register allocation.
-     */
-    public final AllocatableValue operand;
-
-    /**
-     * The operand number for this interval's {@linkplain #operand operand}.
-     */
-    public final int operandNumber;
-
-    /**
-     * The {@linkplain RegisterValue register} or {@linkplain StackSlot spill slot} assigned to this
-     * interval. In case of a spilled interval which is re-materialized this is
-     * {@link Value#ILLEGAL}.
-     */
-    private AllocatableValue location;
-
-    /**
-     * The stack slot to which all splits of this interval are spilled if necessary.
-     */
-    private AllocatableValue spillSlot;
-
-    /**
-     * The kind of this interval.
-     */
-    private LIRKind kind;
-
-    /**
-     * The start of the range, inclusive.
-     */
-    public int intFrom;
-
-    /**
-     * The end of the range, exclusive.
-     */
-    public int intTo;
-
-    /**
-     * List of (use-positions, register-priorities) pairs, sorted by use-positions.
-     */
-    private UsePosList usePosList;
-
-    /**
-     * Link to next interval in a sorted list of intervals that ends with {@link #EndMarker}.
-     */
-    TraceInterval next;
-
-    /**
-     * The linear-scan state of this interval.
-     */
-    State state;
-
-    /**
-     * The interval from which this one is derived. If this is a {@linkplain #isSplitParent() split
-     * parent}, it points to itself.
-     */
-    private TraceInterval splitParent;
-
-    /**
-     * List of all intervals that are split off from this interval. This is only used if this is a
-     * {@linkplain #isSplitParent() split parent}.
-     */
-    private List<TraceInterval> splitChildren = Collections.emptyList();
-
-    /**
-     * Current split child that has been active or inactive last (always stored in split parents).
-     */
-    private TraceInterval currentSplitChild;
-
-    /**
-     * Specifies if move is inserted between currentSplitChild and this interval when interval gets
-     * active the first time.
-     */
-    private boolean insertMoveWhenActivated;
-
-    /**
-     * For spill move optimization.
-     */
-    private SpillState spillState;
-
-    /**
-     * Position where this interval is defined (if defined only once).
-     */
-    private int spillDefinitionPos;
-
-    /**
-     * This interval should be assigned the same location as the hint interval.
-     */
-    private IntervalHint locationHint;
-
-    /**
-     * The value with which a spilled child interval can be re-materialized. Currently this must be
-     * a Constant.
-     */
-    private JavaConstant materializedValue;
-
-    /**
-     * The number of times {@link #addMaterializationValue(JavaConstant)} is called.
-     */
-    private int numMaterializationValuesAdded;
-
-    void assignLocation(AllocatableValue newLocation) {
-        if (isRegister(newLocation)) {
-            assert this.location == null : "cannot re-assign location for " + this;
-            if (newLocation.getLIRKind().equals(LIRKind.Illegal) && !kind.equals(LIRKind.Illegal)) {
-                this.location = asRegister(newLocation).asValue(kind);
-                return;
-            }
-        } else if (isIllegal(newLocation)) {
-            assert canMaterialize();
-        } else {
-            assert this.location == null || isRegister(this.location) || (isVirtualStackSlot(this.location) && isStackSlot(newLocation)) : "cannot re-assign location for " + this;
-            assert isStackSlotValue(newLocation);
-            assert !newLocation.getLIRKind().equals(LIRKind.Illegal);
-            assert newLocation.getLIRKind().equals(this.kind);
-        }
-        this.location = newLocation;
-    }
-
-    /**
-     * Gets the {@linkplain RegisterValue register} or {@linkplain StackSlot spill slot} assigned to
-     * this interval.
-     */
-    @Override
-    public AllocatableValue location() {
-        return location;
-    }
-
-    public LIRKind kind() {
-        assert !isRegister(operand) : "cannot access type for fixed interval";
-        return kind;
-    }
-
-    public void setKind(LIRKind kind) {
-        assert isRegister(operand) || this.kind().equals(LIRKind.Illegal) || this.kind().equals(kind) : "overwriting existing type";
-        this.kind = kind;
-    }
-
-    public boolean isEmpty() {
-        return intFrom == Integer.MAX_VALUE && intTo == Integer.MAX_VALUE;
-    }
-
-    public void setTo(int pos) {
-        assert intFrom == Integer.MAX_VALUE || intFrom < pos;
-        intTo = pos;
-    }
-
-    public void setFrom(int pos) {
-        assert intTo == Integer.MAX_VALUE || pos < intTo;
-        intFrom = pos;
-    }
-
-    @Override
-    public int from() {
-        return intFrom;
-    }
-
-    int to() {
-        return intTo;
-    }
-
-    int numUsePositions() {
-        return usePosList.size();
-    }
-
-    public void setLocationHint(IntervalHint interval) {
-        locationHint = interval;
-    }
-
-    public boolean isSplitParent() {
-        return splitParent == this;
-    }
-
-    boolean isSplitChild() {
-        return splitParent != this;
-    }
-
-    /**
-     * Gets the split parent for this interval.
-     */
-    public TraceInterval splitParent() {
-        assert splitParent.isSplitParent() : "not a split parent: " + this;
-        return splitParent;
-    }
-
-    /**
-     * Gets the canonical spill slot for this interval.
-     */
-    public AllocatableValue spillSlot() {
-        return splitParent().spillSlot;
-    }
-
-    public void setSpillSlot(AllocatableValue slot) {
-        assert isStackSlotValue(slot);
-        assert splitParent().spillSlot == null || (isVirtualStackSlot(splitParent().spillSlot) && isStackSlot(slot)) : "cannot overwrite existing spill slot";
-        splitParent().spillSlot = slot;
-    }
-
-    TraceInterval currentSplitChild() {
-        return splitParent().currentSplitChild;
-    }
-
-    void makeCurrentSplitChild() {
-        splitParent().currentSplitChild = this;
-    }
-
-    boolean insertMoveWhenActivated() {
-        return insertMoveWhenActivated;
-    }
-
-    void setInsertMoveWhenActivated(boolean b) {
-        insertMoveWhenActivated = b;
-    }
-
-    // for spill optimization
-    public SpillState spillState() {
-        return splitParent().spillState;
-    }
-
-    public int spillDefinitionPos() {
-        return splitParent().spillDefinitionPos;
-    }
-
-    public void setSpillState(SpillState state) {
-        assert state.ordinal() >= spillState().ordinal() : "state cannot decrease";
-        splitParent().spillState = state;
-    }
-
-    public void setSpillDefinitionPos(int pos) {
-        assert spillState() == SpillState.NoDefinitionFound || spillState() == SpillState.NoSpillStore || spillDefinitionPos() == -1 : "cannot set the position twice";
-        int to = to();
-        assert pos < to : String.format("Cannot spill %s at %d", this, pos);
-        splitParent().spillDefinitionPos = pos;
-    }
-
-    /**
-     * Returns true if this interval has a shadow copy on the stack that is correct after
-     * {@code opId}.
-     */
-    public boolean inMemoryAt(int opId) {
-        SpillState spillSt = spillState();
-        return spillSt == SpillState.StartInMemory || (spillSt == SpillState.SpillStore && opId > spillDefinitionPos() && !canMaterialize());
-    }
-
-    void removeFirstUsePos() {
-        usePosList.removeLowestUsePos();
-    }
-
-    // test intersection
-    boolean intersects(TraceInterval i) {
-        return intersectsAt(i) != -1;
-    }
-
-    int intersectsAt(TraceInterval i) {
-        TraceInterval i1;
-        TraceInterval i2;
-        if (i.from() < this.from()) {
-            i1 = i;
-            i2 = this;
-        } else {
-            i1 = this;
-            i2 = i;
-        }
-        assert i1.from() <= i2.from();
-
-        if (i1.to() <= i2.from()) {
-            return -1;
-        }
-        return i2.from();
-    }
-
-    /**
-     * Sentinel interval to denote the end of an interval list.
-     */
-    static final TraceInterval EndMarker = new TraceInterval(Value.ILLEGAL, -1);
-
-    TraceInterval(AllocatableValue operand, int operandNumber) {
-        assert operand != null;
-        this.operand = operand;
-        this.operandNumber = operandNumber;
-        if (isRegister(operand)) {
-            location = operand;
-        } else {
-            assert isIllegal(operand) || isVariable(operand);
-        }
-        this.kind = LIRKind.Illegal;
-        this.intFrom = Integer.MAX_VALUE;
-        this.intTo = Integer.MAX_VALUE;
-        this.usePosList = new UsePosList(4);
-        this.next = EndMarker;
-        this.spillState = SpillState.NoDefinitionFound;
-        this.spillDefinitionPos = -1;
-        splitParent = this;
-        currentSplitChild = this;
-    }
-
-    /**
-     * Sets the value which is used for re-materialization.
-     */
-    public void addMaterializationValue(JavaConstant value) {
-        if (numMaterializationValuesAdded == 0) {
-            materializedValue = value;
-        } else {
-            // Interval is defined in multiple places -> no materialization is possible.
-            materializedValue = null;
-        }
-        numMaterializationValuesAdded++;
-    }
-
-    /**
-     * Returns true if this interval can be re-materialized when spilled. This means that no
-     * spill-moves are needed. Instead of restore-moves the {@link #materializedValue} is restored.
-     */
-    public boolean canMaterialize() {
-        return getMaterializedValue() != null;
-    }
-
-    /**
-     * Returns a value which can be moved to a register instead of a restore-move from stack.
-     */
-    public JavaConstant getMaterializedValue() {
-        return splitParent().materializedValue;
-    }
-
-    // consistency check of split-children
-    boolean checkSplitChildren() {
-        if (!splitChildren.isEmpty()) {
-            assert isSplitParent() : "only split parents can have children";
-
-            for (int i = 0; i < splitChildren.size(); i++) {
-                TraceInterval i1 = splitChildren.get(i);
-
-                assert i1.splitParent() == this : "not a split child of this interval";
-                assert i1.kind().equals(kind()) : "must be equal for all split children";
-                assert (i1.spillSlot() == null && spillSlot == null) || i1.spillSlot().equals(spillSlot()) : "must be equal for all split children";
-
-                for (int j = i + 1; j < splitChildren.size(); j++) {
-                    TraceInterval i2 = splitChildren.get(j);
-
-                    assert !i1.operand.equals(i2.operand) : "same register number";
-
-                    if (i1.from() < i2.from()) {
-                        assert i1.to() <= i2.from() && i1.to() < i2.to() : "intervals overlapping";
-                    } else {
-                        assert i2.from() < i1.from() : "intervals start at same opId";
-                        assert i2.to() <= i1.from() && i2.to() < i1.to() : "intervals overlapping";
-                    }
-                }
-            }
-        }
-
-        return true;
-    }
-
-    public IntervalHint locationHint(boolean searchSplitChild) {
-        if (!searchSplitChild) {
-            return locationHint;
-        }
-
-        if (locationHint != null) {
-            assert !(locationHint instanceof TraceInterval) || ((TraceInterval) locationHint).isSplitParent() : "only split parents are valid hint registers";
-
-            if (locationHint.location() != null && isRegister(locationHint.location())) {
-                return locationHint;
-            } else if (locationHint instanceof TraceInterval) {
-                TraceInterval hint = (TraceInterval) locationHint;
-                if (!hint.splitChildren.isEmpty()) {
-                    // search the first split child that has a register assigned
-                    int len = hint.splitChildren.size();
-                    for (int i = 0; i < len; i++) {
-                        TraceInterval interval = hint.splitChildren.get(i);
-                        if (interval.location != null && isRegister(interval.location)) {
-                            return interval;
-                        }
-                    }
-                }
-            }
-        }
-
-        // no hint interval found that has a register assigned
-        return null;
-    }
-
-    TraceInterval getSplitChildAtOpId(int opId, LIRInstruction.OperandMode mode, TraceLinearScan allocator) {
-        assert isSplitParent() : "can only be called for split parents";
-        assert opId >= 0 : "invalid opId (method cannot be called for spill moves)";
-
-        if (splitChildren.isEmpty()) {
-            assert this.covers(opId, mode) : this + " does not cover " + opId;
-            return this;
-        } else {
-            TraceInterval result = null;
-            int len = splitChildren.size();
-
-            // in outputMode, the end of the interval (opId == cur.to()) is not valid
-            int toOffset = (mode == LIRInstruction.OperandMode.DEF ? 0 : 1);
-
-            int i;
-            for (i = 0; i < len; i++) {
-                TraceInterval cur = splitChildren.get(i);
-                if (cur.from() <= opId && opId < cur.to() + toOffset) {
-                    if (i > 0) {
-                        // exchange current split child to start of list (faster access for next
-                        // call)
-                        Util.atPutGrow(splitChildren, i, splitChildren.get(0), null);
-                        Util.atPutGrow(splitChildren, 0, cur, null);
-                    }
-
-                    // interval found
-                    result = cur;
-                    break;
-                }
-            }
-
-            assert checkSplitChild(result, opId, allocator, toOffset, mode);
-            return result;
-        }
-    }
-
-    private boolean checkSplitChild(TraceInterval result, int opId, TraceLinearScan allocator, int toOffset, LIRInstruction.OperandMode mode) {
-        if (result == null) {
-            // this is an error
-            StringBuilder msg = new StringBuilder(this.toString()).append(" has no child at ").append(opId);
-            if (!splitChildren.isEmpty()) {
-                TraceInterval firstChild = splitChildren.get(0);
-                TraceInterval lastChild = splitChildren.get(splitChildren.size() - 1);
-                msg.append(" (first = ").append(firstChild).append(", last = ").append(lastChild).append(")");
-            }
-            throw new JVMCIError("Linear Scan Error: %s", msg);
-        }
-
-        if (!splitChildren.isEmpty()) {
-            for (TraceInterval interval : splitChildren) {
-                if (interval != result && interval.from() <= opId && opId < interval.to() + toOffset) {
-                    TTY.println(String.format("two valid result intervals found for opId %d: %d and %d", opId, result.operandNumber, interval.operandNumber));
-                    TTY.println(result.logString(allocator));
-                    TTY.println(interval.logString(allocator));
-                    throw new BailoutException("two valid result intervals found");
-                }
-            }
-        }
-        assert result.covers(opId, mode) : "opId not covered by interval";
-        return true;
-    }
-
-    // returns the interval that covers the given opId or null if there is none
-    TraceInterval getIntervalCoveringOpId(int opId) {
-        assert opId >= 0 : "invalid opId";
-        assert opId < to() : "can only look into the past";
-
-        if (opId >= from()) {
-            return this;
-        }
-
-        TraceInterval parent = splitParent();
-        TraceInterval result = null;
-
-        assert !parent.splitChildren.isEmpty() : "no split children available";
-        int len = parent.splitChildren.size();
-
-        for (int i = len - 1; i >= 0; i--) {
-            TraceInterval cur = parent.splitChildren.get(i);
-            if (cur.from() <= opId && opId < cur.to()) {
-                assert result == null : "covered by multiple split children " + result + " and " + cur;
-                result = cur;
-            }
-        }
-
-        return result;
-    }
-
-    // returns the last split child that ends before the given opId
-    TraceInterval getSplitChildBeforeOpId(int opId) {
-        assert opId >= 0 : "invalid opId";
-
-        TraceInterval parent = splitParent();
-        TraceInterval result = null;
-
-        assert !parent.splitChildren.isEmpty() : "no split children available";
-        int len = parent.splitChildren.size();
-
-        for (int i = len - 1; i >= 0; i--) {
-            TraceInterval cur = parent.splitChildren.get(i);
-            if (cur.to() <= opId && (result == null || result.to() < cur.to())) {
-                result = cur;
-            }
-        }
-
-        assert result != null : "no split child found";
-        return result;
-    }
-
-    // checks if opId is covered by any split child
-    boolean splitChildCovers(int opId, LIRInstruction.OperandMode mode) {
-        assert isSplitParent() : "can only be called for split parents";
-        assert opId >= 0 : "invalid opId (method can not be called for spill moves)";
-
-        if (splitChildren.isEmpty()) {
-            // simple case if interval was not split
-            return covers(opId, mode);
-
-        } else {
-            // extended case: check all split children
-            int len = splitChildren.size();
-            for (int i = 0; i < len; i++) {
-                TraceInterval cur = splitChildren.get(i);
-                if (cur.covers(opId, mode)) {
-                    return true;
-                }
-            }
-            return false;
-        }
-    }
-
-    private RegisterPriority adaptPriority(RegisterPriority priority) {
-        /*
-         * In case of re-materialized values we require that use-operands are registers, because we
-         * don't have the value in a stack location. (Note that ShouldHaveRegister means that the
-         * operand can also be a StackSlot).
-         */
-        if (priority == RegisterPriority.ShouldHaveRegister && canMaterialize()) {
-            return RegisterPriority.MustHaveRegister;
-        }
-        return priority;
-    }
-
-    // Note: use positions are sorted in descending order, so the first (earliest) use has the highest index
-    int firstUsage(RegisterPriority minRegisterPriority) {
-        assert isVariable(operand) : "cannot access use positions for fixed intervals";
-
-        for (int i = usePosList.size() - 1; i >= 0; --i) {
-            RegisterPriority registerPriority = adaptPriority(usePosList.registerPriority(i));
-            if (registerPriority.greaterEqual(minRegisterPriority)) {
-                return usePosList.usePos(i);
-            }
-        }
-        return Integer.MAX_VALUE;
-    }
-
-    int nextUsage(RegisterPriority minRegisterPriority, int from) {
-        assert isVariable(operand) : "cannot access use positions for fixed intervals";
-
-        for (int i = usePosList.size() - 1; i >= 0; --i) {
-            int usePos = usePosList.usePos(i);
-            if (usePos >= from && adaptPriority(usePosList.registerPriority(i)).greaterEqual(minRegisterPriority)) {
-                return usePos;
-            }
-        }
-        return Integer.MAX_VALUE;
-    }
-
-    int nextUsageExact(RegisterPriority exactRegisterPriority, int from) {
-        assert isVariable(operand) : "cannot access use positions for fixed intervals";
-
-        for (int i = usePosList.size() - 1; i >= 0; --i) {
-            int usePos = usePosList.usePos(i);
-            if (usePos >= from && adaptPriority(usePosList.registerPriority(i)) == exactRegisterPriority) {
-                return usePos;
-            }
-        }
-        return Integer.MAX_VALUE;
-    }
-
-    int previousUsage(RegisterPriority minRegisterPriority, int from) {
-        assert isVariable(operand) : "cannot access use positions for fixed intervals";
-
-        int prev = -1;
-        for (int i = usePosList.size() - 1; i >= 0; --i) {
-            int usePos = usePosList.usePos(i);
-            if (usePos > from) {
-                return prev;
-            }
-            if (adaptPriority(usePosList.registerPriority(i)).greaterEqual(minRegisterPriority)) {
-                prev = usePos;
-            }
-        }
-        return prev;
-    }
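A small worked example of the use-position queries above, with hypothetical values (not taken from the original sources) and assuming canMaterialize() is false so adaptPriority leaves the priorities unchanged:

    // usePosList (descending):  index 0 -> 20:MustHaveRegister
    //                           index 1 -> 12:ShouldHaveRegister
    //                           index 2 ->  4:MustHaveRegister
    //
    // firstUsage(MustHaveRegister)          == 4   (earliest position with at least that priority)
    // nextUsage(MustHaveRegister, 8)        == 20  (4 is before 'from', 12 is only ShouldHaveRegister)
    // previousUsage(ShouldHaveRegister, 15) == 12  (last qualifying position at or before 15)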
-
-    public void addUsePos(int pos, RegisterPriority registerPriority) {
-        assert isEmpty() || covers(pos, LIRInstruction.OperandMode.USE) : String.format("use position %d not covered by live range of interval %s", pos, this);
-
-        // do not add use positions for precolored intervals because they are never used
-        if (registerPriority != RegisterPriority.None && isVariable(operand)) {
-            if (DetailedAsserts.getValue()) {
-                for (int i = 0; i < usePosList.size(); i++) {
-                    assert pos <= usePosList.usePos(i) : "already added a use-position with lower position";
-                    if (i > 0) {
-                        assert usePosList.usePos(i) < usePosList.usePos(i - 1) : "not sorted descending";
-                    }
-                }
-            }
-
-            // Note: addUsePos is called with positions in descending order, so the list stays
-            // sorted automatically by simply appending new use positions
-            int len = usePosList.size();
-            if (len == 0 || usePosList.usePos(len - 1) > pos) {
-                usePosList.add(pos, registerPriority);
-            } else if (usePosList.registerPriority(len - 1).lessThan(registerPriority)) {
-                assert usePosList.usePos(len - 1) == pos : "list not sorted correctly";
-                usePosList.setRegisterPriority(len - 1, registerPriority);
-            }
-        }
-    }
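A sketch of how the descending-order contract above plays out, with hypothetical calls (values not from the original sources):

    // interval.addUsePos(20, RegisterPriority.MustHaveRegister);    // empty list -> append 20
    // interval.addUsePos(12, RegisterPriority.ShouldHaveRegister);  // 20 > 12    -> append 12
    // interval.addUsePos(12, RegisterPriority.MustHaveRegister);    // same position -> only raise its priority
    // // resulting usePosList: [20:MustHaveRegister, 12:MustHaveRegister]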
-
-    public void addRange(int from, int to) {
-        assert from < to : "invalid range";
-
-        if (from < intFrom) {
-            setFrom(from);
-        }
-        if (intTo == Integer.MAX_VALUE || intTo < to) {
-            setTo(to);
-        }
-    }
-
-    TraceInterval newSplitChild(TraceLinearScan allocator) {
-        // allocate new interval
-        TraceInterval parent = splitParent();
-        TraceInterval result = allocator.createDerivedInterval(parent);
-        result.setKind(kind());
-
-        result.splitParent = parent;
-        result.setLocationHint(parent);
-
-        // insert new interval in children-list of parent
-        if (parent.splitChildren.isEmpty()) {
-            assert isSplitParent() : "list must be initialized at first split";
-
-            // Create new non-shared list
-            parent.splitChildren = new ArrayList<>(4);
-            parent.splitChildren.add(this);
-        }
-        parent.splitChildren.add(result);
-
-        return result;
-    }
-
-    /**
-     * Splits this interval at a specified position and returns the remainder as a new <i>child</i>
-     * interval of this interval's {@linkplain #splitParent() parent} interval.
-     * <p>
-     * When an interval is split, a bi-directional link is established between the original
-     * <i>parent</i> interval and the <i>children</i> intervals that are split off this interval.
-     * When a split child is split again, the newly created interval is a direct child of the original
-     * parent. That is, there is no tree of split children stored, just a flat list. All split
-     * children are spilled to the same {@linkplain #spillSlot spill slot}.
-     *
-     * @param splitPos the position at which to split this interval
-     * @param allocator the register allocator context
-     * @return the child interval split off from this interval
-     */
-    TraceInterval split(int splitPos, TraceLinearScan allocator) {
-        assert isVariable(operand) : "cannot split fixed intervals";
-
-        // allocate new interval
-        TraceInterval result = newSplitChild(allocator);
-
-        // split the ranges
-        result.setTo(intTo);
-        result.setFrom(splitPos);
-        intTo = splitPos;
-
-        // split list of use positions
-        result.usePosList = usePosList.splitAt(splitPos);
-
-        if (DetailedAsserts.getValue()) {
-            for (int i = 0; i < usePosList.size(); i++) {
-                assert usePosList.usePos(i) < splitPos;
-            }
-            for (int i = 0; i < result.usePosList.size(); i++) {
-                assert result.usePosList.usePos(i) >= splitPos;
-            }
-        }
-        return result;
-    }
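A minimal sketch of a split, with hypothetical positions (not from the original sources): an interval covering [4, 20) split at 12 keeps [4, 12) and returns a child covering [12, 20); use positions below 12 stay with this interval, the rest move to the child, and both share the parent's spill slot as described in the javadoc above.

    // assume: interval.from() == 4, interval.to() == 20
    // TraceInterval child = interval.split(12, allocator);
    // // now: interval.to() == 12, child.from() == 12, child.to() == 20
    // // child was registered in the parent's split-children list via newSplitChild()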
-
-    // returns true if the opId is inside the interval
-    boolean covers(int opId, LIRInstruction.OperandMode mode) {
-        if (mode == LIRInstruction.OperandMode.DEF) {
-            return from() <= opId && opId < to();
-        }
-        return from() <= opId && opId <= to();
-    }
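Boundary behaviour of covers(), illustrated with a hypothetical interval where from() == 8 and to() == 16:

    // covers(16, OperandMode.USE) == true   // a use at the end position is still covered
    // covers(16, OperandMode.DEF) == false  // a definition at the end position is not
    // covers(7,  OperandMode.USE) == false  // positions before from() are never covered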
-
-    @Override
-    public String toString() {
-        String from = "?";
-        String to = "?";
-        if (!isEmpty()) {
-            from = String.valueOf(from());
-            to = String.valueOf(to());
-        }
-        String locationString = this.location == null ? "" : "@" + this.location;
-        return operandNumber + ":" + operand + (isRegister(operand) ? "" : locationString) + "[" + from + "," + to + "]";
-    }
-
-    /**
-     * Gets the use position information for this interval.
-     */
-    public UsePosList usePosList() {
-        return usePosList;
-    }
-
-    /**
-     * Gets a single line string for logging the details of this interval to a log stream.
-     *
-     * @param allocator the register allocator context
-     */
-    @Override
-    public String logString(TraceLinearScan allocator) {
-        StringBuilder buf = new StringBuilder(100);
-        buf.append("any ").append(operandNumber).append(':').append(operand).append(' ');
-        if (!isRegister(operand)) {
-            if (location != null) {
-                buf.append("location{").append(location).append("} ");
-            }
-        }
-
-        buf.append("hints{").append(splitParent.operandNumber);
-        IntervalHint hint = locationHint(false);
-        if (hint != null) {
-            buf.append(", ").append(hint.location());
-        }
-        buf.append("} ranges{");
-
-        // print range
-        buf.append("[" + from() + ", " + to() + "]");
-        buf.append("} uses{");
-
-        // print use positions
-        int prev = -1;
-        for (int i = usePosList.size() - 1; i >= 0; --i) {
-            assert prev < usePosList.usePos(i) : "use positions not sorted";
-            if (i != usePosList.size() - 1) {
-                buf.append(", ");
-            }
-            buf.append(usePosList.usePos(i)).append(':').append(usePosList.registerPriority(i).shortName());
-            prev = usePosList.usePos(i);
-        }
-        buf.append("} spill-state{").append(spillState()).append("}");
-        if (canMaterialize()) {
-            buf.append(" (remat:").append(getMaterializedValue().toString()).append(")");
-        }
-        return buf.toString();
-    }
-
-    List<TraceInterval> getSplitChildren() {
-        return Collections.unmodifiableList(splitChildren);
-    }
-
-    boolean isFixedInterval() {
-        return isRegister(operand);
-    }
-
-    private static boolean isDefinitionPosition(int usePos) {
-        return (usePos & 1) == 1;
-    }
-
-    int currentFrom(int currentPosition) {
-        assert isFixedInterval();
-        for (int i = 0; i < usePosList.size(); i++) {
-            int usePos = usePosList.usePos(i);
-            if (usePos <= currentPosition && isDefinitionPosition(usePos)) {
-                return usePos;
-            }
-
-        }
-        return Integer.MAX_VALUE;
-    }
-
-    int currentIntersectsAt(int currentPosition, TraceInterval current) {
-        assert isFixedInterval();
-        assert !current.isFixedInterval();
-        int from = Integer.MAX_VALUE;
-        int to = Integer.MIN_VALUE;
-
-        for (int i = 0; i < usePosList.size(); i++) {
-            int usePos = usePosList.usePos(i);
-            if (isDefinitionPosition(usePos)) {
-                if (usePos <= currentPosition) {
-                    from = usePos;
-                    break;
-                }
-                to = Integer.MIN_VALUE;
-            } else {
-                if (to < usePos) {
-                    to = usePos;
-                }
-            }
-        }
-        if (from < current.from()) {
-            if (to <= current.from()) {
-                return -1;
-            }
-            return current.from();
-        } else {
-            if (current.to() <= from) {
-                return -1;
-            }
-            return from;
-        }
-    }
-}
--- a/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceIntervalDumper.java	Mon Nov 30 15:07:18 2015 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,93 +0,0 @@
-/*
- * Copyright (c) 2015, 2015, Oracle and/or its affiliates. All rights reserved.
- * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
- *
- * This code is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License version 2 only, as
- * published by the Free Software Foundation.
- *
- * This code is distributed in the hope that it will be useful, but WITHOUT
- * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
- * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
- * version 2 for more details (a copy is included in the LICENSE file that
- * accompanied this code).
- *
- * You should have received a copy of the GNU General Public License version
- * 2 along with this work; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
- *
- * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
- * or visit www.oracle.com if you need additional information or have any
- * questions.
- */
-package com.oracle.graal.lir.alloc.trace;
-
-import static jdk.vm.ci.code.ValueUtil.isRegister;
-import jdk.vm.ci.meta.AllocatableValue;
-import jdk.vm.ci.meta.Value;
-
-import com.oracle.graal.lir.debug.IntervalDumper;
-
-final class TraceIntervalDumper implements IntervalDumper {
-    private final FixedInterval[] fixedIntervals;
-    private final TraceInterval[] intervals;
-
-    public TraceIntervalDumper(FixedInterval[] fixedIntervals, TraceInterval[] intervals) {
-        this.fixedIntervals = fixedIntervals;
-        this.intervals = intervals;
-    }
-
-    public void visitIntervals(IntervalVisitor visitor) {
-        for (FixedInterval interval : fixedIntervals) {
-            if (interval != null) {
-                printFixedInterval(interval, visitor);
-            }
-        }
-        for (TraceInterval interval : intervals) {
-            if (interval != null) {
-                printInterval(interval, visitor);
-            }
-        }
-    }
-
-    private static void printFixedInterval(FixedInterval interval, IntervalVisitor visitor) {
-        Value hint = null;
-        AllocatableValue operand = interval.operand;
-        String type = "fixed";
-        char typeChar = operand.getPlatformKind().getTypeChar();
-        visitor.visitIntervalStart(operand, operand, operand, hint, type, typeChar);
-
-        // print ranges
-        for (FixedRange range = interval.first(); range != FixedRange.EndMarker; range = range.next) {
-            visitor.visitRange(range.from, range.to);
-        }
-
-        // no use positions
-
-        visitor.visitIntervalEnd("NOT_SUPPORTED");
-
-    }
-
-    private static void printInterval(TraceInterval interval, IntervalVisitor visitor) {
-        Value hint = interval.locationHint(false) != null ? interval.locationHint(false).location() : null;
-        AllocatableValue operand = interval.operand;
-        String type = isRegister(operand) ? "fixed" : operand.getLIRKind().getPlatformKind().toString();
-        char typeChar = operand.getPlatformKind().getTypeChar();
-        visitor.visitIntervalStart(interval.splitParent().operand, operand, interval.location(), hint, type, typeChar);
-
-        // print ranges
-        visitor.visitRange(interval.from(), interval.to());
-
-        // print use positions
-        int prev = -1;
-        UsePosList usePosList = interval.usePosList();
-        for (int i = usePosList.size() - 1; i >= 0; --i) {
-            assert prev < usePosList.usePos(i) : "use positions not sorted";
-            visitor.visitUsePos(usePosList.usePos(i), usePosList.registerPriority(i));
-            prev = usePosList.usePos(i);
-        }
-
-        visitor.visitIntervalEnd(interval.spillState());
-    }
-
-}
--- a/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceIntervalWalker.java	Mon Nov 30 15:07:18 2015 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,323 +0,0 @@
-/*
- * Copyright (c) 2009, 2015, Oracle and/or its affiliates. All rights reserved.
- * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
- *
- * This code is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License version 2 only, as
- * published by the Free Software Foundation.
- *
- * This code is distributed in the hope that it will be useful, but WITHOUT
- * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
- * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
- * version 2 for more details (a copy is included in the LICENSE file that
- * accompanied this code).
- *
- * You should have received a copy of the GNU General Public License version
- * 2 along with this work; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
- *
- * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
- * or visit www.oracle.com if you need additional information or have any
- * questions.
- */
-package com.oracle.graal.lir.alloc.trace;
-
-import jdk.vm.ci.common.JVMCIError;
-
-import com.oracle.graal.debug.Debug;
-import com.oracle.graal.debug.Indent;
-import com.oracle.graal.lir.alloc.trace.FixedInterval.FixedList;
-import com.oracle.graal.lir.alloc.trace.TraceInterval.AnyList;
-import com.oracle.graal.lir.alloc.trace.TraceInterval.RegisterBinding;
-import com.oracle.graal.lir.alloc.trace.TraceInterval.State;
-
-/**
- * Walks the sorted lists of {@link FixedInterval fixed} and {@link TraceInterval trace} intervals
- * while advancing the current position, moving intervals between the unhandled, active and
- * inactive lists as their lifetimes begin and end.
- */
-class TraceIntervalWalker {
-
-    protected final TraceLinearScan allocator;
-
-    /**
-     * Sorted list of intervals that are not yet live at the current position.
-     */
-    protected AnyList unhandledAnyList;
-
-    /**
-     * Sorted list of intervals that are live at the current position.
-     */
-    protected AnyList activeAnyList;
-    protected FixedList activeFixedList;
-
-    /**
-     * Sorted list of intervals that are in a lifetime hole at the current position.
-     */
-    protected FixedList inactiveFixedList;
-
-    /**
-     * The current position (the point at which the walker currently intersects the intervals).
-     */
-    protected int currentPosition;
-
-    /**
-     * Processes the {@code currentInterval} interval in an attempt to allocate a physical register
-     * to it and thus allow it to be moved to a list of {@linkplain #activeAnyList active}
-     * intervals.
-     *
-     * @param currentInterval The interval to be activated.
-     *
-     * @return {@code true} if a register was allocated to the {@code currentInterval} interval
-     */
-    protected boolean activateCurrent(TraceInterval currentInterval) {
-        if (Debug.isLogEnabled()) {
-            logCurrentStatus();
-        }
-        return true;
-    }
-
-    @SuppressWarnings("try")
-    protected void logCurrentStatus() {
-        try (Indent i = Debug.logAndIndent("active:")) {
-            logList(activeFixedList.getFixed());
-            logList(activeAnyList.getAny());
-        }
-        try (Indent i = Debug.logAndIndent("inactive(fixed):")) {
-            logList(inactiveFixedList.getFixed());
-        }
-    }
-
-    private void logList(FixedInterval i) {
-        for (FixedInterval interval = i; interval != FixedInterval.EndMarker; interval = interval.next) {
-            Debug.log("%s", interval.logString(allocator));
-        }
-    }
-
-    private void logList(TraceInterval i) {
-        for (TraceInterval interval = i; interval != TraceInterval.EndMarker; interval = interval.next) {
-            Debug.log("%s", interval.logString(allocator));
-        }
-    }
-
-    void walkBefore(int lirOpId) {
-        walkTo(lirOpId - 1);
-    }
-
-    void walk() {
-        walkTo(Integer.MAX_VALUE);
-    }
-
-    /**
-     * Creates a new interval walker.
-     *
-     * @param allocator the register allocator context
-     * @param unhandledFixed the list of unhandled {@linkplain RegisterBinding#Fixed fixed}
-     *            intervals
-     * @param unhandledAny the list of unhandled {@linkplain RegisterBinding#Any non-fixed}
-     *            intervals
-     */
-    TraceIntervalWalker(TraceLinearScan allocator, FixedInterval unhandledFixed, TraceInterval unhandledAny) {
-        this.allocator = allocator;
-
-        unhandledAnyList = new AnyList(unhandledAny);
-        activeAnyList = new AnyList(TraceInterval.EndMarker);
-        activeFixedList = new FixedList(FixedInterval.EndMarker);
-        // we don't need a separate unhandled list for fixed intervals.
-        inactiveFixedList = new FixedList(unhandledFixed);
-        currentPosition = -1;
-    }
-
-    protected void removeFromList(TraceInterval interval) {
-        if (interval.state == State.Active) {
-            activeAnyList.removeAny(interval);
-        } else {
-            assert interval.state == State.Inactive : "invalid state";
-            // inactiveAnyLists.removeAny(interval);
-            throw JVMCIError.shouldNotReachHere();
-        }
-    }
-
-    /**
-     * Walks up to {@code from} and updates the state of {@link FixedInterval fixed intervals}.
-     *
-     * Fixed intervals can switch back and forth between the states {@link State#Active} and
-     * {@link State#Inactive} (and eventually to {@link State#Handled} but handled intervals are not
-     * managed).
-     */
-    @SuppressWarnings("try")
-    private void walkToFixed(State state, int from) {
-        assert state == State.Active || state == State.Inactive : "wrong state";
-        FixedInterval prevprev = null;
-        FixedInterval prev = (state == State.Active) ? activeFixedList.getFixed() : inactiveFixedList.getFixed();
-        FixedInterval next = prev;
-        if (Debug.isLogEnabled()) {
-            try (Indent i = Debug.logAndIndent("walkToFixed(%s, %d):", state, from)) {
-                logList(next);
-            }
-        }
-        while (next.currentFrom() <= from) {
-            FixedInterval cur = next;
-            next = cur.next;
-
-            boolean rangeHasChanged = false;
-            while (cur.currentTo() <= from) {
-                cur.nextRange();
-                rangeHasChanged = true;
-            }
-
-            // also handle move from inactive list to active list
-            rangeHasChanged = rangeHasChanged || (state == State.Inactive && cur.currentFrom() <= from);
-
-            if (rangeHasChanged) {
-                // remove cur from list
-                if (prevprev == null) {
-                    if (state == State.Active) {
-                        activeFixedList.setFixed(next);
-                    } else {
-                        inactiveFixedList.setFixed(next);
-                    }
-                } else {
-                    prevprev.next = next;
-                }
-                prev = next;
-                TraceInterval.State newState;
-                if (cur.currentAtEnd()) {
-                    // move to handled state (not maintained as a list)
-                    newState = State.Handled;
-                } else {
-                    if (cur.currentFrom() <= from) {
-                        // sort into active list
-                        activeFixedList.addToListSortedByCurrentFromPositions(cur);
-                        newState = State.Active;
-                    } else {
-                        // sort into inactive list
-                        inactiveFixedList.addToListSortedByCurrentFromPositions(cur);
-                        newState = State.Inactive;
-                    }
-                    if (prev == cur) {
-                        assert state == newState;
-                        prevprev = prev;
-                        prev = cur.next;
-                    }
-                }
-                intervalMoved(cur, state, newState);
-            } else {
-                prevprev = prev;
-                prev = cur.next;
-            }
-        }
-    }
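The transitions performed by walkToFixed() for a fixed interval at the current walk position can be summarised as follows (a restatement of the javadoc and code above, not additional behaviour):

    // Inactive -> Active   when the interval's next range has started (currentFrom() <= position)
    // Active   -> Inactive when its current range has ended and the next one starts later
    // Active or Inactive -> Handled when no range is left (currentAtEnd())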
-
-    /**
-     * Walks up to {@code from} and updates the state of {@link TraceInterval intervals}.
-     *
-     * Trace intervals can switch once from {@link State#Unhandled} to {@link State#Active} and then
-     * to {@link State#Handled} but handled intervals are not managed.
-     */
-    @SuppressWarnings("try")
-    private void walkToAny(int from) {
-        TraceInterval prevprev = null;
-        TraceInterval prev = activeAnyList.getAny();
-        TraceInterval next = prev;
-        if (Debug.isLogEnabled()) {
-            try (Indent i = Debug.logAndIndent("walkToAny(%d):", from)) {
-                logList(next);
-            }
-        }
-        while (next.from() <= from) {
-            TraceInterval cur = next;
-            next = cur.next;
-
-            if (cur.to() <= from) {
-                // remove cur from list
-                if (prevprev == null) {
-                    activeAnyList.setAny(next);
-                } else {
-                    prevprev.next = next;
-                }
-                intervalMoved(cur, State.Active, State.Handled);
-            } else {
-                prevprev = prev;
-            }
-            prev = next;
-        }
-    }
-
-    /**
-     * Get the next interval from {@linkplain #unhandledAnyList} which starts before or at
-     * {@code toOpId}. The returned interval is removed from the list.
-     *
-     * @postcondition all intervals in {@linkplain #unhandledAnyList} start after {@code toOpId}.
-     *
-     * @return The next interval or null if there is no {@linkplain #unhandledAnyList unhandled}
-     *         interval at position {@code toOpId}.
-     */
-    private TraceInterval nextInterval(int toOpId) {
-        TraceInterval any = unhandledAnyList.getAny();
-
-        if (any != TraceInterval.EndMarker) {
-            TraceInterval currentInterval = unhandledAnyList.getAny();
-            if (toOpId < currentInterval.from()) {
-                return null;
-            }
-
-            unhandledAnyList.setAny(currentInterval.next);
-            currentInterval.next = TraceInterval.EndMarker;
-            return currentInterval;
-        }
-        return null;
-
-    }
-
-    /**
-     * Walk up to {@code toOpId}.
-     *
-     * @postcondition {@link #currentPosition} is set to {@code toOpId}, {@link #activeFixedList}
-     *                and {@link #inactiveFixedList} are populated and {@link TraceInterval#state}s
-     *                are up to date.
-     */
-    @SuppressWarnings("try")
-    protected void walkTo(int toOpId) {
-        assert currentPosition <= toOpId : "can not walk backwards";
-        for (TraceInterval currentInterval = nextInterval(toOpId); currentInterval != null; currentInterval = nextInterval(toOpId)) {
-            int opId = currentInterval.from();
-
-            // set currentPosition prior to call of walkTo
-            currentPosition = opId;
-
-            // update unhandled stack intervals
-            // updateUnhandledStackIntervals(opId);
-
-            // call walkTo even if currentPosition == id
-            walkToFixed(State.Active, opId);
-            walkToFixed(State.Inactive, opId);
-            walkToAny(opId);
-
-            try (Indent indent = Debug.logAndIndent("walk to op %d", opId)) {
-                currentInterval.state = State.Active;
-                if (activateCurrent(currentInterval)) {
-                    activeAnyList.addToListSortedByFromPositions(currentInterval);
-                    intervalMoved(currentInterval, State.Unhandled, State.Active);
-                }
-            }
-        }
-        // set currentPosition prior to call of walkTo
-        currentPosition = toOpId;
-
-        if (currentPosition <= allocator.maxOpId()) {
-            // update unhandled stack intervals
-            // updateUnhandledStackIntervals(toOpId);
-
-            // call walkTo if still in range
-            walkToFixed(State.Active, toOpId);
-            walkToFixed(State.Inactive, toOpId);
-            walkToAny(toOpId);
-        }
-    }
-
-    private void intervalMoved(IntervalHint interval, State from, State to) {
-        // intervalMoved() is called whenever an interval moves from one interval list to another.
-        // In the implementation of this method it is prohibited to move the interval to any list.
-        if (Debug.isLogEnabled()) {
-            Debug.log("interval moved from %s to %s: %s", from, to, interval.logString(allocator));
-        }
-    }
-}
--- a/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceLinearScan.java	Mon Nov 30 15:07:18 2015 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1044 +0,0 @@
-/*
- * Copyright (c) 2009, 2015, Oracle and/or its affiliates. All rights reserved.
- * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
- *
- * This code is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License version 2 only, as
- * published by the Free Software Foundation.
- *
- * This code is distributed in the hope that it will be useful, but WITHOUT
- * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
- * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
- * version 2 for more details (a copy is included in the LICENSE file that
- * accompanied this code).
- *
- * You should have received a copy of the GNU General Public License version
- * 2 along with this work; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
- *
- * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
- * or visit www.oracle.com if you need additional information or have any
- * questions.
- */
-package com.oracle.graal.lir.alloc.trace;
-
-import static com.oracle.graal.compiler.common.GraalOptions.DetailedAsserts;
-import static com.oracle.graal.lir.LIRValueUtil.isVariable;
-import static jdk.vm.ci.code.CodeUtil.isEven;
-import static jdk.vm.ci.code.ValueUtil.asRegister;
-import static jdk.vm.ci.code.ValueUtil.asRegisterValue;
-import static jdk.vm.ci.code.ValueUtil.isIllegal;
-import static jdk.vm.ci.code.ValueUtil.isLegal;
-import static jdk.vm.ci.code.ValueUtil.isRegister;
-
-import java.util.Arrays;
-import java.util.BitSet;
-import java.util.EnumSet;
-import java.util.List;
-
-import jdk.vm.ci.code.BailoutException;
-import jdk.vm.ci.code.Register;
-import jdk.vm.ci.code.RegisterAttributes;
-import jdk.vm.ci.code.RegisterValue;
-import jdk.vm.ci.code.TargetDescription;
-import jdk.vm.ci.common.JVMCIError;
-import jdk.vm.ci.meta.AllocatableValue;
-import jdk.vm.ci.meta.LIRKind;
-import jdk.vm.ci.meta.Value;
-import jdk.vm.ci.options.NestedBooleanOptionValue;
-import jdk.vm.ci.options.Option;
-import jdk.vm.ci.options.OptionType;
-import jdk.vm.ci.options.OptionValue;
-
-import com.oracle.graal.compiler.common.alloc.RegisterAllocationConfig;
-import com.oracle.graal.compiler.common.alloc.TraceBuilder.TraceBuilderResult;
-import com.oracle.graal.compiler.common.cfg.AbstractBlockBase;
-import com.oracle.graal.compiler.common.cfg.BlockMap;
-import com.oracle.graal.debug.Debug;
-import com.oracle.graal.debug.Debug.Scope;
-import com.oracle.graal.debug.Indent;
-import com.oracle.graal.lir.LIR;
-import com.oracle.graal.lir.LIRInstruction;
-import com.oracle.graal.lir.LIRInstruction.OperandFlag;
-import com.oracle.graal.lir.LIRInstruction.OperandMode;
-import com.oracle.graal.lir.StandardOp.BlockEndOp;
-import com.oracle.graal.lir.ValueConsumer;
-import com.oracle.graal.lir.Variable;
-import com.oracle.graal.lir.VirtualStackSlot;
-import com.oracle.graal.lir.alloc.trace.TraceLinearScanAllocationPhase.TraceLinearScanAllocationContext;
-import com.oracle.graal.lir.framemap.FrameMapBuilder;
-import com.oracle.graal.lir.gen.LIRGenerationResult;
-import com.oracle.graal.lir.gen.LIRGeneratorTool.MoveFactory;
-import com.oracle.graal.lir.phases.LIRPhase;
-
-/**
- * An implementation of the linear scan register allocator algorithm described in <a
- * href="http://doi.acm.org/10.1145/1064979.1064998"
- * >"Optimized Interval Splitting in a Linear Scan Register Allocator"</a> by Christian Wimmer and
- * Hanspeter Moessenboeck.
- */
-final class TraceLinearScan {
-
-    public static class Options {
-        // @formatter:off
-        @Option(help = "Enable spill position optimization", type = OptionType.Debug)
-        public static final OptionValue<Boolean> LIROptTraceRAEliminateSpillMoves = new NestedBooleanOptionValue(LIRPhase.Options.LIROptimization, true);
-        // @formatter:on
-    }
-
-    private static final TraceLinearScanRegisterAllocationPhase TRACE_LINEAR_SCAN_REGISTER_ALLOCATION_PHASE = new TraceLinearScanRegisterAllocationPhase();
-    private static final TraceLinearScanAssignLocationsPhase TRACE_LINEAR_SCAN_ASSIGN_LOCATIONS_PHASE = new TraceLinearScanAssignLocationsPhase();
-    private static final TraceLinearScanEliminateSpillMovePhase TRACE_LINEAR_SCAN_ELIMINATE_SPILL_MOVE_PHASE = new TraceLinearScanEliminateSpillMovePhase();
-    private static final TraceLinearScanResolveDataFlowPhase TRACE_LINEAR_SCAN_RESOLVE_DATA_FLOW_PHASE = new TraceLinearScanResolveDataFlowPhase();
-    private static final TraceLinearScanLifetimeAnalysisPhase TRACE_LINEAR_SCAN_LIFETIME_ANALYSIS_PHASE = new TraceLinearScanLifetimeAnalysisPhase();
-
-    public static class BlockData {
-
-        /**
-         * Bit map specifying which operands are live upon entry to this block. These are values
-         * used in this block or any of its successors where such values are not defined in this
-         * block. The bit index of an operand is its
-         * {@linkplain TraceLinearScan#operandNumber(Value) operand number}.
-         */
-        public BitSet liveIn;
-
-        /**
-         * Bit map specifying which operands are live upon exit from this block. These are values
-         * used in a successor block that are either defined in this block or were live upon entry
-         * to this block. The bit index of an operand is its
-         * {@linkplain TraceLinearScan#operandNumber(Value) operand number}.
-         */
-        public BitSet liveOut;
-
-        /**
-         * Bit map specifying which operands are used in this block before being defined. These
-         * values contribute to the set that is live upon entry to the block. The bit index of an operand
-         * is its {@linkplain TraceLinearScan#operandNumber(Value) operand number}.
-         */
-        public BitSet liveGen;
-
-        /**
-         * Bit map specifying which operands are defined/overwritten in this block. The bit index of
-         * an operand is its {@linkplain TraceLinearScan#operandNumber(Value) operand number}.
-         */
-        public BitSet liveKill;
-    }
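These four sets support the usual backward data-flow update for liveness. A minimal sketch of that update, where dataOf and successorsOf are placeholder helpers (not part of the original sources):

    // BlockData d = dataOf(block);
    // d.liveOut = new BitSet(liveSetSize);
    // for (AbstractBlockBase<?> succ : successorsOf(block)) {
    //     d.liveOut.or(dataOf(succ).liveIn);   // liveOut = union of the successors' liveIn
    // }
    // d.liveIn = (BitSet) d.liveOut.clone();
    // d.liveIn.andNot(d.liveKill);             // drop operands overwritten in this block
    // d.liveIn.or(d.liveGen);                  // add operands used before being defined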
-
-    public static final int DOMINATOR_SPILL_MOVE_ID = -2;
-    private static final int SPLIT_INTERVALS_CAPACITY_RIGHT_SHIFT = 1;
-
-    private final LIR ir;
-    private final FrameMapBuilder frameMapBuilder;
-    private final RegisterAttributes[] registerAttributes;
-    private final Register[] registers;
-    private final RegisterAllocationConfig regAllocConfig;
-    private final MoveFactory moveFactory;
-
-    private final BlockMap<BlockData> blockData;
-
-    /**
-     * List of blocks in linear-scan order. This is only correct as long as the CFG does not change.
-     */
-    private final List<? extends AbstractBlockBase<?>> sortedBlocks;
-
-    /** @see #fixedIntervals() */
-    private final FixedInterval[] fixedIntervals;
-
-    /** @see #intervals() */
-    private TraceInterval[] intervals;
-
-    /**
-     * The number of valid entries in {@link #intervals}.
-     */
-    private int intervalsSize;
-
-    /**
-     * The index of the first entry in {@link #intervals} for a
-     * {@linkplain #createDerivedInterval(TraceInterval) derived interval}.
-     */
-    private int firstDerivedIntervalIndex = -1;
-
-    /**
-     * Intervals sorted by {@link TraceInterval#from()}.
-     */
-    private TraceInterval[] sortedIntervals;
-
-    /**
-     * Fixed intervals sorted by {@link FixedInterval#from()}.
-     */
-    private FixedInterval[] sortedFixedIntervals;
-
-    /**
-     * Map from an instruction {@linkplain LIRInstruction#id id} to the instruction. Entries should
-     * be retrieved with {@link #instructionForId(int)} as the id is not simply an index into this
-     * array.
-     */
-    private LIRInstruction[] opIdToInstructionMap;
-
-    /**
-     * Map from an instruction {@linkplain LIRInstruction#id id} to the
-     * {@linkplain AbstractBlockBase block} containing the instruction. Entries should be retrieved
-     * with {@link #blockForId(int)} as the id is not simply an index into this array.
-     */
-    private AbstractBlockBase<?>[] opIdToBlockMap;
-
-    protected final TraceBuilderResult<?> traceBuilderResult;
-    private final boolean neverSpillConstants;
-
-    protected TraceLinearScan(TargetDescription target, LIRGenerationResult res, MoveFactory spillMoveFactory, RegisterAllocationConfig regAllocConfig,
-                    List<? extends AbstractBlockBase<?>> sortedBlocks, TraceBuilderResult<?> traceBuilderResult, boolean neverSpillConstants) {
-        this.ir = res.getLIR();
-        this.moveFactory = spillMoveFactory;
-        this.frameMapBuilder = res.getFrameMapBuilder();
-        this.sortedBlocks = sortedBlocks;
-        this.registerAttributes = regAllocConfig.getRegisterConfig().getAttributesMap();
-        this.regAllocConfig = regAllocConfig;
-
-        this.registers = target.arch.getRegisters();
-        this.fixedIntervals = new FixedInterval[registers.length];
-        this.blockData = new BlockMap<>(ir.getControlFlowGraph());
-        this.traceBuilderResult = traceBuilderResult;
-        this.neverSpillConstants = neverSpillConstants;
-    }
-
-    public int getFirstLirInstructionId(AbstractBlockBase<?> block) {
-        int result = ir.getLIRforBlock(block).get(0).id();
-        assert result >= 0;
-        return result;
-    }
-
-    public int getLastLirInstructionId(AbstractBlockBase<?> block) {
-        List<LIRInstruction> instructions = ir.getLIRforBlock(block);
-        int result = instructions.get(instructions.size() - 1).id();
-        assert result >= 0;
-        return result;
-    }
-
-    public MoveFactory getSpillMoveFactory() {
-        return moveFactory;
-    }
-
-    protected TraceLocalMoveResolver createMoveResolver() {
-        TraceLocalMoveResolver moveResolver = new TraceLocalMoveResolver(this);
-        assert moveResolver.checkEmpty();
-        return moveResolver;
-    }
-
-    public static boolean isVariableOrRegister(Value value) {
-        return isVariable(value) || isRegister(value);
-    }
-
-    /**
-     * Converts an operand (variable or register) to an index in a flat address space covering all
-     * the {@linkplain Variable variables} and {@linkplain RegisterValue registers} being processed
-     * by this allocator.
-     */
-    @SuppressWarnings("static-method")
-    int operandNumber(Value operand) {
-        assert !isRegister(operand) : "Registers do not have operand numbers: " + operand;
-        assert isVariable(operand) : "Unsupported Value " + operand;
-        return ((Variable) operand).index;
-    }
-
-    /**
-     * Gets the number of operands. This value increases by 1 for each new variable.
-     */
-    int operandSize() {
-        return ir.numVariables();
-    }
-
-    /**
-     * Gets the number of registers. This value will never change.
-     */
-    int numRegisters() {
-        return registers.length;
-    }
-
-    public BlockData getBlockData(AbstractBlockBase<?> block) {
-        return blockData.get(block);
-    }
-
-    void initBlockData(AbstractBlockBase<?> block) {
-        blockData.put(block, new BlockData());
-    }
-
-    static final IntervalPredicate IS_PRECOLORED_INTERVAL = new IntervalPredicate() {
-
-        @Override
-        public boolean apply(TraceInterval i) {
-            return isRegister(i.operand);
-        }
-    };
-
-    static final IntervalPredicate IS_VARIABLE_INTERVAL = new IntervalPredicate() {
-
-        @Override
-        public boolean apply(TraceInterval i) {
-            return isVariable(i.operand);
-        }
-    };
-
-    static final IntervalPredicate IS_STACK_INTERVAL = new IntervalPredicate() {
-
-        @Override
-        public boolean apply(TraceInterval i) {
-            return !isRegister(i.operand);
-        }
-    };
-
-    /**
-     * Gets an object describing the attributes of a given register according to this register
-     * configuration.
-     */
-    public RegisterAttributes attributes(Register reg) {
-        return registerAttributes[reg.number];
-    }
-
-    void assignSpillSlot(TraceInterval interval) {
-        /*
-         * Assign the canonical spill slot of the parent (if a part of the interval is already
-         * spilled) or allocate a new spill slot.
-         */
-        if (interval.canMaterialize()) {
-            interval.assignLocation(Value.ILLEGAL);
-        } else if (interval.spillSlot() != null) {
-            interval.assignLocation(interval.spillSlot());
-        } else {
-            VirtualStackSlot slot = frameMapBuilder.allocateSpillSlot(interval.kind());
-            interval.setSpillSlot(slot);
-            interval.assignLocation(slot);
-        }
-    }
-
-    /**
-     * Map from {@linkplain #operandNumber(Value) operand numbers} to intervals.
-     */
-    public TraceInterval[] intervals() {
-        return intervals;
-    }
-
-    /**
-     * Map from {@linkplain Register#number register numbers} to fixed intervals.
-     */
-    public FixedInterval[] fixedIntervals() {
-        return fixedIntervals;
-    }
-
-    void initIntervals() {
-        intervalsSize = operandSize();
-        intervals = new TraceInterval[intervalsSize + (intervalsSize >> SPLIT_INTERVALS_CAPACITY_RIGHT_SHIFT)];
-    }
-
-    /**
-     * Creates a new fixed interval.
-     *
-     * @param reg the operand for the interval
-     * @return the created interval
-     */
-    FixedInterval createFixedInterval(RegisterValue reg) {
-        FixedInterval interval = new FixedInterval(reg);
-        int operandNumber = reg.getRegister().number;
-        assert fixedIntervals[operandNumber] == null;
-        fixedIntervals[operandNumber] = interval;
-        return interval;
-    }
-
-    /**
-     * Creates a new interval.
-     *
-     * @param operand the operand for the interval
-     * @return the created interval
-     */
-    TraceInterval createInterval(AllocatableValue operand) {
-        assert isLegal(operand);
-        int operandNumber = operandNumber(operand);
-        TraceInterval interval = new TraceInterval(operand, operandNumber);
-        assert operandNumber < intervalsSize;
-        assert intervals[operandNumber] == null;
-        intervals[operandNumber] = interval;
-        return interval;
-    }
-
-    /**
-     * Creates an interval as a result of splitting or spilling another interval.
-     *
-     * @param source an interval being split or spilled
-     * @return a new interval derived from {@code source}
-     */
-    TraceInterval createDerivedInterval(TraceInterval source) {
-        if (firstDerivedIntervalIndex == -1) {
-            firstDerivedIntervalIndex = intervalsSize;
-        }
-        if (intervalsSize == intervals.length) {
-            intervals = Arrays.copyOf(intervals, intervals.length + (intervals.length >> SPLIT_INTERVALS_CAPACITY_RIGHT_SHIFT));
-        }
-        intervalsSize++;
-        Variable variable = new Variable(source.kind(), ir.nextVariable());
-
-        TraceInterval interval = createInterval(variable);
-        assert intervals[intervalsSize - 1] == interval;
-        return interval;
-    }
-
-    // access to block list (sorted in linear scan order)
-    public int blockCount() {
-        return sortedBlocks.size();
-    }
-
-    public AbstractBlockBase<?> blockAt(int index) {
-        return sortedBlocks.get(index);
-    }
-
-    /**
-     * Gets the size of the {@link BlockData#liveIn} and {@link BlockData#liveOut} sets for a basic
-     * block. These sets do not include any operands allocated as a result of creating
-     * {@linkplain #createDerivedInterval(TraceInterval) derived intervals}.
-     */
-    public int liveSetSize() {
-        return firstDerivedIntervalIndex == -1 ? operandSize() : firstDerivedIntervalIndex;
-    }
-
-    int numLoops() {
-        return ir.getControlFlowGraph().getLoops().size();
-    }
-
-    public FixedInterval fixedIntervalFor(RegisterValue reg) {
-        return fixedIntervals[reg.getRegister().number];
-    }
-
-    public FixedInterval getOrCreateFixedInterval(RegisterValue reg) {
-        FixedInterval ret = fixedIntervalFor(reg);
-        if (ret == null) {
-            return createFixedInterval(reg);
-        } else {
-            return ret;
-        }
-    }
-
-    TraceInterval intervalFor(int operandNumber) {
-        return intervals[operandNumber];
-    }
-
-    public TraceInterval intervalFor(Value operand) {
-        int operandNumber = operandNumber(operand);
-        assert operandNumber < intervalsSize;
-        return intervals[operandNumber];
-    }
-
-    public TraceInterval getOrCreateInterval(AllocatableValue operand) {
-        TraceInterval ret = intervalFor(operand);
-        if (ret == null) {
-            return createInterval(operand);
-        } else {
-            return ret;
-        }
-    }
-
-    void initOpIdMaps(int numInstructions) {
-        opIdToInstructionMap = new LIRInstruction[numInstructions];
-        opIdToBlockMap = new AbstractBlockBase<?>[numInstructions];
-    }
-
-    void putOpIdMaps(int index, LIRInstruction op, AbstractBlockBase<?> block) {
-        opIdToInstructionMap[index] = op;
-        opIdToBlockMap[index] = block;
-    }
-
-    /**
-     * Gets the highest instruction id allocated by this object.
-     */
-    int maxOpId() {
-        assert opIdToInstructionMap.length > 0 : "no operations";
-        return (opIdToInstructionMap.length - 1) << 1;
-    }
-
-    /**
-     * Converts an {@linkplain LIRInstruction#id instruction id} to an instruction index. All LIR
-     * instructions in a method have an index one greater than their linear-scan order predecessor
-     * with the first instruction having an index of 0.
-     */
-    private static int opIdToIndex(int opId) {
-        return opId >> 1;
-    }
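A worked example of the id/index mapping, with a hypothetical instruction count: odd ids fall between instructions and never map to an instruction of their own, which is why instructionForId() asserts an even id.

    // numInstructions == 5  ->  ids 0, 2, 4, 6, 8
    // maxOpId()       == (5 - 1) << 1 == 8
    // opIdToIndex(6)  == 6 >> 1       == 3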
-
-    /**
-     * Retrieves the {@link LIRInstruction} based on its {@linkplain LIRInstruction#id id}.
-     *
-     * @param opId an instruction {@linkplain LIRInstruction#id id}
-     * @return the instruction whose {@linkplain LIRInstruction#id id} equals {@code opId}
-     */
-    public LIRInstruction instructionForId(int opId) {
-        assert isEven(opId) : "opId not even";
-        LIRInstruction instr = opIdToInstructionMap[opIdToIndex(opId)];
-        assert instr.id() == opId;
-        return instr;
-    }
-
-    /**
-     * Gets the block containing a given instruction.
-     *
-     * @param opId an instruction {@linkplain LIRInstruction#id id}
-     * @return the block containing the instruction denoted by {@code opId}
-     */
-    public AbstractBlockBase<?> blockForId(int opId) {
-        assert opIdToBlockMap.length > 0 && opId >= 0 && opId <= maxOpId() + 1 : "opId out of range: " + opId;
-        return opIdToBlockMap[opIdToIndex(opId)];
-    }
-
-    boolean isBlockBegin(int opId) {
-        return opId == 0 || blockForId(opId) != blockForId(opId - 1);
-    }
-
-    boolean isBlockEnd(int opId) {
-        boolean isBlockBegin = isBlockBegin(opId + 2);
-        assert isBlockBegin == (instructionForId(opId & (~1)) instanceof BlockEndOp);
-        return isBlockBegin;
-    }
-
-    boolean coversBlockBegin(int opId1, int opId2) {
-        return blockForId(opId1) != blockForId(opId2);
-    }
-
-    /**
-     * Determines if an {@link LIRInstruction} destroys all caller saved registers.
-     *
-     * @param opId an instruction {@linkplain LIRInstruction#id id}
-     * @return {@code true} if the instruction denoted by {@code opId} destroys all caller saved
-     *         registers.
-     */
-    boolean hasCall(int opId) {
-        assert isEven(opId) : "opId not even";
-        return instructionForId(opId).destroysCallerSavedRegisters();
-    }
-
-    abstract static class IntervalPredicate {
-
-        abstract boolean apply(TraceInterval i);
-    }
-
-    public boolean isProcessed(Value operand) {
-        return !isRegister(operand) || attributes(asRegister(operand)).isAllocatable();
-    }
-
-    // Phase 5: actual register allocation
-
-    private static <T extends IntervalHint> boolean isSortedByFrom(T[] intervals) {
-        int from = -1;
-        for (T interval : intervals) {
-            assert interval != null;
-            assert from <= interval.from();
-            from = interval.from();
-        }
-        return true;
-    }
-
-    private static boolean isSortedBySpillPos(TraceInterval[] intervals) {
-        int from = -1;
-        for (TraceInterval interval : intervals) {
-            assert interval != null;
-            assert from <= interval.spillDefinitionPos();
-            from = interval.spillDefinitionPos();
-        }
-        return true;
-    }
-
-    private static TraceInterval addToList(TraceInterval first, TraceInterval prev, TraceInterval interval) {
-        TraceInterval newFirst = first;
-        if (prev != null) {
-            prev.next = interval;
-        } else {
-            newFirst = interval;
-        }
-        return newFirst;
-    }
-
-    TraceInterval createUnhandledListByFrom(IntervalPredicate isList1) {
-        assert isSortedByFrom(sortedIntervals) : "interval list is not sorted";
-        return createUnhandledList(isList1);
-    }
-
-    TraceInterval createUnhandledListBySpillPos(IntervalPredicate isList1) {
-        assert isSortedBySpillPos(sortedIntervals) : "interval list is not sorted";
-        return createUnhandledList(isList1);
-    }
-
-    private TraceInterval createUnhandledList(IntervalPredicate isList1) {
-
-        TraceInterval list1 = TraceInterval.EndMarker;
-
-        TraceInterval list1Prev = null;
-        TraceInterval v;
-
-        int n = sortedIntervals.length;
-        for (int i = 0; i < n; i++) {
-            v = sortedIntervals[i];
-            if (v == null) {
-                continue;
-            }
-
-            if (isList1.apply(v)) {
-                list1 = addToList(list1, list1Prev, v);
-                list1Prev = v;
-            }
-        }
-
-        if (list1Prev != null) {
-            list1Prev.next = TraceInterval.EndMarker;
-        }
-
-        assert list1Prev == null || list1Prev.next == TraceInterval.EndMarker : "linear list ends not with sentinel";
-
-        return list1;
-    }
-
-    private static FixedInterval addToList(FixedInterval first, FixedInterval prev, FixedInterval interval) {
-        FixedInterval newFirst = first;
-        if (prev != null) {
-            prev.next = interval;
-        } else {
-            newFirst = interval;
-        }
-        return newFirst;
-    }
-
-    FixedInterval createFixedUnhandledList() {
-        assert isSortedByFrom(sortedFixedIntervals) : "interval list is not sorted";
-
-        FixedInterval list1 = FixedInterval.EndMarker;
-
-        FixedInterval list1Prev = null;
-        FixedInterval v;
-
-        int n = sortedFixedIntervals.length;
-        for (int i = 0; i < n; i++) {
-            v = sortedFixedIntervals[i];
-            if (v == null) {
-                continue;
-            }
-
-            v.rewindRange();
-            list1 = addToList(list1, list1Prev, v);
-            list1Prev = v;
-        }
-
-        if (list1Prev != null) {
-            list1Prev.next = FixedInterval.EndMarker;
-        }
-
-        assert list1Prev == null || list1Prev.next == FixedInterval.EndMarker : "linear list ends not with sentinel";
-
-        return list1;
-    }
-
-    // SORTING
-
-    protected void sortIntervalsBeforeAllocation() {
-        int sortedLen = 0;
-        for (TraceInterval interval : intervals) {
-            if (interval != null) {
-                sortedLen++;
-            }
-        }
-        sortedIntervals = sortIntervalsBeforeAllocation(intervals, new TraceInterval[sortedLen]);
-    }
-
-    protected void sortFixedIntervalsBeforeAllocation() {
-        int sortedLen = 0;
-        for (FixedInterval interval : fixedIntervals) {
-            if (interval != null) {
-                sortedLen++;
-            }
-        }
-        sortedFixedIntervals = sortIntervalsBeforeAllocation(fixedIntervals, new FixedInterval[sortedLen]);
-    }
-
-    private static <T extends IntervalHint> T[] sortIntervalsBeforeAllocation(T[] intervals, T[] sortedList) {
-        int sortedIdx = 0;
-        int sortedFromMax = -1;
-
-        // Special sorting algorithm: the original interval list is almost sorted and only a few
-        // intervals are out of place, so this is much faster than a full quicksort.
-        for (T interval : intervals) {
-            if (interval != null) {
-                int from = interval.from();
-
-                if (sortedFromMax <= from) {
-                    sortedList[sortedIdx++] = interval;
-                    sortedFromMax = interval.from();
-                } else {
-                    // the assumption that the intervals are already sorted does not hold here,
-                    // so this interval must be inserted at the correct position manually
-                    int j;
-                    for (j = sortedIdx - 1; j >= 0 && from < sortedList[j].from(); j--) {
-                        sortedList[j + 1] = sortedList[j];
-                    }
-                    sortedList[j + 1] = interval;
-                    sortedIdx++;
-                }
-            }
-        }
-        return sortedList;
-    }
-
-    void sortIntervalsAfterAllocation() {
-        if (firstDerivedIntervalIndex == -1) {
-            // no intervals have been added during allocation, so sorted list is already up to date
-            return;
-        }
-
-        TraceInterval[] oldList = sortedIntervals;
-        TraceInterval[] newList = Arrays.copyOfRange(intervals, firstDerivedIntervalIndex, intervalsSize);
-        int oldLen = oldList.length;
-        int newLen = newList.length;
-
-        // conventional sort-algorithm for new intervals
-        Arrays.sort(newList, (TraceInterval a, TraceInterval b) -> a.from() - b.from());
-
-        // merge old and new list (both already sorted) into one combined list
-        TraceInterval[] combinedList = new TraceInterval[oldLen + newLen];
-        int oldIdx = 0;
-        int newIdx = 0;
-
-        while (oldIdx + newIdx < combinedList.length) {
-            if (newIdx >= newLen || (oldIdx < oldLen && oldList[oldIdx].from() <= newList[newIdx].from())) {
-                combinedList[oldIdx + newIdx] = oldList[oldIdx];
-                oldIdx++;
-            } else {
-                combinedList[oldIdx + newIdx] = newList[newIdx];
-                newIdx++;
-            }
-        }
-
-        sortedIntervals = combinedList;
-    }
-
-    void sortIntervalsBySpillPos() {
-        // TODO (JE): better algorithm?
-        // conventional sort-algorithm for new intervals
-        Arrays.sort(sortedIntervals, (TraceInterval a, TraceInterval b) -> a.spillDefinitionPos() - b.spillDefinitionPos());
-    }
-
-    // wrapper for TraceInterval.getSplitChildAtOpId that performs a bailout in product mode
-    // instead of returning null
-    public TraceInterval splitChildAtOpId(TraceInterval interval, int opId, LIRInstruction.OperandMode mode) {
-        TraceInterval result = interval.getSplitChildAtOpId(opId, mode, this);
-
-        if (result != null) {
-            if (Debug.isLogEnabled()) {
-                Debug.log("Split child at pos %d of interval %s is %s", opId, interval, result);
-            }
-            return result;
-        }
-
-        throw new BailoutException("LinearScan: interval is null");
-    }
-
-    static AllocatableValue canonicalSpillOpr(TraceInterval interval) {
-        assert interval.spillSlot() != null : "canonical spill slot not set";
-        return interval.spillSlot();
-    }
-
-    boolean isMaterialized(AllocatableValue operand, int opId, OperandMode mode) {
-        TraceInterval interval = intervalFor(operand);
-        assert interval != null : "interval must exist";
-
-        if (opId != -1) {
-            /*
-             * Operands are not changed when an interval is split during allocation, so search the
-             * right interval here.
-             */
-            interval = splitChildAtOpId(interval, opId, mode);
-        }
-
-        return isIllegal(interval.location()) && interval.canMaterialize();
-    }
-
-    boolean isCallerSave(Value operand) {
-        return attributes(asRegister(operand)).isCallerSave();
-    }
-
-    @SuppressWarnings("try")
-    protected <B extends AbstractBlockBase<B>> void allocate(TargetDescription target, LIRGenerationResult lirGenRes, List<B> codeEmittingOrder, List<B> linearScanOrder, MoveFactory spillMoveFactory,
-                    RegisterAllocationConfig registerAllocationConfig) {
-
-        /*
-         * This is the point to enable debug logging for the whole register allocation.
-         */
-        try (Indent indent = Debug.logAndIndent("LinearScan allocate")) {
-            TraceLinearScanAllocationContext context = new TraceLinearScanAllocationContext(spillMoveFactory, registerAllocationConfig, traceBuilderResult, this);
-
-            TRACE_LINEAR_SCAN_LIFETIME_ANALYSIS_PHASE.apply(target, lirGenRes, codeEmittingOrder, linearScanOrder, context, false);
-
-            try (Scope s = Debug.scope("AfterLifetimeAnalysis", (Object) intervals())) {
-                sortIntervalsBeforeAllocation();
-                sortFixedIntervalsBeforeAllocation();
-
-                TRACE_LINEAR_SCAN_REGISTER_ALLOCATION_PHASE.apply(target, lirGenRes, codeEmittingOrder, linearScanOrder, context, false);
-
-                // resolve intra-trace data-flow
-                TRACE_LINEAR_SCAN_RESOLVE_DATA_FLOW_PHASE.apply(target, lirGenRes, codeEmittingOrder, linearScanOrder, context, false);
-                Debug.dump(TraceRegisterAllocationPhase.TRACE_DUMP_LEVEL, sortedBlocks(), "%s", TRACE_LINEAR_SCAN_RESOLVE_DATA_FLOW_PHASE.getName());
-
-                // eliminate spill moves
-                if (Options.LIROptTraceRAEliminateSpillMoves.getValue()) {
-                    TRACE_LINEAR_SCAN_ELIMINATE_SPILL_MOVE_PHASE.apply(target, lirGenRes, codeEmittingOrder, linearScanOrder, context, false);
-                    Debug.dump(TraceRegisterAllocationPhase.TRACE_DUMP_LEVEL, sortedBlocks(), "%s", TRACE_LINEAR_SCAN_ELIMINATE_SPILL_MOVE_PHASE.getName());
-                }
-
-                TRACE_LINEAR_SCAN_ASSIGN_LOCATIONS_PHASE.apply(target, lirGenRes, codeEmittingOrder, linearScanOrder, context, false);
-
-                if (DetailedAsserts.getValue()) {
-                    verifyIntervals();
-                }
-            } catch (Throwable e) {
-                throw Debug.handle(e);
-            }
-        }
-    }
-
-    @SuppressWarnings("try")
-    public void printIntervals(String label) {
-        if (Debug.isDumpEnabled(TraceRegisterAllocationPhase.TRACE_DUMP_LEVEL)) {
-            if (Debug.isLogEnabled()) {
-                try (Indent indent = Debug.logAndIndent("intervals %s", label)) {
-                    for (FixedInterval interval : fixedIntervals) {
-                        if (interval != null) {
-                            Debug.log("%s", interval.logString(this));
-                        }
-                    }
-
-                    for (TraceInterval interval : intervals) {
-                        if (interval != null) {
-                            Debug.log("%s", interval.logString(this));
-                        }
-                    }
-
-                    try (Indent indent2 = Debug.logAndIndent("Basic Blocks")) {
-                        for (int i = 0; i < blockCount(); i++) {
-                            AbstractBlockBase<?> block = blockAt(i);
-                            Debug.log("B%d [%d, %d, %s] ", block.getId(), getFirstLirInstructionId(block), getLastLirInstructionId(block), block.getLoop());
-                        }
-                    }
-                }
-            }
-            Debug.dump(new TraceIntervalDumper(Arrays.copyOf(fixedIntervals, fixedIntervals.length), Arrays.copyOf(intervals, intervalsSize)), label);
-        }
-    }
-
-    public void printLir(String label, @SuppressWarnings("unused") boolean hirValid) {
-        if (Debug.isDumpEnabled(TraceRegisterAllocationPhase.TRACE_DUMP_LEVEL)) {
-            Debug.dump(TraceRegisterAllocationPhase.TRACE_DUMP_LEVEL, sortedBlocks(), label);
-        }
-    }
-
-    boolean verify() {
-        // (check that all intervals have a correct register and that no registers are overwritten)
-        verifyIntervals();
-
-        verifyRegisters();
-
-        Debug.log("no errors found");
-
-        return true;
-    }
-
-    @SuppressWarnings("try")
-    private void verifyRegisters() {
-        // Enable this logging to get output for the verification process.
-        try (Indent indent = Debug.logAndIndent("verifying register allocation")) {
-            RegisterVerifier verifier = new RegisterVerifier(this);
-            verifier.verify(blockAt(0));
-        }
-    }
-
-    @SuppressWarnings("try")
-    protected void verifyIntervals() {
-        try (Indent indent = Debug.logAndIndent("verifying intervals")) {
-            int len = intervalsSize;
-
-            for (int i = 0; i < len; i++) {
-                final TraceInterval i1 = intervals[i];
-                if (i1 == null) {
-                    continue;
-                }
-
-                i1.checkSplitChildren();
-
-                if (i1.operandNumber != i) {
-                    Debug.log("Interval %d is on position %d in list", i1.operandNumber, i);
-                    Debug.log(i1.logString(this));
-                    throw new JVMCIError("");
-                }
-
-                if (isVariable(i1.operand) && i1.kind().equals(LIRKind.Illegal)) {
-                    Debug.log("Interval %d has no type assigned", i1.operandNumber);
-                    Debug.log(i1.logString(this));
-                    throw new JVMCIError("");
-                }
-
-                if (i1.location() == null) {
-                    Debug.log("Interval %d has no register assigned", i1.operandNumber);
-                    Debug.log(i1.logString(this));
-                    throw new JVMCIError("");
-                }
-
-                if (i1.isEmpty()) {
-                    Debug.log("Interval %d has no Range", i1.operandNumber);
-                    Debug.log(i1.logString(this));
-                    throw new JVMCIError("");
-                }
-
-                if (i1.from() >= i1.to()) {
-                    Debug.log("Interval %d has zero length range", i1.operandNumber);
-                    Debug.log(i1.logString(this));
-                    throw new JVMCIError("");
-                }
-
-                // special intervals that are created in MoveResolver:
-                // ignore them because the range information has no meaning there
-                if (i1.from() == 1 && i1.to() == 2) {
-                    continue;
-                }
-                // check any intervals
-                for (int j = i + 1; j < len; j++) {
-                    final TraceInterval i2 = intervals[j];
-                    if (i2 == null) {
-                        continue;
-                    }
-
-                    // special intervals that are created in MoveResolver:
-                    // ignore them because the range information has no meaning there
-                    if (i2.from() == 1 && i2.to() == 2) {
-                        continue;
-                    }
-                    Value l1 = i1.location();
-                    Value l2 = i2.location();
-                    boolean intersects = i1.intersects(i2);
-                    if (intersects && !isIllegal(l1) && (l1.equals(l2))) {
-                        if (DetailedAsserts.getValue()) {
-                            Debug.log("Intervals %s and %s overlap and have the same register assigned", i1, i2);
-                            Debug.log(i1.logString(this));
-                            Debug.log(i2.logString(this));
-                        }
-                        throw new BailoutException("");
-                    }
-                }
-                // check fixed intervals
-                for (FixedInterval i2 : fixedIntervals) {
-                    if (i2 == null) {
-                        continue;
-                    }
-
-                    Value l1 = i1.location();
-                    Value l2 = i2.location();
-                    boolean intersects = i2.intersects(i1);
-                    if (intersects && !isIllegal(l1) && (l1.equals(l2))) {
-                        if (DetailedAsserts.getValue()) {
-                            Debug.log("Intervals %s and %s overlap and have the same register assigned", i1, i2);
-                            Debug.log(i1.logString(this));
-                            Debug.log(i2.logString(this));
-                        }
-                        throw new BailoutException("");
-                    }
-                }
-            }
-        }
-    }
-
-    class CheckConsumer implements ValueConsumer {
-
-        boolean ok;
-        FixedInterval curInterval;
-
-        @Override
-        public void visitValue(Value operand, OperandMode mode, EnumSet<OperandFlag> flags) {
-            if (isRegister(operand)) {
-                if (fixedIntervalFor(asRegisterValue(operand)) == curInterval) {
-                    ok = true;
-                }
-            }
-        }
-    }
-
-    @SuppressWarnings("try")
-    void verifyNoOopsInFixedIntervals() {
-        try (Indent indent = Debug.logAndIndent("verifying that no oops are in fixed intervals *")) {
-            CheckConsumer checkConsumer = new CheckConsumer();
-
-            TraceInterval otherIntervals;
-            FixedInterval fixedInts = createFixedUnhandledList();
-            // to ensure that the walker runs until the last instruction id, add a dummy interval
-            // with a high operation id
-            otherIntervals = new TraceInterval(Value.ILLEGAL, -1);
-            otherIntervals.addRange(Integer.MAX_VALUE - 2, Integer.MAX_VALUE - 1);
-            TraceIntervalWalker iw = new TraceIntervalWalker(this, fixedInts, otherIntervals);
-
-            for (AbstractBlockBase<?> block : sortedBlocks) {
-                List<LIRInstruction> instructions = ir.getLIRforBlock(block);
-
-                for (int j = 0; j < instructions.size(); j++) {
-                    LIRInstruction op = instructions.get(j);
-
-                    if (op.hasState()) {
-                        iw.walkBefore(op.id());
-                        boolean checkLive = true;
-
-                        /*
-                         * Make sure none of the fixed registers is live across an oopmap since we
-                         * can't handle that correctly.
-                         */
-                        if (checkLive) {
-                            for (FixedInterval interval = iw.activeFixedList.getFixed(); interval != FixedInterval.EndMarker; interval = interval.next) {
-                                if (interval.to() > op.id() + 1) {
-                                    /*
-                                     * This interval is live out of this op so make sure that this
-                                     * interval represents some value that's referenced by this op
-                                     * either as an input or output.
-                                     */
-                                    checkConsumer.curInterval = interval;
-                                    checkConsumer.ok = false;
-
-                                    op.visitEachInput(checkConsumer);
-                                    op.visitEachAlive(checkConsumer);
-                                    op.visitEachTemp(checkConsumer);
-                                    op.visitEachOutput(checkConsumer);
-
-                                    assert checkConsumer.ok : "fixed intervals should never be live across an oopmap point";
-                                }
-                            }
-                        }
-                    }
-                }
-            }
-        }
-    }
-
-    public LIR getLIR() {
-        return ir;
-    }
-
-    public FrameMapBuilder getFrameMapBuilder() {
-        return frameMapBuilder;
-    }
-
-    public List<? extends AbstractBlockBase<?>> sortedBlocks() {
-        return sortedBlocks;
-    }
-
-    public Register[] getRegisters() {
-        return registers;
-    }
-
-    public RegisterAllocationConfig getRegisterAllocationConfig() {
-        return regAllocConfig;
-    }
-
-    public boolean callKillsRegisters() {
-        return regAllocConfig.getRegisterConfig().areAllAllocatableRegistersCallerSaved();
-    }
-
-    boolean neverSpillConstants() {
-        return neverSpillConstants;
-    }
-
-}
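
A minimal, self-contained sketch (hypothetical names, not part of this changeset) of the two-list
merge that sortIntervalsAfterAllocation performs above; Interval stands in for TraceInterval and
only exposes the start position:

    interface Interval {
        int from();
    }

    final class SortedIntervalMerge {

        // Merge two arrays that are each sorted by from() into one sorted array.
        // Ties prefer the old list, mirroring the stable merge in the phase above.
        static Interval[] merge(Interval[] oldList, Interval[] newList) {
            Interval[] combined = new Interval[oldList.length + newList.length];
            int oldIdx = 0;
            int newIdx = 0;
            while (oldIdx + newIdx < combined.length) {
                if (newIdx >= newList.length || (oldIdx < oldList.length && oldList[oldIdx].from() <= newList[newIdx].from())) {
                    combined[oldIdx + newIdx] = oldList[oldIdx++];
                } else {
                    combined[oldIdx + newIdx] = newList[newIdx++];
                }
            }
            return combined;
        }
    }
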
--- a/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceLinearScanAllocationPhase.java	Mon Nov 30 15:07:18 2015 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,45 +0,0 @@
-/*
- * Copyright (c) 2015, 2015, Oracle and/or its affiliates. All rights reserved.
- * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
- *
- * This code is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License version 2 only, as
- * published by the Free Software Foundation.
- *
- * This code is distributed in the hope that it will be useful, but WITHOUT
- * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
- * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
- * version 2 for more details (a copy is included in the LICENSE file that
- * accompanied this code).
- *
- * You should have received a copy of the GNU General Public License version
- * 2 along with this work; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
- *
- * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
- * or visit www.oracle.com if you need additional information or have any
- * questions.
- */
-package com.oracle.graal.lir.alloc.trace;
-
-import com.oracle.graal.compiler.common.alloc.RegisterAllocationConfig;
-import com.oracle.graal.compiler.common.alloc.TraceBuilder.TraceBuilderResult;
-import com.oracle.graal.lir.gen.LIRGeneratorTool.MoveFactory;
-import com.oracle.graal.lir.phases.LIRPhase;
-
-public abstract class TraceLinearScanAllocationPhase extends LIRPhase<TraceLinearScanAllocationPhase.TraceLinearScanAllocationContext> {
-
-    public static final class TraceLinearScanAllocationContext {
-        public final MoveFactory spillMoveFactory;
-        public final RegisterAllocationConfig registerAllocationConfig;
-        public final TraceBuilderResult<?> traceBuilderResult;
-        public final TraceLinearScan allocator;
-
-        public TraceLinearScanAllocationContext(MoveFactory spillMoveFactory, RegisterAllocationConfig registerAllocationConfig, TraceBuilderResult<?> traceBuilderResult, TraceLinearScan allocator) {
-            this.spillMoveFactory = spillMoveFactory;
-            this.registerAllocationConfig = registerAllocationConfig;
-            this.traceBuilderResult = traceBuilderResult;
-            this.allocator = allocator;
-        }
-    }
-}
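
The context class above simply bundles per-run state (spill move factory, register allocation
config, trace builder result, allocator) so that each phase's run method receives one object
instead of a long parameter list. A minimal hypothetical sketch of that pattern, using stand-in
names rather than the Graal LIR classes:

    final class ContextPatternSketch {

        // Stand-in for TraceLinearScanAllocationContext: immutable per-run state.
        static final class Context {
            final String allocator;          // stands in for TraceLinearScan
            final String spillMoveFactory;   // stands in for MoveFactory

            Context(String allocator, String spillMoveFactory) {
                this.allocator = allocator;
                this.spillMoveFactory = spillMoveFactory;
            }
        }

        // Stand-in for TraceLinearScanAllocationPhase: every phase sees the same context.
        interface Phase {
            void run(Context context);
        }

        public static void main(String[] args) {
            Context context = new Context("allocator", "spillMoveFactory");
            Phase lifetimeAnalysis = ctx -> System.out.println("analyse " + ctx.allocator);
            Phase assignLocations = ctx -> System.out.println("assign using " + ctx.spillMoveFactory);
            for (Phase phase : new Phase[]{lifetimeAnalysis, assignLocations}) {
                phase.run(context);
            }
        }
    }
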
--- a/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceLinearScanAssignLocationsPhase.java	Mon Nov 30 15:07:18 2015 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,286 +0,0 @@
-/*
- * Copyright (c) 2015, 2015, Oracle and/or its affiliates. All rights reserved.
- * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
- *
- * This code is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License version 2 only, as
- * published by the Free Software Foundation.
- *
- * This code is distributed in the hope that it will be useful, but WITHOUT
- * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
- * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
- * version 2 for more details (a copy is included in the LICENSE file that
- * accompanied this code).
- *
- * You should have received a copy of the GNU General Public License version
- * 2 along with this work; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
- *
- * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
- * or visit www.oracle.com if you need additional information or have any
- * questions.
- */
-package com.oracle.graal.lir.alloc.trace;
-
-import static com.oracle.graal.compiler.common.GraalOptions.DetailedAsserts;
-import static com.oracle.graal.lir.LIRValueUtil.isConstantValue;
-import static com.oracle.graal.lir.LIRValueUtil.isStackSlotValue;
-import static com.oracle.graal.lir.LIRValueUtil.isVariable;
-import static com.oracle.graal.lir.LIRValueUtil.isVirtualStackSlot;
-import static com.oracle.graal.lir.alloc.trace.TraceRegisterAllocationPhase.Options.TraceRAshareSpillInformation;
-import static jdk.vm.ci.code.ValueUtil.isIllegal;
-import static jdk.vm.ci.code.ValueUtil.isRegister;
-
-import java.util.Collections;
-import java.util.EnumSet;
-import java.util.List;
-
-import jdk.vm.ci.code.RegisterValue;
-import jdk.vm.ci.code.StackSlot;
-import jdk.vm.ci.code.TargetDescription;
-import jdk.vm.ci.meta.AllocatableValue;
-import jdk.vm.ci.meta.Value;
-
-import com.oracle.graal.compiler.common.cfg.AbstractBlockBase;
-import com.oracle.graal.debug.Debug;
-import com.oracle.graal.debug.Indent;
-import com.oracle.graal.lir.ConstantValue;
-import com.oracle.graal.lir.InstructionValueProcedure;
-import com.oracle.graal.lir.LIRFrameState;
-import com.oracle.graal.lir.LIRInstruction;
-import com.oracle.graal.lir.LIRInstruction.OperandFlag;
-import com.oracle.graal.lir.LIRInstruction.OperandMode;
-import com.oracle.graal.lir.StandardOp;
-import com.oracle.graal.lir.StandardOp.BlockEndOp;
-import com.oracle.graal.lir.StandardOp.LabelOp;
-import com.oracle.graal.lir.StandardOp.MoveOp;
-import com.oracle.graal.lir.StandardOp.ValueMoveOp;
-import com.oracle.graal.lir.Variable;
-import com.oracle.graal.lir.gen.LIRGenerationResult;
-import com.oracle.graal.lir.gen.LIRGeneratorTool.MoveFactory;
-
-/**
- * Specialization of {@link com.oracle.graal.lir.alloc.lsra.LinearScanAssignLocationsPhase} that
- * inserts {@link ShadowedRegisterValue}s to describe {@link RegisterValue}s that are also available
- * on the {@link StackSlot stack}.
- */
-final class TraceLinearScanAssignLocationsPhase extends TraceLinearScanAllocationPhase {
-
-    @Override
-    protected <B extends AbstractBlockBase<B>> void run(TargetDescription target, LIRGenerationResult lirGenRes, List<B> codeEmittingOrder, List<B> linearScanOrder,
-                    TraceLinearScanAllocationContext context) {
-        TraceLinearScan allocator = context.allocator;
-        MoveFactory spillMoveFactory = context.spillMoveFactory;
-        new Assigner(allocator, spillMoveFactory).assignLocations();
-    }
-
-    private static final class Assigner {
-        private final TraceLinearScan allocator;
-        private final MoveFactory spillMoveFactory;
-
-        private Assigner(TraceLinearScan allocator, MoveFactory spillMoveFactory) {
-            this.allocator = allocator;
-            this.spillMoveFactory = spillMoveFactory;
-        }
-
-        /**
-         * Determines the location assigned to an LIR instruction operand so that it can be
-         * written back into the instruction.
-         *
-         * @param op current {@link LIRInstruction}
-         * @param operand an LIR instruction operand
-         * @param mode the usage mode for {@code operand} by the instruction
-         * @return the location assigned for the operand
-         */
-        private Value colorLirOperand(LIRInstruction op, Variable operand, OperandMode mode) {
-            int opId = op.id();
-            TraceInterval interval = allocator.intervalFor(operand);
-            assert interval != null : "interval must exist";
-
-            if (opId != -1) {
-                if (DetailedAsserts.getValue()) {
-                    AbstractBlockBase<?> block = allocator.blockForId(opId);
-                    if (block.getSuccessorCount() <= 1 && opId == allocator.getLastLirInstructionId(block)) {
-                        /*
-                         * Check if spill moves could have been appended at the end of this block,
-                         * but before the branch instruction. So the split child information for
-                         * this branch would be incorrect.
-                         */
-                        LIRInstruction instr = allocator.getLIR().getLIRforBlock(block).get(allocator.getLIR().getLIRforBlock(block).size() - 1);
-                        if (instr instanceof StandardOp.JumpOp) {
-                            if (allocator.getBlockData(block).liveOut.get(allocator.operandNumber(operand))) {
-                                assert false : String.format(
-                                                "can't get split child for the last branch of a block because the information would be incorrect (moves are inserted before the branch in resolveDataFlow) block=%s, instruction=%s, operand=%s",
-                                                block, instr, operand);
-                            }
-                        }
-                    }
-                }
-
-                /*
-                 * Operands are not changed when an interval is split during allocation, so search
-                 * the right interval here.
-                 */
-                interval = allocator.splitChildAtOpId(interval, opId, mode);
-            }
-
-            if (isIllegal(interval.location()) && interval.canMaterialize()) {
-                assert mode != OperandMode.DEF;
-                return new ConstantValue(interval.kind(), interval.getMaterializedValue());
-            }
-            return interval.location();
-        }
-
-        /**
-         * @param op
-         * @param operand
-         * @param valueMode
-         * @param flags
-         * @see InstructionValueProcedure#doValue(LIRInstruction, Value, OperandMode, EnumSet)
-         */
-        private Value debugInfoProcedure(LIRInstruction op, Value operand, OperandMode valueMode, EnumSet<OperandFlag> flags) {
-            if (isVirtualStackSlot(operand)) {
-                return operand;
-            }
-            int tempOpId = op.id();
-            OperandMode mode = OperandMode.USE;
-            AbstractBlockBase<?> block = allocator.blockForId(tempOpId);
-            if (block.getSuccessorCount() == 1 && tempOpId == allocator.getLastLirInstructionId(block)) {
-                /*
-                 * Generating debug information for the last instruction of a block. If this
-                 * instruction is a branch, spill moves are inserted before this branch and so the
-                 * wrong operand would be returned (spill moves at block boundaries are not
-                 * considered in the live ranges of intervals).
-                 *
-                 * Solution: use the first opId of the branch target block instead.
-                 */
-                final LIRInstruction instr = allocator.getLIR().getLIRforBlock(block).get(allocator.getLIR().getLIRforBlock(block).size() - 1);
-                if (instr instanceof StandardOp.JumpOp) {
-                    if (allocator.getBlockData(block).liveOut.get(allocator.operandNumber(operand))) {
-                        tempOpId = allocator.getFirstLirInstructionId(block.getSuccessors().iterator().next());
-                        mode = OperandMode.DEF;
-                    }
-                }
-            }
-
-            /*
-             * Get the current location of the operand. The operand must be live because debug
-             * information is considered when building the intervals. If the interval is not live,
-             * colorLirOperand will fail with an assertion.
-             */
-            Value result = colorLirOperand(op, (Variable) operand, mode);
-            assert !allocator.hasCall(tempOpId) || isStackSlotValue(result) || isConstantValue(result) || !allocator.isCallerSave(result) : "cannot have caller-save register operands at calls";
-            return result;
-        }
-
-        private void computeDebugInfo(final LIRInstruction op, LIRFrameState info) {
-            info.forEachState(op, this::debugInfoProcedure);
-        }
-
-        private void assignLocations(List<LIRInstruction> instructions) {
-            int numInst = instructions.size();
-            boolean hasDead = false;
-
-            for (int j = 0; j < numInst; j++) {
-                final LIRInstruction op = instructions.get(j);
-                if (op == null) {
-                    /*
-                     * this can happen when spill-moves are removed in eliminateSpillMoves
-                     */
-                    hasDead = true;
-                } else if (assignLocations(op, instructions, j)) {
-                    hasDead = true;
-                }
-            }
-
-            if (hasDead) {
-                // Remove null values from the list.
-                instructions.removeAll(Collections.singleton(null));
-            }
-        }
-
-        /**
-         * Assigns the operand of an {@link LIRInstruction}.
-         *
-         * @param op The {@link LIRInstruction} that should be colored.
-         * @param j The index of {@code op} in the {@code instructions} list.
-         * @param instructions The instructions of the current block.
-         * @return {@code true} if the instruction was deleted.
-         */
-        private boolean assignLocations(LIRInstruction op, List<LIRInstruction> instructions, int j) {
-            assert op != null && instructions.get(j) == op;
-            if (TraceRAshareSpillInformation.getValue()) {
-                if (op instanceof BlockEndOp) {
-                    ((BlockEndOp) op).forEachOutgoingValue(colorOutgoingIncomingValues);
-                } else if (op instanceof LabelOp) {
-                    ((LabelOp) op).forEachIncomingValue(colorOutgoingIncomingValues);
-                }
-            }
-
-            InstructionValueProcedure assignProc = (inst, operand, mode, flags) -> isVariable(operand) ? colorLirOperand(inst, (Variable) operand, mode) : operand;
-            // remove useless moves
-            if (op instanceof MoveOp) {
-                AllocatableValue result = ((MoveOp) op).getResult();
-                if (isVariable(result) && allocator.isMaterialized(result, op.id(), OperandMode.DEF)) {
-                    /*
-                     * This happens if a materializable interval is originally not spilled but then
-                     * kicked out in LinearScanWalker.splitForSpilling(). When kicking out such an
-                     * interval this move operation was already generated.
-                     */
-                    instructions.set(j, null);
-                    return true;
-                }
-            }
-
-            op.forEachInput(assignProc);
-            op.forEachAlive(assignProc);
-            op.forEachTemp(assignProc);
-            op.forEachOutput(assignProc);
-
-            // compute reference map and debug information
-            op.forEachState((inst, state) -> computeDebugInfo(inst, state));
-
-            // remove useless moves
-            if (op instanceof ValueMoveOp) {
-                ValueMoveOp move = (ValueMoveOp) op;
-                if (move.getInput().equals(move.getResult())) {
-                    instructions.set(j, null);
-                    return true;
-                }
-                if (isStackSlotValue(move.getInput()) && isStackSlotValue(move.getResult())) {
-                    // rewrite stack to stack moves
-                    instructions.set(j, spillMoveFactory.createStackMove(move.getResult(), move.getInput()));
-                    return true;
-                }
-            }
-            return false;
-        }
-
-        @SuppressWarnings("try")
-        private void assignLocations() {
-            try (Indent indent = Debug.logAndIndent("assign locations")) {
-                for (AbstractBlockBase<?> block : allocator.sortedBlocks()) {
-                    try (Indent indent2 = Debug.logAndIndent("assign locations in block B%d", block.getId())) {
-                        assignLocations(allocator.getLIR().getLIRforBlock(block));
-                    }
-                }
-            }
-        }
-
-        private InstructionValueProcedure colorOutgoingIncomingValues = new InstructionValueProcedure() {
-
-            public Value doValue(LIRInstruction instruction, Value value, OperandMode mode, EnumSet<OperandFlag> flags) {
-                if (isVariable(value)) {
-                    TraceInterval interval = allocator.intervalFor(value);
-                    assert interval != null : "interval must exist";
-                    interval = allocator.splitChildAtOpId(interval, instruction.id(), mode);
-
-                    if (interval.inMemoryAt(instruction.id()) && isRegister(interval.location())) {
-                        return new ShadowedRegisterValue((RegisterValue) interval.location(), interval.spillSlot());
-                    }
-                }
-                return value;
-            }
-        };
-    }
-
-}
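
A minimal sketch (hypothetical types, not Graal API) of the move clean-up decision made in
assignLocations(LIRInstruction, List, int) above: once locations are assigned, an identity move is
deleted and a move between two stack slots is rewritten through the spill move factory:

    import java.util.Objects;

    final class MoveCleanupSketch {

        enum Action { DELETE, REWRITE_AS_STACK_MOVE, KEEP }

        // Simplified location: either a register or a stack slot, identified by name.
        static final class Location {
            final boolean onStack;
            final String name;

            Location(boolean onStack, String name) {
                this.onStack = onStack;
                this.name = name;
            }

            @Override
            public boolean equals(Object o) {
                return o instanceof Location && ((Location) o).onStack == onStack && ((Location) o).name.equals(name);
            }

            @Override
            public int hashCode() {
                return Objects.hash(onStack, name);
            }
        }

        static Action cleanup(Location input, Location result) {
            if (input.equals(result)) {
                return Action.DELETE;                 // useless move: input and result coincide
            }
            if (input.onStack && result.onStack) {
                return Action.REWRITE_AS_STACK_MOVE;  // delegated to the spill move factory
            }
            return Action.KEEP;
        }
    }
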
--- a/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceLinearScanEliminateSpillMovePhase.java	Mon Nov 30 15:07:18 2015 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,237 +0,0 @@
-/*
- * Copyright (c) 2015, 2015, Oracle and/or its affiliates. All rights reserved.
- * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
- *
- * This code is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License version 2 only, as
- * published by the Free Software Foundation.
- *
- * This code is distributed in the hope that it will be useful, but WITHOUT
- * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
- * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
- * version 2 for more details (a copy is included in the LICENSE file that
- * accompanied this code).
- *
- * You should have received a copy of the GNU General Public License version
- * 2 along with this work; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
- *
- * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
- * or visit www.oracle.com if you need additional information or have any
- * questions.
- */
-package com.oracle.graal.lir.alloc.trace;
-
-import static com.oracle.graal.compiler.common.GraalOptions.DetailedAsserts;
-import static com.oracle.graal.lir.LIRValueUtil.isStackSlotValue;
-import static com.oracle.graal.lir.LIRValueUtil.isVariable;
-import static jdk.vm.ci.code.ValueUtil.isRegister;
-
-import java.util.List;
-
-import jdk.vm.ci.code.TargetDescription;
-import jdk.vm.ci.meta.AllocatableValue;
-
-import com.oracle.graal.compiler.common.alloc.TraceBuilder.TraceBuilderResult;
-import com.oracle.graal.compiler.common.cfg.AbstractBlockBase;
-import com.oracle.graal.debug.Debug;
-import com.oracle.graal.debug.Indent;
-import com.oracle.graal.lir.LIRInsertionBuffer;
-import com.oracle.graal.lir.LIRInstruction;
-import com.oracle.graal.lir.LIRInstruction.OperandMode;
-import com.oracle.graal.lir.StandardOp.LoadConstantOp;
-import com.oracle.graal.lir.StandardOp.MoveOp;
-import com.oracle.graal.lir.StandardOp.ValueMoveOp;
-import com.oracle.graal.lir.alloc.trace.TraceInterval.SpillState;
-import com.oracle.graal.lir.alloc.trace.TraceLinearScan.IntervalPredicate;
-import com.oracle.graal.lir.gen.LIRGenerationResult;
-
-final class TraceLinearScanEliminateSpillMovePhase extends TraceLinearScanAllocationPhase {
-
-    private static final IntervalPredicate spilledIntervals = new TraceLinearScan.IntervalPredicate() {
-
-        @Override
-        public boolean apply(TraceInterval i) {
-            return i.isSplitParent() && SpillState.IN_MEMORY.contains(i.spillState());
-        }
-    };
-
-    @Override
-    protected <B extends AbstractBlockBase<B>> void run(TargetDescription target, LIRGenerationResult lirGenRes, List<B> codeEmittingOrder, List<B> linearScanOrder,
-                    TraceLinearScanAllocationContext context) {
-        TraceBuilderResult<?> traceBuilderResult = context.traceBuilderResult;
-        TraceLinearScan allocator = context.allocator;
-        boolean shouldEliminateSpillMoves = shouldEliminateSpillMoves(traceBuilderResult, allocator);
-        eliminateSpillMoves(allocator, shouldEliminateSpillMoves, traceBuilderResult);
-    }
-
-    private static boolean shouldEliminateSpillMoves(TraceBuilderResult<?> traceBuilderResult, TraceLinearScan allocator) {
-        return !traceBuilderResult.incomingSideEdges(traceBuilderResult.getTraceForBlock(allocator.sortedBlocks().get(0)));
-    }
-
-    // called once before assignment of register numbers
-    @SuppressWarnings("try")
-    private static void eliminateSpillMoves(TraceLinearScan allocator, boolean shouldEliminateSpillMoves, TraceBuilderResult<?> traceBuilderResult) {
-        try (Indent indent = Debug.logAndIndent("Eliminating unnecessary spill moves: Trace%d", traceBuilderResult.getTraceForBlock(allocator.sortedBlocks().get(0)))) {
-            allocator.sortIntervalsBySpillPos();
-
-            /*
-             * collect all intervals that must be stored after their definition. The list is sorted
-             * by Interval.spillDefinitionPos.
-             */
-            TraceInterval interval = allocator.createUnhandledListBySpillPos(spilledIntervals);
-            if (DetailedAsserts.getValue()) {
-                checkIntervals(interval);
-            }
-            if (Debug.isLogEnabled()) {
-                try (Indent indent2 = Debug.logAndIndent("Sorted intervals")) {
-                    for (TraceInterval i = interval; i != null; i = i.next) {
-                        Debug.log("%5d: %s", i.spillDefinitionPos(), i);
-                    }
-                }
-            }
-
-            LIRInsertionBuffer insertionBuffer = new LIRInsertionBuffer();
-            for (AbstractBlockBase<?> block : allocator.sortedBlocks()) {
-                try (Indent indent1 = Debug.logAndIndent("Handle %s", block)) {
-                    List<LIRInstruction> instructions = allocator.getLIR().getLIRforBlock(block);
-                    int numInst = instructions.size();
-
-                    int lastOpId = -1;
-                    // iterate all instructions of the block.
-                    for (int j = 0; j < numInst; j++) {
-                        LIRInstruction op = instructions.get(j);
-                        int opId = op.id();
-                        try (Indent indent2 = Debug.logAndIndent("%5d %s", opId, op)) {
-
-                            if (opId == -1) {
-                                MoveOp move = (MoveOp) op;
-                                /*
-                                 * Remove move from register to stack if the stack slot is
-                                 * guaranteed to be correct. Only moves that have been inserted by
-                                 * LinearScan can be removed.
-                                 */
-                                if (shouldEliminateSpillMoves && canEliminateSpillMove(allocator, block, move, lastOpId)) {
-                                    /*
-                                     * Move target is a stack slot that is always correct, so
-                                     * eliminate instruction.
-                                     */
-                                    if (Debug.isLogEnabled()) {
-                                        if (move instanceof ValueMoveOp) {
-                                            ValueMoveOp vmove = (ValueMoveOp) move;
-                                            Debug.log("eliminating move from interval %d (%s) to %d (%s) in block %s", allocator.operandNumber(vmove.getInput()), vmove.getInput(),
-                                                            allocator.operandNumber(vmove.getResult()), vmove.getResult(), block);
-                                        } else {
-                                            LoadConstantOp load = (LoadConstantOp) move;
-                                            Debug.log("eliminating constant load from %s to %d (%s) in block %s", load.getConstant(), allocator.operandNumber(load.getResult()), load.getResult(),
-                                                            block);
-                                        }
-                                    }
-
-                                    // null-instructions are deleted by assignRegNum
-                                    instructions.set(j, null);
-                                }
-
-                            } else {
-                                lastOpId = opId;
-                                /*
-                                 * Insert move from register to stack just after the beginning of
-                                 * the interval.
-                                 */
-                                // assert interval == TraceInterval.EndMarker ||
-                                // interval.spillDefinitionPos() >= opId : "invalid order";
-                                assert interval == TraceInterval.EndMarker || (interval.isSplitParent() && SpillState.IN_MEMORY.contains(interval.spillState())) : "invalid interval";
-
-                                while (interval != TraceInterval.EndMarker && interval.spillDefinitionPos() == opId) {
-                                    Debug.log("handle %s", interval);
-                                    if (!interval.canMaterialize()) {
-                                        if (!insertionBuffer.initialized()) {
-                                            /*
-                                             * prepare insertion buffer (appended when all
-                                             * instructions in the block are processed)
-                                             */
-                                            insertionBuffer.init(instructions);
-                                        }
-
-                                        AllocatableValue fromLocation = interval.getSplitChildAtOpId(opId, OperandMode.DEF, allocator).location();
-                                        AllocatableValue toLocation = TraceLinearScan.canonicalSpillOpr(interval);
-                                        if (!fromLocation.equals(toLocation)) {
-
-                                            assert isRegister(fromLocation) : "from operand must be a register but is: " + fromLocation + " toLocation=" + toLocation + " spillState=" +
-                                                            interval.spillState();
-                                            assert isStackSlotValue(toLocation) : "to operand must be a stack slot";
-
-                                            LIRInstruction move = allocator.getSpillMoveFactory().createMove(toLocation, fromLocation);
-                                            insertionBuffer.append(j + 1, move);
-
-                                            if (Debug.isLogEnabled()) {
-                                                Debug.log("inserting move after definition of interval %d to stack slot %s at opId %d", interval.operandNumber, interval.spillSlot(), opId);
-                                            }
-                                        }
-                                    }
-                                    interval = interval.next;
-                                }
-                            }
-                        }
-                    } // end of instruction iteration
-
-                    if (insertionBuffer.initialized()) {
-                        insertionBuffer.finish();
-                    }
-                }
-            } // end of block iteration
-
-            assert interval == TraceInterval.EndMarker : "missed an interval";
-        }
-    }
-
-    /**
-     * @param allocator
-     * @param block The block {@code move} is located in.
-     * @param move Spill move.
-     * @param lastOpId The id of the last "normal" instruction before the spill move. (Spill moves
-     *            have no valid opId; their id is -1.)
-     */
-    private static boolean canEliminateSpillMove(TraceLinearScan allocator, AbstractBlockBase<?> block, MoveOp move, int lastOpId) {
-        assert isVariable(move.getResult()) : "LinearScan inserts only moves to variables: " + move;
-        assert lastOpId >= 0 : "Invalid lastOpId: " + lastOpId;
-
-        TraceInterval curInterval = allocator.intervalFor(move.getResult());
-
-        if (!isRegister(curInterval.location()) && curInterval.inMemoryAt(lastOpId) && isPhiResolutionMove(allocator, move)) {
-            assert isStackSlotValue(curInterval.location()) : "Not a stack slot: " + curInterval.location();
-            return true;
-        }
-        return false;
-    }
-
-    private static boolean isPhiResolutionMove(TraceLinearScan allocator, MoveOp move) {
-        TraceInterval curInterval = allocator.intervalFor(move.getResult());
-        return !curInterval.isSplitParent();
-    }
-
-    private static void checkIntervals(TraceInterval interval) {
-        TraceInterval prev = null;
-        TraceInterval temp = interval;
-        while (temp != TraceInterval.EndMarker) {
-            assert temp.spillDefinitionPos() >= 0 : "invalid spill definition pos";
-            if (prev != null) {
-                // assert temp.from() >= prev.from() : "intervals not sorted";
-                assert temp.spillDefinitionPos() >= prev.spillDefinitionPos() : "when intervals are sorted by from, they must also be sorted by spillDefinitionPos";
-            }
-
-            assert temp.spillSlot() != null || temp.canMaterialize() : "interval has no spill slot assigned";
-            assert temp.spillDefinitionPos() >= temp.from() : "invalid order";
-            // assert temp.spillDefinitionPos() <= temp.from() + 2 :
-            // "only intervals defined once at their start-pos can be optimized";
-
-            if (Debug.isLogEnabled()) {
-                Debug.log("interval %d (from %d to %d) must be stored at %d", temp.operandNumber, temp.from(), temp.to(), temp.spillDefinitionPos());
-            }
-
-            prev = temp;
-            temp = temp.next;
-        }
-    }
-
-}
--- a/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceLinearScanLifetimeAnalysisPhase.java	Mon Nov 30 15:07:18 2015 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,677 +0,0 @@
-/*
- * Copyright (c) 2015, 2015, Oracle and/or its affiliates. All rights reserved.
- * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
- *
- * This code is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License version 2 only, as
- * published by the Free Software Foundation.
- *
- * This code is distributed in the hope that it will be useful, but WITHOUT
- * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
- * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
- * version 2 for more details (a copy is included in the LICENSE file that
- * accompanied this code).
- *
- * You should have received a copy of the GNU General Public License version
- * 2 along with this work; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
- *
- * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
- * or visit www.oracle.com if you need additional information or have any
- * questions.
- */
-package com.oracle.graal.lir.alloc.trace;
-
-import static com.oracle.graal.lir.LIRValueUtil.asVariable;
-import static com.oracle.graal.lir.LIRValueUtil.isStackSlotValue;
-import static com.oracle.graal.lir.LIRValueUtil.isVariable;
-import static com.oracle.graal.lir.alloc.trace.TraceLinearScan.isVariableOrRegister;
-import static com.oracle.graal.lir.alloc.trace.TraceRegisterAllocationPhase.Options.TraceRAshareSpillInformation;
-import static com.oracle.graal.lir.alloc.trace.TraceRegisterAllocationPhase.Options.TraceRAuseInterTraceHints;
-import static com.oracle.graal.lir.alloc.trace.TraceUtil.asShadowedRegisterValue;
-import static com.oracle.graal.lir.alloc.trace.TraceUtil.isShadowedRegisterValue;
-import static jdk.vm.ci.code.ValueUtil.asRegisterValue;
-import static jdk.vm.ci.code.ValueUtil.asStackSlot;
-import static jdk.vm.ci.code.ValueUtil.isRegister;
-import static jdk.vm.ci.code.ValueUtil.isStackSlot;
-
-import java.util.BitSet;
-import java.util.EnumSet;
-import java.util.List;
-
-import jdk.vm.ci.code.BailoutException;
-import jdk.vm.ci.code.Register;
-import jdk.vm.ci.code.RegisterValue;
-import jdk.vm.ci.code.TargetDescription;
-import jdk.vm.ci.common.JVMCIError;
-import jdk.vm.ci.meta.AllocatableValue;
-import jdk.vm.ci.meta.JavaConstant;
-import jdk.vm.ci.meta.LIRKind;
-import jdk.vm.ci.meta.Value;
-
-import com.oracle.graal.compiler.common.alloc.ComputeBlockOrder;
-import com.oracle.graal.compiler.common.alloc.TraceBuilder.TraceBuilderResult;
-import com.oracle.graal.compiler.common.cfg.AbstractBlockBase;
-import com.oracle.graal.debug.Debug;
-import com.oracle.graal.debug.Indent;
-import com.oracle.graal.lir.InstructionValueConsumer;
-import com.oracle.graal.lir.LIR;
-import com.oracle.graal.lir.LIRInstruction;
-import com.oracle.graal.lir.LIRInstruction.OperandFlag;
-import com.oracle.graal.lir.LIRInstruction.OperandMode;
-import com.oracle.graal.lir.LIRValueUtil;
-import com.oracle.graal.lir.StandardOp.BlockEndOp;
-import com.oracle.graal.lir.StandardOp.LabelOp;
-import com.oracle.graal.lir.StandardOp.LoadConstantOp;
-import com.oracle.graal.lir.StandardOp.ValueMoveOp;
-import com.oracle.graal.lir.ValueConsumer;
-import com.oracle.graal.lir.Variable;
-import com.oracle.graal.lir.alloc.trace.TraceInterval.RegisterPriority;
-import com.oracle.graal.lir.alloc.trace.TraceInterval.SpillState;
-import com.oracle.graal.lir.gen.LIRGenerationResult;
-import com.oracle.graal.lir.ssi.SSIUtil;
-
-final class TraceLinearScanLifetimeAnalysisPhase extends TraceLinearScanAllocationPhase {
-
-    @Override
-    protected <B extends AbstractBlockBase<B>> void run(TargetDescription target, LIRGenerationResult lirGenRes, List<B> codeEmittingOrder, List<B> linearScanOrder,
-                    TraceLinearScanAllocationContext context) {
-        TraceBuilderResult<?> traceBuilderResult = context.traceBuilderResult;
-        TraceLinearScan allocator = context.allocator;
-        new Analyser(allocator, traceBuilderResult).analyze();
-    }
-
-    private static final class Analyser {
-        private static final int DUMP_DURING_ANALYSIS_LEVEL = 4;
-        private final TraceLinearScan allocator;
-        private final TraceBuilderResult<?> traceBuilderResult;
-
-        /**
-         * @param linearScan
-         * @param traceBuilderResult
-         */
-        private Analyser(TraceLinearScan linearScan, TraceBuilderResult<?> traceBuilderResult) {
-            allocator = linearScan;
-            this.traceBuilderResult = traceBuilderResult;
-        }
-
-        private void analyze() {
-            numberInstructions();
-            allocator.printLir("Before register allocation", true);
-            buildIntervals();
-        }
-
-        private boolean sameTrace(AbstractBlockBase<?> a, AbstractBlockBase<?> b) {
-            return traceBuilderResult.getTraceForBlock(b) == traceBuilderResult.getTraceForBlock(a);
-        }
-
-        private boolean isAllocatedOrCurrent(AbstractBlockBase<?> currentBlock, AbstractBlockBase<?> other) {
-            return traceBuilderResult.getTraceForBlock(other) <= traceBuilderResult.getTraceForBlock(currentBlock);
-        }
-
-        private static void setHint(final LIRInstruction op, TraceInterval to, IntervalHint from) {
-            IntervalHint currentHint = to.locationHint(false);
-            if (currentHint == null) {
-                /*
-                 * Update hint if there was none or if the hint interval starts after the hinted
-                 * interval.
-                 */
-                to.setLocationHint(from);
-                if (Debug.isLogEnabled()) {
-                    Debug.log("operation at opId %d: added hint from interval %s to %s", op.id(), from, to);
-                }
-            }
-        }
-
-        /**
-         * Numbers all instructions in all blocks. The numbering follows the
-         * {@linkplain ComputeBlockOrder linear scan order}.
-         */
-        private void numberInstructions() {
-
-            allocator.initIntervals();
-
-            ValueConsumer setVariableConsumer = (value, mode, flags) -> {
-                if (isVariable(value)) {
-                    allocator.getOrCreateInterval(asVariable(value));
-                }
-            };
-
-            // Assign IDs to LIR nodes and build a mapping, lirOps, from ID to LIRInstruction node.
-            int numInstructions = 0;
-            for (AbstractBlockBase<?> block : allocator.sortedBlocks()) {
-                numInstructions += allocator.getLIR().getLIRforBlock(block).size();
-            }
-
-            // initialize with correct length
-            allocator.initOpIdMaps(numInstructions);
-
-            int opId = 0;
-            int index = 0;
-            for (AbstractBlockBase<?> block : allocator.sortedBlocks()) {
-                allocator.initBlockData(block);
-
-                List<LIRInstruction> instructions = allocator.getLIR().getLIRforBlock(block);
-
-                int numInst = instructions.size();
-                for (int j = 0; j < numInst; j++) {
-                    LIRInstruction op = instructions.get(j);
-                    op.setId(opId);
-
-                    allocator.putOpIdMaps(index, op, block);
-                    assert allocator.instructionForId(opId) == op : "must match";
-
-                    op.visitEachTemp(setVariableConsumer);
-                    op.visitEachOutput(setVariableConsumer);
-
-                    index++;
-                    opId += 2; // numbering of lirOps by two
-                }
-            }
-            assert index == numInstructions : "must match";
-            assert (index << 1) == opId : "must match: " + (index << 1);
-        }
-
-        private void addUse(AllocatableValue operand, int from, int to, RegisterPriority registerPriority, LIRKind kind) {
-            if (!allocator.isProcessed(operand)) {
-                return;
-            }
-            if (isRegister(operand)) {
-                addFixedUse(asRegisterValue(operand), from, to);
-            } else {
-                assert isVariable(operand) : operand;
-                addVariableUse(asVariable(operand), from, to, registerPriority, kind);
-            }
-        }
-
-        private void addFixedUse(RegisterValue reg, int from, int to) {
-            FixedInterval interval = allocator.getOrCreateFixedInterval(reg);
-            interval.addRange(from, to);
-            if (Debug.isLogEnabled()) {
-                Debug.log("add fixed use: %s, at %d", interval, to);
-            }
-        }
-
-        private void addVariableUse(Variable operand, int from, int to, RegisterPriority registerPriority, LIRKind kind) {
-            TraceInterval interval = allocator.getOrCreateInterval(operand);
-
-            if (!kind.equals(LIRKind.Illegal)) {
-                interval.setKind(kind);
-            }
-
-            interval.addRange(from, to);
-
-            // Register use position at even instruction id.
-            interval.addUsePos(to & ~1, registerPriority);
-
-            if (Debug.isLogEnabled()) {
-                Debug.log("add use: %s, at %d (%s)", interval, to, registerPriority.name());
-            }
-        }
-
-        private void addTemp(AllocatableValue operand, int tempPos, RegisterPriority registerPriority, LIRKind kind) {
-            if (!allocator.isProcessed(operand)) {
-                return;
-            }
-            if (isRegister(operand)) {
-                addFixedTemp(asRegisterValue(operand), tempPos);
-            } else {
-                assert isVariable(operand) : operand;
-                addVariableTemp(asVariable(operand), tempPos, registerPriority, kind);
-            }
-        }
-
-        private void addFixedTemp(RegisterValue reg, int tempPos) {
-            FixedInterval interval = allocator.getOrCreateFixedInterval(reg);
-            interval.addRange(tempPos, tempPos + 1);
-            if (Debug.isLogEnabled()) {
-                Debug.log("add fixed temp: %s, at %d", interval, tempPos);
-            }
-        }
-
-        private void addVariableTemp(Variable operand, int tempPos, RegisterPriority registerPriority, LIRKind kind) {
-            TraceInterval interval = allocator.getOrCreateInterval(operand);
-
-            if (!kind.equals(LIRKind.Illegal)) {
-                interval.setKind(kind);
-            }
-
-            if (interval.isEmpty()) {
-                interval.addRange(tempPos, tempPos + 1);
-            } else if (interval.from() > tempPos) {
-                interval.setFrom(tempPos);
-            }
-
-            interval.addUsePos(tempPos, registerPriority);
-            interval.addMaterializationValue(null);
-
-            if (Debug.isLogEnabled()) {
-                Debug.log("add temp: %s tempPos %d (%s)", interval, tempPos, RegisterPriority.MustHaveRegister.name());
-            }
-        }
-
-        private void addDef(AllocatableValue operand, LIRInstruction op, RegisterPriority registerPriority, LIRKind kind) {
-            if (!allocator.isProcessed(operand)) {
-                return;
-            }
-            if (isRegister(operand)) {
-                addFixedDef(asRegisterValue(operand), op);
-            } else {
-                assert isVariable(operand) : operand;
-                addVariableDef(asVariable(operand), op, registerPriority, kind);
-            }
-        }
-
-        private void addFixedDef(RegisterValue reg, LIRInstruction op) {
-            FixedInterval interval = allocator.getOrCreateFixedInterval(reg);
-            int defPos = op.id();
-            if (interval.from() <= defPos) {
-                /*
-                 * Update the starting point (when a range is first created for a use, its start is
-                 * the beginning of the current block until a def is encountered).
-                 */
-                interval.setFrom(defPos);
-
-            } else {
-                /*
-                 * Dead value - make a vacuous interval; also add register priority for dead intervals.
-                 */
-                interval.addRange(defPos, defPos + 1);
-                if (Debug.isLogEnabled()) {
-                    Debug.log("Warning: def of operand %s at %d occurs without use", reg, defPos);
-                }
-            }
-            if (Debug.isLogEnabled()) {
-                Debug.log("add fixed def: %s, at %d", interval, defPos);
-            }
-        }
-
-        private void addVariableDef(Variable operand, LIRInstruction op, RegisterPriority registerPriority, LIRKind kind) {
-            int defPos = op.id();
-
-            TraceInterval interval = allocator.getOrCreateInterval(operand);
-
-            if (!kind.equals(LIRKind.Illegal)) {
-                interval.setKind(kind);
-            }
-
-            if (interval.isEmpty()) {
-                /*
-                 * Dead value - make a vacuous interval; also add register priority for dead intervals.
-                 */
-                interval.addRange(defPos, defPos + 1);
-                interval.addUsePos(defPos, registerPriority);
-                if (Debug.isLogEnabled()) {
-                    Debug.log("Warning: def of operand %s at %d occurs without use", operand, defPos);
-                }
-            } else {
-                /*
-                 * Update the starting point (when a range is first created for a use, its start is
-                 * the beginning of the current block until a def is encountered).
-                 */
-                interval.setFrom(defPos);
-                interval.addUsePos(defPos, registerPriority);
-            }
-
-            changeSpillDefinitionPos(op, operand, interval, defPos);
-            if (registerPriority == RegisterPriority.None && interval.spillState().ordinal() <= SpillState.StartInMemory.ordinal() && isStackSlot(operand)) {
-                // detection of method-parameters and roundfp-results
-                interval.setSpillState(SpillState.StartInMemory);
-            }
-            interval.addMaterializationValue(getMaterializedValue(op, operand, interval));
-
-            if (Debug.isLogEnabled()) {
-                Debug.log("add def: %s defPos %d (%s)", interval, defPos, registerPriority.name());
-            }
-        }
-
-        private void addRegisterHint(final LIRInstruction op, final Value targetValue, OperandMode mode, EnumSet<OperandFlag> flags, final boolean hintAtDef) {
-            if (flags.contains(OperandFlag.HINT) && TraceLinearScan.isVariableOrRegister(targetValue)) {
-
-                op.forEachRegisterHint(targetValue, mode, (registerHint, valueMode, valueFlags) -> {
-                    if (TraceLinearScan.isVariableOrRegister(registerHint)) {
-                        /*
-                         * TODO (je): clean up
-                         */
-                        final AllocatableValue fromValue;
-                        final AllocatableValue toValue;
-                        /* hints always point from def to use */
-                        if (hintAtDef) {
-                            fromValue = (AllocatableValue) registerHint;
-                            toValue = (AllocatableValue) targetValue;
-                        } else {
-                            fromValue = (AllocatableValue) targetValue;
-                            toValue = (AllocatableValue) registerHint;
-                        }
-                        Debug.log("addRegisterHint %s to %s", fromValue, toValue);
-                        final TraceInterval to;
-                        final IntervalHint from;
-                        if (isRegister(toValue)) {
-                            if (isRegister(fromValue)) {
-                                // fixed to fixed move
-                                return null;
-                            }
-                            from = getIntervalHint(toValue);
-                            to = allocator.getOrCreateInterval(fromValue);
-                        } else {
-                            to = allocator.getOrCreateInterval(toValue);
-                            from = getIntervalHint(fromValue);
-                        }
-
-                        to.setLocationHint(from);
-                        if (Debug.isLogEnabled()) {
-                            Debug.log("operation at opId %d: added hint from interval %s to %s", op.id(), from, to);
-                        }
-
-                        return registerHint;
-                    }
-                    return null;
-                });
-            }
-        }
-
-        private IntervalHint getIntervalHint(AllocatableValue from) {
-            if (isRegister(from)) {
-                return allocator.getOrCreateFixedInterval(asRegisterValue(from));
-            }
-            return allocator.getOrCreateInterval(from);
-        }
-
-        /**
-         * Records the spill definition position for {@code interval} and updates its spill state.
-         *
-         * @param op
-         * @param operand
-         */
-        private void changeSpillDefinitionPos(LIRInstruction op, AllocatableValue operand, TraceInterval interval, int defPos) {
-            assert interval.isSplitParent() : "can only be called for split parents";
-
-            switch (interval.spillState()) {
-                case NoDefinitionFound:
-                    // assert interval.spillDefinitionPos() == -1 : "must not be set before";
-                    interval.setSpillDefinitionPos(defPos);
-                    if (!(op instanceof LabelOp)) {
-                        // Do not update state for labels. This will be done afterwards.
-                        interval.setSpillState(SpillState.NoSpillStore);
-                    }
-                    break;
-
-                case NoSpillStore:
-                    assert defPos <= interval.spillDefinitionPos() : "positions are processed in reverse order when intervals are created";
-                    if (defPos < interval.spillDefinitionPos() - 2) {
-                        /*
-                         * Second definition found, so no spill optimization possible for this
-                         * interval.
-                         */
-                        interval.setSpillState(SpillState.NoOptimization);
-                    } else {
-                        // two consecutive definitions (because of two-operand LIR form)
-                        assert allocator.blockForId(defPos) == allocator.blockForId(interval.spillDefinitionPos()) : "block must be equal";
-                    }
-                    break;
-
-                case NoOptimization:
-                    // nothing to do
-                    break;
-
-                default:
-                    throw new BailoutException("other states not allowed at this time");
-            }
-        }
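
The switch in changeSpillDefinitionPos is a small state machine over SpillState, driven by definitions that are encountered in reverse instruction order. The following stand-alone sketch models only that transition logic; all names are invented for illustration (the LabelOp special case and the same-block assertion are omitted), so it is not Graal API.

    final class SpillStateTransitionSketch {

        enum State {
            NO_DEFINITION_FOUND, NO_SPILL_STORE, NO_OPTIMIZATION
        }

        static final class IntervalModel {
            State state = State.NO_DEFINITION_FOUND;
            int spillDefinitionPos = -1;
        }

        /** Definitions are reported in reverse order, so defPos never increases. */
        static void recordDefinition(IntervalModel interval, int defPos) {
            switch (interval.state) {
                case NO_DEFINITION_FOUND:
                    interval.spillDefinitionPos = defPos;
                    interval.state = State.NO_SPILL_STORE;
                    break;
                case NO_SPILL_STORE:
                    if (defPos < interval.spillDefinitionPos - 2) {
                        // a second, unrelated definition: no spill optimization possible
                        interval.state = State.NO_OPTIMIZATION;
                    }
                    // otherwise: two consecutive definitions (two-operand LIR form), keep state
                    break;
                case NO_OPTIMIZATION:
                    break; // nothing to do
            }
        }

        public static void main(String[] args) {
            IntervalModel interval = new IntervalModel();
            recordDefinition(interval, 40); // the definition seen first (last in program order)
            recordDefinition(interval, 20); // an earlier, unrelated definition
            System.out.println(interval.state); // prints NO_OPTIMIZATION
        }
    }
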
-
-        private static boolean optimizeMethodArgument(Value value) {
-            /*
-             * Object method arguments that are passed on the stack are currently not optimized
-             * because this requires that the runtime visits method arguments during stack walking.
-             */
-            return isStackSlot(value) && asStackSlot(value).isInCallerFrame() && value.getLIRKind().isValue();
-        }
-
-        /**
-         * Determines the register priority for an instruction's output/result operand.
-         */
-        private static RegisterPriority registerPriorityOfOutputOperand(LIRInstruction op) {
-            if (op instanceof LabelOp) {
-                // skip method header
-                return RegisterPriority.None;
-            }
-            if (op instanceof ValueMoveOp) {
-                ValueMoveOp move = (ValueMoveOp) op;
-                if (optimizeMethodArgument(move.getInput())) {
-                    return RegisterPriority.None;
-                }
-            }
-
-            // all other operands require a register
-            return RegisterPriority.MustHaveRegister;
-        }
-
-        /**
-         * Determines the priority with which an instruction's input operand will be allocated a
-         * register.
-         */
-        private static RegisterPriority registerPriorityOfInputOperand(EnumSet<OperandFlag> flags) {
-            if (flags.contains(OperandFlag.OUTGOING)) {
-                return RegisterPriority.None;
-            }
-            if (flags.contains(OperandFlag.STACK)) {
-                return RegisterPriority.ShouldHaveRegister;
-            }
-            // all other operands require a register
-            return RegisterPriority.MustHaveRegister;
-        }
-
-        @SuppressWarnings("try")
-        private void buildIntervals() {
-
-            try (Indent indent = Debug.logAndIndent("build intervals")) {
-                InstructionValueConsumer outputConsumer = (op, operand, mode, flags) -> {
-                    if (TraceLinearScan.isVariableOrRegister(operand)) {
-                        addDef((AllocatableValue) operand, op, registerPriorityOfOutputOperand(op), operand.getLIRKind());
-                        addRegisterHint(op, operand, mode, flags, true);
-                    }
-                };
-
-                InstructionValueConsumer tempConsumer = (op, operand, mode, flags) -> {
-                    if (TraceLinearScan.isVariableOrRegister(operand)) {
-                        addTemp((AllocatableValue) operand, op.id(), RegisterPriority.MustHaveRegister, operand.getLIRKind());
-                        addRegisterHint(op, operand, mode, flags, false);
-                    }
-                };
-
-                InstructionValueConsumer aliveConsumer = (op, operand, mode, flags) -> {
-                    if (TraceLinearScan.isVariableOrRegister(operand)) {
-                        RegisterPriority p = registerPriorityOfInputOperand(flags);
-                        int opId = op.id();
-                        int blockFrom = allocator.getFirstLirInstructionId((allocator.blockForId(opId)));
-                        addUse((AllocatableValue) operand, blockFrom, opId + 1, p, operand.getLIRKind());
-                        addRegisterHint(op, operand, mode, flags, false);
-                    }
-                };
-
-                InstructionValueConsumer inputConsumer = (op, operand, mode, flags) -> {
-                    if (TraceLinearScan.isVariableOrRegister(operand)) {
-                        int opId = op.id();
-                        RegisterPriority p = registerPriorityOfInputOperand(flags);
-                        int blockFrom = allocator.getFirstLirInstructionId((allocator.blockForId(opId)));
-                        addUse((AllocatableValue) operand, blockFrom, opId, p, operand.getLIRKind());
-                        addRegisterHint(op, operand, mode, flags, false);
-                    }
-                };
-
-                InstructionValueConsumer stateProc = (op, operand, mode, flags) -> {
-                    if (TraceLinearScan.isVariableOrRegister(operand)) {
-                        int opId = op.id();
-                        int blockFrom = allocator.getFirstLirInstructionId((allocator.blockForId(opId)));
-                        addUse((AllocatableValue) operand, blockFrom, opId + 1, RegisterPriority.None, operand.getLIRKind());
-                    }
-                };
-
-                // create a list with all caller-save registers (cpu, fpu, xmm)
-                Register[] callerSaveRegs = allocator.getRegisterAllocationConfig().getRegisterConfig().getCallerSaveRegisters();
-
-                // iterate all blocks in reverse order
-                for (int i = allocator.blockCount() - 1; i >= 0; i--) {
-
-                    AbstractBlockBase<?> block = allocator.blockAt(i);
-                    // TODO (je) make empty bitset - remove
-                    allocator.getBlockData(block).liveIn = new BitSet();
-                    allocator.getBlockData(block).liveOut = new BitSet();
-                    try (Indent indent2 = Debug.logAndIndent("handle block %d", block.getId())) {
-
-                        List<LIRInstruction> instructions = allocator.getLIR().getLIRforBlock(block);
-
-                        /*
-                         * Iterate all instructions of the block in reverse order. definitions of
-                         * intervals are processed before uses.
-                         */
-                        for (int j = instructions.size() - 1; j >= 0; j--) {
-                            final LIRInstruction op = instructions.get(j);
-                            final int opId = op.id();
-
-                            try (Indent indent3 = Debug.logAndIndent("handle inst %d: %s", opId, op)) {
-
-                                // add a temp range for each register if operation destroys
-                                // caller-save registers
-                                if (op.destroysCallerSavedRegisters()) {
-                                    for (Register r : callerSaveRegs) {
-                                        if (allocator.attributes(r).isAllocatable()) {
-                                            addTemp(r.asValue(), opId, RegisterPriority.None, LIRKind.Illegal);
-                                        }
-                                    }
-                                    if (Debug.isLogEnabled()) {
-                                        Debug.log("operation destroys all caller-save registers");
-                                    }
-                                }
-
-                                op.visitEachOutput(outputConsumer);
-                                op.visitEachTemp(tempConsumer);
-                                op.visitEachAlive(aliveConsumer);
-                                op.visitEachInput(inputConsumer);
-
-                                /*
-                                 * Add uses of live locals from interpreter's point of view for
-                                 * proper debug information generation. Treat these operands as temp
-                                 * values (if the live range is extended to a call site, the value
-                                 * would be in a register at the call otherwise).
-                                 */
-                                op.visitEachState(stateProc);
-                            }
-
-                        } // end of instruction iteration
-                    }
-                    if (Debug.isDumpEnabled(DUMP_DURING_ANALYSIS_LEVEL)) {
-                        allocator.printIntervals("After Block " + block);
-                    }
-                } // end of block iteration
-
-                // fix spill state for phi/sigma intervals
-                for (TraceInterval interval : allocator.intervals()) {
-                    if (interval != null && interval.spillState().equals(SpillState.NoDefinitionFound) && interval.spillDefinitionPos() != -1) {
-                        // there was a definition in a phi/sigma
-                        interval.setSpillState(SpillState.NoSpillStore);
-                    }
-                }
-                if (TraceRAuseInterTraceHints.getValue()) {
-                    addInterTraceHints();
-                }
-                /*
-                 * Add the range [-1, 0] to all fixed intervals so that the register allocator
-                 * never needs to handle unhandled fixed intervals.
-                 */
-                for (FixedInterval interval : allocator.fixedIntervals()) {
-                    if (interval != null) {
-                        /* We use [-1, 0] to avoid intersection with incoming values. */
-                        interval.addRange(-1, 0);
-                    }
-                }
-            }
-        }
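
Because buildIntervals walks blocks and instructions in reverse order, a use is always visited before the instruction that defines it: the use provisionally opens a range starting at the block entry, and the definition later tightens the start (or creates a vacuous range for a dead value). Below is a minimal stand-alone sketch of that interaction; the names are hypothetical, and kinds, priorities, hints and fixed intervals are all left out.

    final class IntervalBuildingSketch {

        static final class IntervalModel {
            int from = Integer.MAX_VALUE;
            int to = -1;

            boolean isEmpty() {
                return to < 0;
            }

            // A use, seen first in the reverse walk, opens a range starting at the block entry.
            void addUse(int blockFrom, int usePos) {
                from = Math.min(from, blockFrom);
                to = Math.max(to, usePos);
            }

            // A def, seen afterwards, either tightens the provisional start or creates a
            // vacuous one-slot range for a dead value.
            void addDef(int defPos) {
                if (isEmpty()) {
                    from = defPos;
                    to = defPos + 1;
                } else {
                    from = defPos;
                }
            }
        }

        public static void main(String[] args) {
            IntervalModel v = new IntervalModel();
            v.addUse(0, 14); // use at opId 14, enclosing block starts at opId 0
            v.addDef(6);     // defining instruction at opId 6, visited later
            System.out.println("[" + v.from + ", " + v.to + "]"); // prints [6, 14]
        }
    }
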
-
-        private void addInterTraceHints() {
-            // set hints for phi/sigma intervals
-            LIR lir = allocator.getLIR();
-            for (AbstractBlockBase<?> block : allocator.sortedBlocks()) {
-                LabelOp label = SSIUtil.incoming(lir, block);
-                for (AbstractBlockBase<?> pred : block.getPredecessors()) {
-                    if (isAllocatedOrCurrent(block, pred)) {
-                        BlockEndOp outgoing = SSIUtil.outgoing(lir, pred);
-                        for (int i = 0; i < outgoing.getOutgoingSize(); i++) {
-                            Value toValue = label.getIncomingValue(i);
-                            assert !isShadowedRegisterValue(toValue) : "Shadowed Registers are not allowed here: " + toValue;
-                            if (isVariable(toValue)) {
-                                Value fromValue = outgoing.getOutgoingValue(i);
-                                assert sameTrace(block, pred) || !isVariable(fromValue) : "Unallocated variable: " + fromValue;
-                                if (!LIRValueUtil.isConstantValue(fromValue)) {
-                                    addInterTraceHint(label, (AllocatableValue) toValue, fromValue);
-                                }
-                            }
-                        }
-                    }
-                }
-            }
-        }
-
-        private void addInterTraceHint(LabelOp label, AllocatableValue toValue, Value fromValue) {
-            assert isVariable(toValue) : "Wrong toValue: " + toValue;
-            assert isRegister(fromValue) || isVariable(fromValue) || isStackSlotValue(fromValue) || isShadowedRegisterValue(fromValue) : "Wrong fromValue: " + fromValue;
-            if (isVariableOrRegister(fromValue)) {
-                TraceInterval to = allocator.getOrCreateInterval(toValue);
-                IntervalHint from = getIntervalHint((AllocatableValue) fromValue);
-                setHint(label, to, from);
-            } else if (isStackSlotValue(fromValue)) {
-                TraceInterval to = allocator.getOrCreateInterval(toValue);
-                to.setSpillSlot((AllocatableValue) fromValue);
-                to.setSpillState(SpillState.StartInMemory);
-            } else if (TraceRAshareSpillInformation.getValue() && isShadowedRegisterValue(fromValue)) {
-                ShadowedRegisterValue shadowedRegisterValue = asShadowedRegisterValue(fromValue);
-                IntervalHint from = getIntervalHint(shadowedRegisterValue.getRegister());
-                TraceInterval to = allocator.getOrCreateInterval(toValue);
-                setHint(label, to, from);
-                to.setSpillSlot(shadowedRegisterValue.getStackSlot());
-                to.setSpillState(SpillState.StartInMemory);
-            } else {
-                throw JVMCIError.shouldNotReachHere();
-            }
-        }
-
-        /**
-         * Returns a value for an interval definition, which can be used for re-materialization.
-         *
-         * @param op An instruction which defines a value
-         * @param operand The destination operand of the instruction
-         * @param interval The interval for this defined value.
-         * @return Returns the value which is moved to the instruction and which can be reused at
-         *         all reload-locations in case the interval of this instruction is spilled.
-         *         Currently this can only be a {@link JavaConstant}.
-         */
-        private JavaConstant getMaterializedValue(LIRInstruction op, Value operand, TraceInterval interval) {
-            if (op instanceof LoadConstantOp) {
-                LoadConstantOp move = (LoadConstantOp) op;
-                if (move.getConstant() instanceof JavaConstant) {
-                    if (!allocator.neverSpillConstants()) {
-                        if (!allocator.getSpillMoveFactory().allowConstantToStackMove(move.getConstant())) {
-                            return null;
-                        }
-                        /*
-                         * Check if the interval has any uses which would accept a stack location
-                         * (priority == ShouldHaveRegister). Rematerialization of such intervals can
-                         * result in a degradation, because rematerialization always inserts a
-                         * constant load, even if the value is not needed in a register.
-                         */
-                        UsePosList usePosList = interval.usePosList();
-                        int numUsePos = usePosList.size();
-                        for (int useIdx = 0; useIdx < numUsePos; useIdx++) {
-                            TraceInterval.RegisterPriority priority = usePosList.registerPriority(useIdx);
-                            if (priority == TraceInterval.RegisterPriority.ShouldHaveRegister) {
-                                return null;
-                            }
-                        }
-                    }
-                    return (JavaConstant) move.getConstant();
-                }
-            }
-            return null;
-        }
-    }
-}
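
getMaterializedValue is the rematerialization test of this phase: a constant definition may be re-used at reload points only if spilling constants is forbidden or if no use of the interval would be satisfied by a stack location. The following is a hedged, stand-alone restatement of that decision with invented names (the neverSpillConstants and constant-to-stack checks are passed in as flags rather than queried from the allocator).

    import java.util.Arrays;
    import java.util.List;

    final class RematerializationCheckSketch {

        enum Priority {
            SHOULD_HAVE_REGISTER, MUST_HAVE_REGISTER
        }

        static boolean canRematerialize(List<Priority> usePriorities, boolean neverSpillConstants, boolean constantToStackMoveAllowed) {
            if (!neverSpillConstants) {
                if (!constantToStackMoveAllowed) {
                    return false;
                }
                // A use that would accept a stack location makes rematerialization a potential
                // pessimization, because it always forces a constant load into a register.
                for (Priority priority : usePriorities) {
                    if (priority == Priority.SHOULD_HAVE_REGISTER) {
                        return false;
                    }
                }
            }
            return true;
        }

        public static void main(String[] args) {
            System.out.println(canRematerialize(Arrays.asList(Priority.MUST_HAVE_REGISTER), false, true));   // true
            System.out.println(canRematerialize(Arrays.asList(Priority.SHOULD_HAVE_REGISTER), false, true)); // false
        }
    }
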
--- a/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceLinearScanRegisterAllocationPhase.java	Mon Nov 30 15:07:18 2015 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,58 +0,0 @@
-/*
- * Copyright (c) 2015, 2015, Oracle and/or its affiliates. All rights reserved.
- * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
- *
- * This code is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License version 2 only, as
- * published by the Free Software Foundation.
- *
- * This code is distributed in the hope that it will be useful, but WITHOUT
- * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
- * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
- * version 2 for more details (a copy is included in the LICENSE file that
- * accompanied this code).
- *
- * You should have received a copy of the GNU General Public License version
- * 2 along with this work; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
- *
- * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
- * or visit www.oracle.com if you need additional information or have any
- * questions.
- */
-package com.oracle.graal.lir.alloc.trace;
-
-import java.util.List;
-
-import jdk.vm.ci.code.TargetDescription;
-
-import com.oracle.graal.compiler.common.cfg.AbstractBlockBase;
-import com.oracle.graal.debug.Debug;
-import com.oracle.graal.debug.Indent;
-import com.oracle.graal.lir.gen.LIRGenerationResult;
-
-final class TraceLinearScanRegisterAllocationPhase extends TraceLinearScanAllocationPhase {
-
-    @Override
-    protected <B extends AbstractBlockBase<B>> void run(TargetDescription target, LIRGenerationResult lirGenRes, List<B> codeEmittingOrder, List<B> linearScanOrder,
-                    TraceLinearScanAllocationContext context) {
-        TraceLinearScan allocator = context.allocator;
-        allocator.printIntervals("Before register allocation");
-        allocateRegisters(allocator);
-        allocator.printIntervals("After register allocation");
-    }
-
-    @SuppressWarnings("try")
-    private static void allocateRegisters(TraceLinearScan allocator) {
-        try (Indent indent = Debug.logAndIndent("allocate registers")) {
-            FixedInterval precoloredIntervals = allocator.createFixedUnhandledList();
-            TraceInterval notPrecoloredIntervals = allocator.createUnhandledListByFrom(TraceLinearScan.IS_VARIABLE_INTERVAL);
-
-            // allocate cpu registers
-            TraceLinearScanWalker lsw = new TraceLinearScanWalker(allocator, precoloredIntervals, notPrecoloredIntervals);
-            lsw.walk();
-            lsw.finishAllocation();
-        }
-    }
-
-}
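
allocateRegisters hands two start-sorted work lists (fixed and variable intervals) to a walker that advances through the unhandled intervals and assigns locations. The toy driver below, written against invented types, shows only that walking skeleton; register selection, spilling and the separate fixed list are omitted.

    import java.util.ArrayList;
    import java.util.Comparator;
    import java.util.Iterator;
    import java.util.List;

    final class WalkSkeletonSketch {

        static final class IntervalModel {
            final String name;
            final int from;
            final int to;

            IntervalModel(String name, int from, int to) {
                this.name = name;
                this.from = from;
                this.to = to;
            }
        }

        static void walk(List<IntervalModel> unhandled) {
            unhandled.sort(Comparator.comparingInt((IntervalModel i) -> i.from));
            List<IntervalModel> active = new ArrayList<>();
            for (IntervalModel current : unhandled) {
                // retire intervals that end before the current position
                for (Iterator<IntervalModel> it = active.iterator(); it.hasNext();) {
                    if (it.next().to <= current.from) {
                        it.remove();
                    }
                }
                // a real walker would now try to find a free register or spill something
                System.out.println("allocate " + current.name + " at " + current.from + ", " + active.size() + " active");
                active.add(current);
            }
        }

        public static void main(String[] args) {
            List<IntervalModel> unhandled = new ArrayList<>();
            unhandled.add(new IntervalModel("v0", 0, 10));
            unhandled.add(new IntervalModel("v1", 4, 8));
            unhandled.add(new IntervalModel("v2", 12, 20));
            walk(unhandled);
        }
    }
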
--- a/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceLinearScanResolveDataFlowPhase.java	Mon Nov 30 15:07:18 2015 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,240 +0,0 @@
-/*
- * Copyright (c) 2015, 2015, Oracle and/or its affiliates. All rights reserved.
- * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
- *
- * This code is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License version 2 only, as
- * published by the Free Software Foundation.
- *
- * This code is distributed in the hope that it will be useful, but WITHOUT
- * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
- * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
- * version 2 for more details (a copy is included in the LICENSE file that
- * accompanied this code).
- *
- * You should have received a copy of the GNU General Public License version
- * 2 along with this work; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
- *
- * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
- * or visit www.oracle.com if you need additional information or have any
- * questions.
- */
-package com.oracle.graal.lir.alloc.trace;
-
-import static com.oracle.graal.compiler.common.GraalOptions.DetailedAsserts;
-import static com.oracle.graal.lir.LIRValueUtil.asConstant;
-import static com.oracle.graal.lir.LIRValueUtil.isConstantValue;
-import static com.oracle.graal.lir.LIRValueUtil.isStackSlotValue;
-import static com.oracle.graal.lir.LIRValueUtil.isVirtualStackSlot;
-import static jdk.vm.ci.code.ValueUtil.isRegister;
-
-import java.util.BitSet;
-import java.util.List;
-import java.util.ListIterator;
-
-import jdk.vm.ci.code.TargetDescription;
-import jdk.vm.ci.meta.Value;
-
-import com.oracle.graal.compiler.common.alloc.TraceBuilder.TraceBuilderResult;
-import com.oracle.graal.compiler.common.cfg.AbstractBlockBase;
-import com.oracle.graal.debug.Debug;
-import com.oracle.graal.debug.DebugMetric;
-import com.oracle.graal.debug.Indent;
-import com.oracle.graal.lir.LIRInstruction;
-import com.oracle.graal.lir.StandardOp;
-import com.oracle.graal.lir.gen.LIRGenerationResult;
-import com.oracle.graal.lir.ssa.SSAUtil.PhiValueVisitor;
-import com.oracle.graal.lir.ssi.SSIUtil;
-
-/**
- * Phase 6: resolve data flow
- *
- * Insert moves at edges between blocks if intervals have been split.
- */
-final class TraceLinearScanResolveDataFlowPhase extends TraceLinearScanAllocationPhase {
-
-    @Override
-    protected <B extends AbstractBlockBase<B>> void run(TargetDescription target, LIRGenerationResult lirGenRes, List<B> codeEmittingOrder, List<B> linearScanOrder,
-                    TraceLinearScanAllocationContext context) {
-        TraceBuilderResult<?> traceBuilderResult = context.traceBuilderResult;
-        TraceLinearScan allocator = context.allocator;
-        new Resolver(allocator, traceBuilderResult).resolveDataFlow(allocator.sortedBlocks());
-    }
-
-    private static final class Resolver {
-        private final TraceLinearScan allocator;
-        private final TraceBuilderResult<?> traceBuilderResult;
-
-        private Resolver(TraceLinearScan allocator, TraceBuilderResult<?> traceBuilderResult) {
-            this.allocator = allocator;
-            this.traceBuilderResult = traceBuilderResult;
-        }
-
-        private void resolveFindInsertPos(AbstractBlockBase<?> fromBlock, AbstractBlockBase<?> toBlock, TraceLocalMoveResolver moveResolver) {
-            if (fromBlock.getSuccessorCount() <= 1) {
-                if (Debug.isLogEnabled()) {
-                    Debug.log("inserting moves at end of fromBlock B%d", fromBlock.getId());
-                }
-
-                List<LIRInstruction> instructions = allocator.getLIR().getLIRforBlock(fromBlock);
-                LIRInstruction instr = instructions.get(instructions.size() - 1);
-                if (instr instanceof StandardOp.JumpOp) {
-                    // insert moves before branch
-                    moveResolver.setInsertPosition(instructions, instructions.size() - 1);
-                } else {
-                    moveResolver.setInsertPosition(instructions, instructions.size());
-                }
-
-            } else {
-                if (Debug.isLogEnabled()) {
-                    Debug.log("inserting moves at beginning of toBlock B%d", toBlock.getId());
-                }
-
-                if (DetailedAsserts.getValue()) {
-                    assert allocator.getLIR().getLIRforBlock(fromBlock).get(0) instanceof StandardOp.LabelOp : "block does not start with a label";
-
-                    /*
-                     * Because the number of predecessor edges matches the number of successor
-                     * edges, blocks which are reached by switch statements may have more than
-                     * one predecessor, but it is guaranteed that all predecessors are the
-                     * same.
-                     */
-                    for (AbstractBlockBase<?> predecessor : toBlock.getPredecessors()) {
-                        assert fromBlock == predecessor : "all critical edges must be broken";
-                    }
-                }
-
-                moveResolver.setInsertPosition(allocator.getLIR().getLIRforBlock(toBlock), 1);
-            }
-        }
-
-        /**
-         * Inserts necessary moves (spilling or reloading) at edges between blocks for intervals
-         * that have been split.
-         */
-        @SuppressWarnings("try")
-        private void resolveDataFlow(List<? extends AbstractBlockBase<?>> blocks) {
-            if (blocks.size() < 2) {
-                // no resolution necessary
-                return;
-            }
-            try (Indent indent = Debug.logAndIndent("resolve data flow")) {
-
-                TraceLocalMoveResolver moveResolver = allocator.createMoveResolver();
-                ListIterator<? extends AbstractBlockBase<?>> it = blocks.listIterator();
-                AbstractBlockBase<?> toBlock = null;
-                for (AbstractBlockBase<?> fromBlock = it.next(); it.hasNext(); fromBlock = toBlock) {
-                    toBlock = it.next();
-                    assert containedInTrace(fromBlock) : "Not in Trace: " + fromBlock;
-                    assert containedInTrace(toBlock) : "Not in Trace: " + toBlock;
-                    resolveCollectMappings(fromBlock, toBlock, moveResolver);
-                }
-                assert blocks.get(blocks.size() - 1).equals(toBlock);
-                if (toBlock.isLoopEnd()) {
-                    assert toBlock.getSuccessorCount() == 1;
-                    AbstractBlockBase<?> loopHeader = toBlock.getSuccessors().get(0);
-                    if (containedInTrace(loopHeader)) {
-                        resolveCollectMappings(toBlock, loopHeader, moveResolver);
-                    }
-                }
-
-            }
-        }
-
-        @SuppressWarnings("try")
-        private void resolveCollectMappings(AbstractBlockBase<?> fromBlock, AbstractBlockBase<?> toBlock, TraceLocalMoveResolver moveResolver) {
-            try (Indent indent0 = Debug.logAndIndent("Edge %s -> %s", fromBlock, toBlock)) {
-                collectLSRAMappings(fromBlock, toBlock, moveResolver);
-                collectSSIMappings(fromBlock, toBlock, moveResolver);
-            }
-        }
-
-        protected void collectLSRAMappings(AbstractBlockBase<?> fromBlock, AbstractBlockBase<?> toBlock, TraceLocalMoveResolver moveResolver) {
-            assert moveResolver.checkEmpty();
-
-            int toBlockFirstInstructionId = allocator.getFirstLirInstructionId(toBlock);
-            int fromBlockLastInstructionId = allocator.getLastLirInstructionId(fromBlock) + 1;
-            int numOperands = allocator.operandSize();
-            BitSet liveAtEdge = allocator.getBlockData(toBlock).liveIn;
-
-            // visit all variables for which the liveAtEdge bit is set
-            for (int operandNum = liveAtEdge.nextSetBit(0); operandNum >= 0; operandNum = liveAtEdge.nextSetBit(operandNum + 1)) {
-                assert operandNum < numOperands : "live information set for non-existing interval";
-                assert allocator.getBlockData(fromBlock).liveOut.get(operandNum) && allocator.getBlockData(toBlock).liveIn.get(operandNum) : "interval not live at this edge";
-
-                TraceInterval fromInterval = allocator.splitChildAtOpId(allocator.intervalFor(operandNum), fromBlockLastInstructionId, LIRInstruction.OperandMode.DEF);
-                TraceInterval toInterval = allocator.splitChildAtOpId(allocator.intervalFor(operandNum), toBlockFirstInstructionId, LIRInstruction.OperandMode.DEF);
-
-                if (fromInterval != toInterval && !fromInterval.location().equals(toInterval.location())) {
-                    // need to insert move instruction
-                    moveResolver.addMapping(fromInterval, toInterval);
-                }
-            }
-        }
-
-        protected void collectSSIMappings(AbstractBlockBase<?> fromBlock, AbstractBlockBase<?> toBlock, TraceLocalMoveResolver moveResolver) {
-            // collect all intervals that have been split between
-            // fromBlock and toBlock
-            SSIUtil.forEachValuePair(allocator.getLIR(), toBlock, fromBlock, new MyPhiValueVisitor(moveResolver, toBlock, fromBlock));
-            if (moveResolver.hasMappings()) {
-                resolveFindInsertPos(fromBlock, toBlock, moveResolver);
-                moveResolver.resolveAndAppendMoves();
-            }
-        }
-
-        private boolean containedInTrace(AbstractBlockBase<?> block) {
-            return currentTrace() == traceBuilderResult.getTraceForBlock(block);
-        }
-
-        private int currentTrace() {
-            return traceBuilderResult.getTraceForBlock(allocator.sortedBlocks().get(0));
-        }
-
-        private static final DebugMetric numSSIResolutionMoves = Debug.metric("SSI LSRA[numSSIResolutionMoves]");
-        private static final DebugMetric numStackToStackMoves = Debug.metric("SSI LSRA[numStackToStackMoves]");
-
-        private class MyPhiValueVisitor implements PhiValueVisitor {
-            final TraceLocalMoveResolver moveResolver;
-            final int toId;
-            final int fromId;
-
-            public MyPhiValueVisitor(TraceLocalMoveResolver moveResolver, AbstractBlockBase<?> toBlock, AbstractBlockBase<?> fromBlock) {
-                this.moveResolver = moveResolver;
-                toId = allocator.getFirstLirInstructionId(toBlock);
-                fromId = allocator.getLastLirInstructionId(fromBlock);
-                assert fromId >= 0;
-            }
-
-            public void visit(Value phiIn, Value phiOut) {
-                assert !isRegister(phiOut) : "Out is a register: " + phiOut;
-                assert !isRegister(phiIn) : "In is a register: " + phiIn;
-                if (Value.ILLEGAL.equals(phiIn)) {
-                    // The value is not needed in this branch.
-                    return;
-                }
-                if (isVirtualStackSlot(phiIn) && isVirtualStackSlot(phiOut) && phiIn.equals(phiOut)) {
-                    // no need to handle virtual stack slots
-                    return;
-                }
-                TraceInterval toInterval = allocator.splitChildAtOpId(allocator.intervalFor(phiIn), toId, LIRInstruction.OperandMode.DEF);
-                if (isConstantValue(phiOut)) {
-                    numSSIResolutionMoves.increment();
-                    moveResolver.addMapping(asConstant(phiOut), toInterval);
-                } else {
-                    TraceInterval fromInterval = allocator.splitChildAtOpId(allocator.intervalFor(phiOut), fromId, LIRInstruction.OperandMode.DEF);
-                    if (fromInterval != toInterval) {
-                        numSSIResolutionMoves.increment();
-                        if (!(isStackSlotValue(toInterval.location()) && isStackSlotValue(fromInterval.location()))) {
-                            moveResolver.addMapping(fromInterval, toInterval);
-                        } else {
-                            numStackToStackMoves.increment();
-                            moveResolver.addMapping(fromInterval, toInterval);
-                        }
-                    }
-                }
-            }
-        }
-    }
-
-}
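
The resolver above visits consecutive blocks of the trace and, for each value live across an edge, compares its location at the end of the predecessor with its location at the start of the successor, recording a move where they differ. Below is a simplified, stand-alone version of that comparison; locations are plain strings and the split-child lookup is replaced by maps, so none of these names exist in the sources.

    import java.util.ArrayList;
    import java.util.BitSet;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    final class EdgeResolutionSketch {

        static List<String> collectMappings(BitSet liveAtEdge, Map<Integer, String> locationAtFromEnd, Map<Integer, String> locationAtToStart) {
            List<String> moves = new ArrayList<>();
            // visit all variables for which the live-at-edge bit is set
            for (int operand = liveAtEdge.nextSetBit(0); operand >= 0; operand = liveAtEdge.nextSetBit(operand + 1)) {
                String from = locationAtFromEnd.get(operand);
                String to = locationAtToStart.get(operand);
                if (!from.equals(to)) {
                    moves.add("v" + operand + ": " + from + " -> " + to); // becomes a resolution move
                }
            }
            return moves;
        }

        public static void main(String[] args) {
            BitSet live = new BitSet();
            live.set(1);
            live.set(3);
            Map<Integer, String> fromEnd = new HashMap<>();
            Map<Integer, String> toStart = new HashMap<>();
            fromEnd.put(1, "rax");
            toStart.put(1, "stack:8"); // spilled across the edge -> move needed
            fromEnd.put(3, "rbx");
            toStart.put(3, "rbx");     // same location -> nothing to do
            System.out.println(collectMappings(live, fromEnd, toStart));
        }
    }
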
--- a/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceLinearScanWalker.java	Mon Nov 30 15:07:18 2015 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1129 +0,0 @@
-/*
- * Copyright (c) 2009, 2015, Oracle and/or its affiliates. All rights reserved.
- * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
- *
- * This code is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License version 2 only, as
- * published by the Free Software Foundation.
- *
- * This code is distributed in the hope that it will be useful, but WITHOUT
- * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
- * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
- * version 2 for more details (a copy is included in the LICENSE file that
- * accompanied this code).
- *
- * You should have received a copy of the GNU General Public License version
- * 2 along with this work; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
- *
- * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
- * or visit www.oracle.com if you need additional information or have any
- * questions.
- */
-package com.oracle.graal.lir.alloc.trace;
-
-import static com.oracle.graal.lir.LIRValueUtil.isStackSlotValue;
-import static com.oracle.graal.lir.LIRValueUtil.isVariable;
-import static jdk.vm.ci.code.CodeUtil.isOdd;
-import static jdk.vm.ci.code.ValueUtil.asRegister;
-import static jdk.vm.ci.code.ValueUtil.isRegister;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.BitSet;
-import java.util.List;
-
-import jdk.vm.ci.code.BailoutException;
-import jdk.vm.ci.code.Register;
-import jdk.vm.ci.common.JVMCIError;
-import jdk.vm.ci.meta.Value;
-
-import com.oracle.graal.compiler.common.alloc.RegisterAllocationConfig.AllocatableRegisters;
-import com.oracle.graal.compiler.common.cfg.AbstractBlockBase;
-import com.oracle.graal.compiler.common.util.Util;
-import com.oracle.graal.debug.Debug;
-import com.oracle.graal.debug.Indent;
-import com.oracle.graal.lir.LIRInstruction;
-import com.oracle.graal.lir.StandardOp.BlockEndOp;
-import com.oracle.graal.lir.StandardOp.LabelOp;
-import com.oracle.graal.lir.StandardOp.ValueMoveOp;
-import com.oracle.graal.lir.alloc.lsra.OutOfRegistersException;
-import com.oracle.graal.lir.alloc.trace.TraceInterval.RegisterPriority;
-import com.oracle.graal.lir.alloc.trace.TraceInterval.SpillState;
-import com.oracle.graal.lir.alloc.trace.TraceInterval.State;
-
-/**
- */
-final class TraceLinearScanWalker extends TraceIntervalWalker {
-
-    private Register[] availableRegs;
-
-    private final int[] usePos;
-    private final int[] blockPos;
-    private final BitSet isInMemory;
-
-    private List<TraceInterval>[] spillIntervals;
-
-    private TraceLocalMoveResolver moveResolver; // for ordering spill moves
-
-    private int minReg;
-
-    private int maxReg;
-
-    /**
-     * Only 10% of the lists in {@link #spillIntervals} are actually used. But when they are used,
-     * they can grow quite long. The maximum length observed was 45 (all numbers taken from a
-     * bootstrap run of Graal). Therefore, we initialize {@link #spillIntervals} with this marker
-     * value, and allocate a "real" list only on demand in {@link #setUsePos}.
-     */
-    private static final List<TraceInterval> EMPTY_LIST = new ArrayList<>(0);
-
-    // accessors mapped to same functions in class LinearScan
-    private int blockCount() {
-        return allocator.blockCount();
-    }
-
-    private AbstractBlockBase<?> blockAt(int idx) {
-        return allocator.blockAt(idx);
-    }
-
-    @SuppressWarnings("unused")
-    private AbstractBlockBase<?> blockOfOpWithId(int opId) {
-        return allocator.blockForId(opId);
-    }
-
-    TraceLinearScanWalker(TraceLinearScan allocator, FixedInterval unhandledFixedFirst, TraceInterval unhandledAnyFirst) {
-        super(allocator, unhandledFixedFirst, unhandledAnyFirst);
-
-        moveResolver = allocator.createMoveResolver();
-        int numRegs = allocator.getRegisters().length;
-        spillIntervals = Util.uncheckedCast(new List<?>[numRegs]);
-        for (int i = 0; i < numRegs; i++) {
-            spillIntervals[i] = EMPTY_LIST;
-        }
-        usePos = new int[numRegs];
-        blockPos = new int[numRegs];
-        isInMemory = new BitSet(numRegs);
-    }
-
-    private void initUseLists(boolean onlyProcessUsePos) {
-        for (Register register : availableRegs) {
-            int i = register.number;
-            usePos[i] = Integer.MAX_VALUE;
-
-            if (!onlyProcessUsePos) {
-                blockPos[i] = Integer.MAX_VALUE;
-                spillIntervals[i].clear();
-                isInMemory.clear(i);
-            }
-        }
-    }
-
-    private int maxRegisterNumber() {
-        return maxReg;
-    }
-
-    private int minRegisterNumber() {
-        return minReg;
-    }
-
-    private boolean isRegisterInRange(int reg) {
-        return reg >= minRegisterNumber() && reg <= maxRegisterNumber();
-    }
-
-    private void excludeFromUse(IntervalHint i) {
-        Value location = i.location();
-        int i1 = asRegister(location).number;
-        if (isRegisterInRange(i1)) {
-            usePos[i1] = 0;
-        }
-    }
-
-    private void setUsePos(TraceInterval interval, int usePos, boolean onlyProcessUsePos) {
-        if (usePos != -1) {
-            assert usePos != 0 : "must use excludeFromUse to set usePos to 0";
-            int i = asRegister(interval.location()).number;
-            if (isRegisterInRange(i)) {
-                if (this.usePos[i] > usePos) {
-                    this.usePos[i] = usePos;
-                }
-                if (!onlyProcessUsePos) {
-                    List<TraceInterval> list = spillIntervals[i];
-                    if (list == EMPTY_LIST) {
-                        list = new ArrayList<>(2);
-                        spillIntervals[i] = list;
-                    }
-                    list.add(interval);
-                    // set is in memory flag
-                    if (interval.inMemoryAt(currentPosition)) {
-                        isInMemory.set(i);
-                    }
-                }
-            }
-        }
-    }
-
-    private void setUsePos(FixedInterval interval, int usePos, boolean onlyProcessUsePos) {
-        assert onlyProcessUsePos;
-        if (usePos != -1) {
-            assert usePos != 0 : "must use excludeFromUse to set usePos to 0";
-            int i = asRegister(interval.location()).number;
-            if (isRegisterInRange(i)) {
-                if (this.usePos[i] > usePos) {
-                    this.usePos[i] = usePos;
-                }
-            }
-        }
-    }
-
-    private void setBlockPos(IntervalHint i, int blockPos) {
-        if (blockPos != -1) {
-            int reg = asRegister(i.location()).number;
-            if (isRegisterInRange(reg)) {
-                if (this.blockPos[reg] > blockPos) {
-                    this.blockPos[reg] = blockPos;
-                }
-                if (usePos[reg] > blockPos) {
-                    usePos[reg] = blockPos;
-                }
-            }
-        }
-    }
-
-    private void freeExcludeActiveFixed() {
-        FixedInterval interval = activeFixedList.getFixed();
-        while (interval != FixedInterval.EndMarker) {
-            assert isRegister(interval.location()) : "active interval must have a register assigned";
-            excludeFromUse(interval);
-            interval = interval.next;
-        }
-    }
-
-    private void freeExcludeActiveAny() {
-        TraceInterval interval = activeAnyList.getAny();
-        while (interval != TraceInterval.EndMarker) {
-            assert isRegister(interval.location()) : "active interval must have a register assigned";
-            excludeFromUse(interval);
-            interval = interval.next;
-        }
-    }
-
-    private void freeCollectInactiveFixed(TraceInterval current) {
-        FixedInterval interval = inactiveFixedList.getFixed();
-        while (interval != FixedInterval.EndMarker) {
-            if (current.to() <= interval.from()) {
-                assert interval.intersectsAt(current) == -1 : "must not intersect";
-                setUsePos(interval, interval.from(), true);
-            } else {
-                setUsePos(interval, interval.currentIntersectsAt(current), true);
-            }
-            interval = interval.next;
-        }
-    }
-
-    private void spillExcludeActiveFixed() {
-        FixedInterval interval = activeFixedList.getFixed();
-        while (interval != FixedInterval.EndMarker) {
-            excludeFromUse(interval);
-            interval = interval.next;
-        }
-    }
-
-    private void spillBlockInactiveFixed(TraceInterval current) {
-        FixedInterval interval = inactiveFixedList.getFixed();
-        while (interval != FixedInterval.EndMarker) {
-            if (current.to() > interval.currentFrom()) {
-                setBlockPos(interval, interval.currentIntersectsAt(current));
-            } else {
-                assert interval.currentIntersectsAt(current) == -1 : "invalid optimization: intervals intersect";
-            }
-
-            interval = interval.next;
-        }
-    }
-
-    private void spillCollectActiveAny(RegisterPriority registerPriority) {
-        TraceInterval interval = activeAnyList.getAny();
-        while (interval != TraceInterval.EndMarker) {
-            setUsePos(interval, Math.min(interval.nextUsage(registerPriority, currentPosition), interval.to()), false);
-            interval = interval.next;
-        }
-    }
-
-    @SuppressWarnings("unused")
-    private int insertIdAtBasicBlockBoundary(int opId) {
-        assert allocator.isBlockBegin(opId) : "Not a block begin: " + opId;
-        assert allocator.instructionForId(opId) instanceof LabelOp;
-        assert allocator.instructionForId(opId - 2) instanceof BlockEndOp;
-
-        AbstractBlockBase<?> toBlock = allocator.blockForId(opId);
-        AbstractBlockBase<?> fromBlock = allocator.blockForId(opId - 2);
-
-        if (fromBlock.getSuccessorCount() == 1) {
-            // insert move in predecessor
-            return opId - 2;
-        }
-        assert toBlock.getPredecessorCount() == 1 : String.format("Critical Edge? %s->%s", fromBlock, toBlock);
-        // insert move in successor
-        return opId + 2;
-    }
-
-    private void insertMove(int operandId, TraceInterval srcIt, TraceInterval dstIt) {
-        // output all moves here. When source and target are equal, the move is
-        // optimized away later in assignRegNums
-
-        int opId = (operandId + 1) & ~1;
-        AbstractBlockBase<?> opBlock = allocator.blockForId(opId);
-        assert opId > 0 && allocator.blockForId(opId - 2) == opBlock : "cannot insert move at block boundary";
-
-        // calculate index of instruction inside instruction list of current block
-        // the minimal index (for a block with no spill moves) can be calculated because the
-        // numbering of instructions is known.
-        // When the block already contains spill moves, the index must be increased until the
-        // correct index is reached.
-        List<LIRInstruction> instructions = allocator.getLIR().getLIRforBlock(opBlock);
-        int index = (opId - instructions.get(0).id()) >> 1;
-        assert instructions.get(index).id() <= opId : "error in calculation";
-
-        while (instructions.get(index).id() != opId) {
-            index++;
-            assert 0 <= index && index < instructions.size() : "index out of bounds";
-        }
-        assert 1 <= index && index < instructions.size() : "index out of bounds";
-        assert instructions.get(index).id() == opId : "error in calculation";
-
-        // insert new instruction before instruction at position index
-        moveResolver.moveInsertPosition(instructions, index);
-        moveResolver.addMapping(srcIt, dstIt);
-    }
-
-    private int findOptimalSplitPos(AbstractBlockBase<?> minBlock, AbstractBlockBase<?> maxBlock, int maxSplitPos) {
-        int fromBlockNr = minBlock.getLinearScanNumber();
-        int toBlockNr = maxBlock.getLinearScanNumber();
-
-        assert 0 <= fromBlockNr && fromBlockNr < blockCount() : "out of range";
-        assert 0 <= toBlockNr && toBlockNr < blockCount() : "out of range";
-        assert fromBlockNr < toBlockNr : "must cross block boundary";
-
-        // Try to split at end of maxBlock. If this would be after
-        // maxSplitPos, then use the beginning of maxBlock
-        int optimalSplitPos = allocator.getLastLirInstructionId(maxBlock) + 2;
-        if (optimalSplitPos > maxSplitPos) {
-            optimalSplitPos = allocator.getFirstLirInstructionId(maxBlock);
-        }
-
-        // minimal block probability
-        double minProbability = maxBlock.probability();
-        for (int i = toBlockNr - 1; i >= fromBlockNr; i--) {
-            AbstractBlockBase<?> cur = blockAt(i);
-
-            if (cur.probability() < minProbability) {
-                // Block with lower probability found. Split at the end of this block.
-                minProbability = cur.probability();
-                optimalSplitPos = allocator.getLastLirInstructionId(cur) + 2;
-            }
-        }
-        assert optimalSplitPos > allocator.maxOpId() || allocator.isBlockBegin(optimalSplitPos) : "algorithm must move split pos to block boundary";
-
-        return optimalSplitPos;
-    }
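
findOptimalSplitPos(minBlock, maxBlock, maxSplitPos) prefers to split at the end of the least frequently executed block in the range, falling back to the beginning of maxBlock when its end would lie beyond maxSplitPos. The sketch below restates that heuristic over plain arrays indexed by linear-scan block number; all names are invented.

    final class SplitPositionHeuristicSketch {

        static int optimalSplitPos(double[] probability, int[] firstOpId, int[] lastOpId, int fromBlockNr, int toBlockNr, int maxSplitPos) {
            // try the end of maxBlock first; if that is too late, use its beginning
            int optimal = lastOpId[toBlockNr] + 2;
            if (optimal > maxSplitPos) {
                optimal = firstOpId[toBlockNr];
            }
            // prefer the end of the block with the lowest execution probability
            double minProbability = probability[toBlockNr];
            for (int i = toBlockNr - 1; i >= fromBlockNr; i--) {
                if (probability[i] < minProbability) {
                    minProbability = probability[i];
                    optimal = lastOpId[i] + 2;
                }
            }
            return optimal;
        }

        public static void main(String[] args) {
            double[] probability = {1.0, 0.1, 0.5};
            int[] firstOpId = {0, 10, 20};
            int[] lastOpId = {8, 18, 28};
            // splitting somewhere between block 0 and block 2: block 1 is coldest, so split after its last op
            System.out.println(optimalSplitPos(probability, firstOpId, lastOpId, 0, 2, 40)); // prints 20
        }
    }
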
-
-    @SuppressWarnings({"unused"})
-    private int findOptimalSplitPos(TraceInterval interval, int minSplitPos, int maxSplitPos, boolean doLoopOptimization) {
-        int optimalSplitPos = findOptimalSplitPos0(minSplitPos, maxSplitPos);
-        if (Debug.isLogEnabled()) {
-            Debug.log("optimal split position: %d", optimalSplitPos);
-        }
-        return optimalSplitPos;
-    }
-
-    private int findOptimalSplitPos0(int minSplitPos, int maxSplitPos) {
-        // TODO (je) implement
-        if (minSplitPos == maxSplitPos) {
-            // trivial case, no optimization of split position possible
-            if (Debug.isLogEnabled()) {
-                Debug.log("min-pos and max-pos are equal, no optimization possible");
-            }
-            return minSplitPos;
-
-        }
-        assert minSplitPos < maxSplitPos : "must be true then";
-        assert minSplitPos > 0 : "cannot access minSplitPos - 1 otherwise";
-
-        // reason for using minSplitPos - 1: when the minimal split pos is exactly at the
-        // beginning of a block, then minSplitPos is also a possible split position.
-        // Use the block before as minBlock, because then minBlock.lastLirInstructionId() + 2 ==
-        // minSplitPos
-        AbstractBlockBase<?> minBlock = allocator.blockForId(minSplitPos - 1);
-
-        // reason for using maxSplitPos - 1: otherwise there would be an assert on failure
-        // when an interval ends at the end of the last block of the method
-        // (in this case, maxSplitPos == allocator().maxLirOpId() + 2, and there is no
-        // block at this opId)
-        AbstractBlockBase<?> maxBlock = allocator.blockForId(maxSplitPos - 1);
-
-        assert minBlock.getLinearScanNumber() <= maxBlock.getLinearScanNumber() : "invalid order";
-        if (minBlock == maxBlock) {
-            // split position cannot be moved to block boundary : so split as late as possible
-            if (Debug.isLogEnabled()) {
-                Debug.log("cannot move split pos to block boundary because minPos and maxPos are in same block");
-            }
-            return maxSplitPos;
-
-        }
-        // search for the optimal block boundary between minSplitPos and maxSplitPos
-        if (Debug.isLogEnabled()) {
-            Debug.log("moving split pos to optimal block boundary between block B%d and B%d", minBlock.getId(), maxBlock.getId());
-        }
-
-        return findOptimalSplitPos(minBlock, maxBlock, maxSplitPos);
-    }
-
-    // split an interval at the optimal position between minSplitPos and
-    // maxSplitPos in two parts:
-    // 1) the left part already has a location assigned
-    // 2) the right part is sorted into the unhandled list
-    @SuppressWarnings("try")
-    private void splitBeforeUsage(TraceInterval interval, int minSplitPos, int maxSplitPos) {
-
-        try (Indent indent = Debug.logAndIndent("splitting interval %s between %d and %d", interval, minSplitPos, maxSplitPos)) {
-
-            assert interval.from() < minSplitPos : "cannot split at start of interval";
-            assert currentPosition < minSplitPos : "cannot split before current position";
-            assert minSplitPos <= maxSplitPos : "invalid order";
-            assert maxSplitPos <= interval.to() : "cannot split after end of interval";
-
-            final int optimalSplitPos = findOptimalSplitPos(interval, minSplitPos, maxSplitPos, true);
-
-            if (optimalSplitPos == interval.to() && interval.nextUsage(RegisterPriority.MustHaveRegister, minSplitPos) == Integer.MAX_VALUE) {
-                // the split position would be just before the end of the interval
-                // -> no split at all necessary
-                if (Debug.isLogEnabled()) {
-                    Debug.log("no split necessary because optimal split position is at end of interval");
-                }
-                return;
-            }
-            // must calculate this before the actual split is performed and before split position is
-            // moved to odd opId
-            final int optimalSplitPosFinal;
-            boolean blockBegin = allocator.isBlockBegin(optimalSplitPos);
-            if (blockBegin) {
-                assert (optimalSplitPos & 1) == 0 : "Block begins must be even: " + optimalSplitPos;
-                // move position after the label (odd opId)
-                optimalSplitPosFinal = optimalSplitPos + 1;
-            } else {
-                // move position before actual instruction (odd opId)
-                optimalSplitPosFinal = (optimalSplitPos - 1) | 1;
-            }
-
-            // TODO (je) better define what minSplitPos and maxSplitPos mean.
-            assert minSplitPos <= optimalSplitPosFinal && optimalSplitPosFinal <= maxSplitPos || minSplitPos == maxSplitPos && optimalSplitPosFinal == minSplitPos - 1 : "out of range";
-            assert optimalSplitPosFinal <= interval.to() : "cannot split after end of interval";
-            assert optimalSplitPosFinal > interval.from() : "cannot split at start of interval";
-
-            if (Debug.isLogEnabled()) {
-                Debug.log("splitting at position %d", optimalSplitPosFinal);
-            }
-            assert optimalSplitPosFinal > currentPosition : "Can not split interval " + interval + " at current position: " + currentPosition;
-
-            // was:
-            // assert isBlockBegin || ((optimalSplitPos1 & 1) == 1) :
-            // "split pos must be odd when not on block boundary";
-            // assert !isBlockBegin || ((optimalSplitPos1 & 1) == 0) :
-            // "split pos must be even on block boundary";
-            assert (optimalSplitPosFinal & 1) == 1 : "split pos must be odd";
-
-            // TODO (je) duplicate code. try to fold
-            if (optimalSplitPosFinal == interval.to() && interval.nextUsage(RegisterPriority.MustHaveRegister, minSplitPos) == Integer.MAX_VALUE) {
-                // the split position would be just before the end of the interval
-                // -> no split at all necessary
-                if (Debug.isLogEnabled()) {
-                    Debug.log("no split necessary because optimal split position is at end of interval");
-                }
-                return;
-            }
-            TraceInterval splitPart = interval.split(optimalSplitPosFinal, allocator);
-
-            boolean moveNecessary = true;
-            splitPart.setInsertMoveWhenActivated(moveNecessary);
-
-            assert splitPart.from() >= currentPosition : "cannot append new interval before current walk position";
-            unhandledAnyList.addToListSortedByStartAndUsePositions(splitPart);
-
-            if (Debug.isLogEnabled()) {
-                Debug.log("left interval  %s: %s", moveNecessary ? "      " : "", interval.logString(allocator));
-                Debug.log("right interval %s: %s", moveNecessary ? "(move)" : "", splitPart.logString(allocator));
-            }
-        }
-    }
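
splitBeforeUsage moves the chosen split position to an odd opId: instruction ids are even, so an odd id denotes the gap next to an instruction, and a split at a block begin is pushed just past the label. The tiny sketch below shows only that adjustment; the position search and the actual split are omitted, and the numbers are illustrative.

    final class SplitOpIdAdjustmentSketch {

        static int toOddSplitPos(int optimalSplitPos, boolean isBlockBegin) {
            if (isBlockBegin) {
                // block begins are even; move the split just after the label
                return optimalSplitPos + 1;
            }
            // otherwise move the split just before the instruction at optimalSplitPos
            return (optimalSplitPos - 1) | 1;
        }

        public static void main(String[] args) {
            System.out.println(toOddSplitPos(16, true));  // 17
            System.out.println(toOddSplitPos(16, false)); // 15
            System.out.println(toOddSplitPos(17, false)); // 17 (already odd, stays before the next instruction)
        }
    }
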
-
-    // split an interval at the optimal position between minSplitPos and
-    // maxSplitPos in two parts:
-    // 1) the left part already has a location assigned
-    // 2) the right part is always on the stack and therefore ignored in further processing
-    @SuppressWarnings("try")
-    private void splitForSpilling(TraceInterval interval) {
-        // calculate allowed range of splitting position
-        int maxSplitPos = currentPosition;
-        int previousUsage = interval.previousUsage(RegisterPriority.ShouldHaveRegister, maxSplitPos);
-        if (previousUsage == currentPosition) {
-            /*
-             * If there is a usage with ShouldHaveRegister priority at the current position, fall
-             * back to MustHaveRegister priority. This only happens if register priority was
-             * downgraded to MustHaveRegister in #allocLockedRegister.
-             */
-            previousUsage = interval.previousUsage(RegisterPriority.MustHaveRegister, maxSplitPos);
-        }
-        int minSplitPos = Math.max(previousUsage + 1, interval.from());
-
-        try (Indent indent = Debug.logAndIndent("splitting and spilling interval %s between %d and %d", interval, minSplitPos, maxSplitPos)) {
-
-            assert interval.state == State.Active : "why spill interval that is not active?";
-            assert interval.from() <= minSplitPos : "cannot split before start of interval";
-            assert minSplitPos <= maxSplitPos : "invalid order";
-            assert maxSplitPos < interval.to() : "cannot split at the end of the interval";
-            assert currentPosition < interval.to() : "interval must not end before current position";
-
-            if (minSplitPos == interval.from()) {
-                // the whole interval is never used, so spill it entirely to memory
-
-                try (Indent indent2 = Debug.logAndIndent("spilling entire interval because split pos is at beginning of interval (use positions: %d)", interval.usePosList().size())) {
-
-                    assert interval.firstUsage(RegisterPriority.MustHaveRegister) > currentPosition : String.format("interval %s must not have use position before currentPosition %d", interval,
-                                    currentPosition);
-
-                    allocator.assignSpillSlot(interval);
-                    handleSpillSlot(interval);
-                    changeSpillState(interval, minSplitPos);
-
-                    // Also kick parent intervals out of register to memory when they have no use
-                    // position. This avoids a short interval in a register surrounded by intervals
-                    // in memory -> avoids useless moves from memory to register and back.
-                    TraceInterval parent = interval;
-                    while (parent != null && parent.isSplitChild()) {
-                        parent = parent.getSplitChildBeforeOpId(parent.from());
-
-                        if (isRegister(parent.location())) {
-                            if (parent.firstUsage(RegisterPriority.ShouldHaveRegister) == Integer.MAX_VALUE) {
-                                // parent is never used, so kick it out of its assigned register
-                                if (Debug.isLogEnabled()) {
-                                    Debug.log("kicking out interval %d out of its register because it is never used", parent.operandNumber);
-                                }
-                                allocator.assignSpillSlot(parent);
-                                handleSpillSlot(parent);
-                            } else {
-                                // do not go further back because the register is actually used by
-                                // the interval
-                                parent = null;
-                            }
-                        }
-                    }
-                }
-
-            } else {
-                // search optimal split pos, split interval and spill only the right hand part
-                int optimalSplitPos = findOptimalSplitPos(interval, minSplitPos, maxSplitPos, false);
-
-                assert minSplitPos <= optimalSplitPos && optimalSplitPos <= maxSplitPos : "out of range";
-                assert optimalSplitPos < interval.to() : "cannot split at end of interval";
-                assert optimalSplitPos >= interval.from() : "cannot split before start of interval";
-
-                if (!allocator.isBlockBegin(optimalSplitPos)) {
-                    // move position before actual instruction (odd opId)
-                    optimalSplitPos = (optimalSplitPos - 1) | 1;
-                }
-
-                try (Indent indent2 = Debug.logAndIndent("splitting at position %d", optimalSplitPos)) {
-                    assert allocator.isBlockBegin(optimalSplitPos) || ((optimalSplitPos & 1) == 1) : "split pos must be odd when not on block boundary";
-                    assert !allocator.isBlockBegin(optimalSplitPos) || ((optimalSplitPos & 1) == 0) : "split pos must be even on block boundary";
-
-                    TraceInterval spilledPart = interval.split(optimalSplitPos, allocator);
-                    allocator.assignSpillSlot(spilledPart);
-                    handleSpillSlot(spilledPart);
-                    changeSpillState(spilledPart, optimalSplitPos);
-
-                    if (!allocator.isBlockBegin(optimalSplitPos)) {
-                        if (Debug.isLogEnabled()) {
-                            Debug.log("inserting move from interval %s to %s", interval, spilledPart);
-                        }
-                        insertMove(optimalSplitPos, interval, spilledPart);
-                    } else {
-                        if (Debug.isLogEnabled()) {
-                            Debug.log("no need to insert move. done by data-flow resolution");
-                        }
-                    }
-
-                    // the currentSplitChild is needed later when moves are inserted for reloading
-                    assert spilledPart.currentSplitChild() == interval : "overwriting wrong currentSplitChild";
-                    spilledPart.makeCurrentSplitChild();
-
-                    if (Debug.isLogEnabled()) {
-                        Debug.log("left interval: %s", interval.logString(allocator));
-                        Debug.log("spilled interval   : %s", spilledPart.logString(allocator));
-                    }
-                }
-            }
-        }
-    }
-
-    /**
-     * Change spill state of an interval.
-     *
-     * Note: called during register allocation.
-     *
-     * @param spillPos position of the spill
-     */
-    private void changeSpillState(TraceInterval interval, int spillPos) {
-        if (TraceLinearScan.Options.LIROptTraceRAEliminateSpillMoves.getValue()) {
-            switch (interval.spillState()) {
-                case NoSpillStore:
-                    final int minSpillPos = interval.spillDefinitionPos();
-                    final int maxSpillPost = spillPos;
-
-                    final int optimalSpillPos = findOptimalSpillPos(minSpillPos, maxSpillPost);
-
-                    // assert !allocator.isBlockBegin(optimalSpillPos);
-                    assert !allocator.isBlockEnd(optimalSpillPos);
-                    assert (optimalSpillPos & 1) == 0 : "Spill pos must be even";
-
-                    interval.setSpillDefinitionPos(optimalSpillPos);
-                    interval.setSpillState(SpillState.SpillStore);
-                    break;
-                case SpillStore:
-                case StartInMemory:
-                case NoOptimization:
-                case NoDefinitionFound:
-                    // nothing to do
-                    break;
-
-                default:
-                    throw new BailoutException("other states not allowed at this time");
-            }
-        } else {
-            interval.setSpillState(SpillState.NoOptimization);
-        }
-    }
-
-    /**
-     * @param minSpillPos minimal spill position
-     * @param maxSpillPos maximal spill position
-     */
-    private int findOptimalSpillPos(int minSpillPos, int maxSpillPos) {
-        // TODO (JE): implement
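-        // Clearing the lowest bit rounds the position down to an even opId; this matches the
-        // "Spill pos must be even" assertion in changeSpillState.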
-        int optimalSpillPos = findOptimalSpillPos0(minSpillPos, maxSpillPos) & (~1);
-        if (Debug.isLogEnabled()) {
-            Debug.log("optimal spill position: %d", optimalSpillPos);
-        }
-        return optimalSpillPos;
-    }
-
-    private int findOptimalSpillPos0(int minSpillPos, int maxSpillPos) {
-        // TODO (je) implement
-        if (minSpillPos == maxSpillPos) {
-            // trivial case, no optimization of split position possible
-            if (Debug.isLogEnabled()) {
-                Debug.log("min-pos and max-pos are equal, no optimization possible");
-            }
-            return minSpillPos;
-
-        }
-        assert minSpillPos < maxSpillPos : "must be true then";
-        assert minSpillPos >= 0 : "cannot access minSplitPos - 1 otherwise";
-
-        AbstractBlockBase<?> minBlock = allocator.blockForId(minSpillPos);
-        AbstractBlockBase<?> maxBlock = allocator.blockForId(maxSpillPos);
-
-        assert minBlock.getLinearScanNumber() <= maxBlock.getLinearScanNumber() : "invalid order";
-        if (minBlock == maxBlock) {
-            // split position cannot be moved to block boundary -> split as late as possible
-            if (Debug.isLogEnabled()) {
-                Debug.log("cannot move split pos to block boundary because minPos and maxPos are in same block");
-            }
-            return maxSpillPos;
-
-        }
-        // search optimal block boundary between minSplitPos and maxSplitPos
-        if (Debug.isLogEnabled()) {
-            Debug.log("moving split pos to optimal block boundary between block B%d and B%d", minBlock.getId(), maxBlock.getId());
-        }
-
-        // currently using the same heuristic as for splitting
-        return findOptimalSpillPos(minBlock, maxBlock, maxSpillPos);
-    }
-
-    private int findOptimalSpillPos(AbstractBlockBase<?> minBlock, AbstractBlockBase<?> maxBlock, int maxSplitPos) {
-        int fromBlockNr = minBlock.getLinearScanNumber();
-        int toBlockNr = maxBlock.getLinearScanNumber();
-
-        assert 0 <= fromBlockNr && fromBlockNr < blockCount() : "out of range";
-        assert 0 <= toBlockNr && toBlockNr < blockCount() : "out of range";
-        assert fromBlockNr < toBlockNr : "must cross block boundary";
-
-        /*
-         * Try to split at the end of maxBlock. If this would be after maxSplitPos, then use the
-         * beginning of maxBlock. We use the last instruction id minus 2 because we want to insert
-         * the move before the block end op.
-         */
-        int optimalSplitPos = allocator.getLastLirInstructionId(maxBlock) - 2;
-        if (optimalSplitPos > maxSplitPos) {
-            optimalSplitPos = allocator.getFirstLirInstructionId(maxBlock);
-        }
-
-        // minimal block probability
-        double minProbability = maxBlock.probability();
-        for (int i = toBlockNr - 1; i >= fromBlockNr; i--) {
-            AbstractBlockBase<?> cur = blockAt(i);
-
-            if (cur.probability() < minProbability) {
-                // Block with lower probability found. Split at the end of this block.
-                minProbability = cur.probability();
-                optimalSplitPos = allocator.getLastLirInstructionId(cur) - 2;
-            }
-        }
-        assert optimalSplitPos > allocator.maxOpId() || allocator.isBlockBegin(optimalSplitPos) || allocator.isBlockEnd(optimalSplitPos + 2) : "algorithm must move split pos to block boundary";
-
-        return optimalSplitPos;
-    }
-
-    /**
-     * This is called for every interval that is assigned to a stack slot.
-     */
-    private static void handleSpillSlot(TraceInterval interval) {
-        assert interval.location() != null && (interval.canMaterialize() || isStackSlotValue(interval.location())) : "interval not assigned to a stack slot " + interval;
-        // Do nothing. Stack slots are not processed in this implementation.
-    }
-
-    private void splitStackInterval(TraceInterval interval) {
-        int minSplitPos = currentPosition + 1;
-        int maxSplitPos = Math.min(interval.firstUsage(RegisterPriority.ShouldHaveRegister), interval.to());
-
-        splitBeforeUsage(interval, minSplitPos, maxSplitPos);
-    }
-
-    private void splitWhenPartialRegisterAvailable(TraceInterval interval, int registerAvailableUntil) {
-        int minSplitPos = Math.max(interval.previousUsage(RegisterPriority.ShouldHaveRegister, registerAvailableUntil), interval.from() + 1);
-        splitBeforeUsage(interval, minSplitPos, registerAvailableUntil);
-    }
-
-    private void splitAndSpillInterval(TraceInterval interval) {
-        assert interval.state == State.Active || interval.state == State.Inactive : "other states not allowed";
-
-        int currentPos = currentPosition;
-        if (interval.state == State.Inactive) {
-            // the interval is currently inactive, so no spill slot is needed for now.
-            // when the split part is activated, the interval has a new chance to get a register,
-            // so in the best case no stack slot is necessary
-            throw JVMCIError.shouldNotReachHere("TraceIntervals can not be inactive!");
-
-        } else {
-            // search the position where the interval must have a register and split
-            // at the optimal position before.
-            // The newly created part is added to the unhandled list and will get a register
-            // when it is activated.
-            int minSplitPos = currentPos + 1;
-            int maxSplitPos = interval.nextUsage(RegisterPriority.MustHaveRegister, minSplitPos);
-
-            if (maxSplitPos <= interval.to()) {
-                splitBeforeUsage(interval, minSplitPos, maxSplitPos);
-            } else {
-                Debug.log("No more usage, no need to split: %s", interval);
-            }
-
-            assert interval.nextUsage(RegisterPriority.MustHaveRegister, currentPos) == Integer.MAX_VALUE : "the remaining part is spilled to stack and therefore has no register";
-            splitForSpilling(interval);
-        }
-    }
-
-    @SuppressWarnings("try")
-    private boolean allocFreeRegister(TraceInterval interval) {
-        try (Indent indent = Debug.logAndIndent("trying to find free register for %s", interval)) {
-
-            initUseLists(true);
-            freeExcludeActiveFixed();
-            freeCollectInactiveFixed(interval);
-            freeExcludeActiveAny();
-            // freeCollectUnhandled(fixedKind, cur);
-
-            // usePos contains the start of the next interval that has this register assigned
-            // (either as a fixed register or a normal allocated register in the past)
-            // only intervals overlapping with cur are processed, non-overlapping intervals can be
-            // ignored safely
-            if (Debug.isLogEnabled()) {
-                // Enable this logging to see all register states
-                try (Indent indent2 = Debug.logAndIndent("state of registers:")) {
-                    for (Register register : availableRegs) {
-                        int i = register.number;
-                        Debug.log("reg %d (%s): usePos: %d", register.number, register, usePos[i]);
-                    }
-                }
-            }
-
-            Register hint = null;
-            IntervalHint locationHint = interval.locationHint(true);
-            if (locationHint != null && locationHint.location() != null && isRegister(locationHint.location())) {
-                hint = asRegister(locationHint.location());
-                if (Debug.isLogEnabled()) {
-                    Debug.log("hint register %3d (%4s) from interval %s", hint.number, hint, locationHint);
-                }
-            }
-            assert interval.location() == null : "register already assigned to interval";
-
-            // the register must be free at least until this position
-            int regNeededUntil = interval.from() + 1;
-            int intervalTo = interval.to();
-
-            boolean needSplit = false;
-            int splitPos = -1;
-
-            Register reg = null;
-            Register minFullReg = null;
-            Register maxPartialReg = null;
-
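-            // minFullReg: a register not needed again before the end of the interval (free for the
-            // whole interval); prefer the hint register, otherwise the smallest usePos.
-            // maxPartialReg: a register free at least until regNeededUntil; prefer the hint,
-            // otherwise the largest usePos (free the longest), which later requires a split.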
-            for (Register availableReg : availableRegs) {
-                int number = availableReg.number;
-                if (usePos[number] >= intervalTo) {
-                    // this register is free for the full interval
-                    if (minFullReg == null || availableReg.equals(hint) || (usePos[number] < usePos[minFullReg.number] && !minFullReg.equals(hint))) {
-                        minFullReg = availableReg;
-                    }
-                } else if (usePos[number] > regNeededUntil) {
-                    // this register is at least free until regNeededUntil
-                    if (maxPartialReg == null || availableReg.equals(hint) || (usePos[number] > usePos[maxPartialReg.number] && !maxPartialReg.equals(hint))) {
-                        maxPartialReg = availableReg;
-                    }
-                }
-            }
-
-            if (minFullReg != null) {
-                reg = minFullReg;
-            } else if (maxPartialReg != null) {
-                needSplit = true;
-                reg = maxPartialReg;
-            } else {
-                return false;
-            }
-
-            splitPos = usePos[reg.number];
-            interval.assignLocation(reg.asValue(interval.kind()));
-            if (Debug.isLogEnabled()) {
-                Debug.log("selected register %d (%s)", reg.number, reg);
-            }
-
-            assert splitPos > 0 : "invalid splitPos";
-            if (needSplit) {
-                // register not available for full interval, so split it
-                splitWhenPartialRegisterAvailable(interval, splitPos);
-            }
-            // only return true if interval is completely assigned
-            return true;
-        }
-    }
-
-    private void splitAndSpillIntersectingIntervals(Register reg) {
-        assert reg != null : "no register assigned";
-
-        for (int i = 0; i < spillIntervals[reg.number].size(); i++) {
-            TraceInterval interval = spillIntervals[reg.number].get(i);
-            removeFromList(interval);
-            splitAndSpillInterval(interval);
-        }
-    }
-
-    // Split an Interval and spill it to memory so that cur can be placed in a register
-    @SuppressWarnings("try")
-    private void allocLockedRegister(TraceInterval interval) {
-        try (Indent indent = Debug.logAndIndent("alloc locked register: need to split and spill to get register for %s", interval)) {
-
-            // the register must be free at least until this position
-            int firstUsage = interval.firstUsage(RegisterPriority.MustHaveRegister);
-            int firstShouldHaveUsage = interval.firstUsage(RegisterPriority.ShouldHaveRegister);
-            int regNeededUntil = Math.min(firstUsage, interval.from() + 1);
-            int intervalTo = interval.to();
-            assert regNeededUntil >= 0 && regNeededUntil < Integer.MAX_VALUE : "interval has no use";
-
-            Register reg;
-            Register ignore;
-            /*
-             * In the common case we don't spill registers that have _any_ use position that is
-             * closer than the next use of the current interval, but if we can't spill the current
-             * interval we weaken this strategy and also allow spilling of intervals that have only
-             * non-mandatory requirements (no MustHaveRegister use position).
-             */
-            for (RegisterPriority registerPriority = RegisterPriority.LiveAtLoopEnd; true; registerPriority = RegisterPriority.MustHaveRegister) {
-                // collect current usage of registers
-                initUseLists(false);
-                spillExcludeActiveFixed();
-                // spillBlockUnhandledFixed(cur);
-                spillBlockInactiveFixed(interval);
-                spillCollectActiveAny(registerPriority);
-                if (Debug.isLogEnabled()) {
-                    printRegisterState();
-                }
-
-                reg = null;
-                ignore = interval.location() != null && isRegister(interval.location()) ? asRegister(interval.location()) : null;
-
-                for (Register availableReg : availableRegs) {
-                    int number = availableReg.number;
-                    if (availableReg.equals(ignore)) {
-                        // this register must be ignored
-                    } else if (usePos[number] > regNeededUntil) {
-                        /*
-                         * If the use position is the same, prefer registers (active intervals)
-                         * where the value is already on the stack.
-                         */
-                        if (reg == null || (usePos[number] > usePos[reg.number]) || (usePos[number] == usePos[reg.number] && (!isInMemory.get(reg.number) && isInMemory.get(number)))) {
-                            reg = availableReg;
-                        }
-                    }
-                }
-
-                if (Debug.isLogEnabled()) {
-                    Debug.log("Register Selected: %s", reg);
-                }
-
-                int regUsePos = (reg == null ? 0 : usePos[reg.number]);
-                if (regUsePos <= firstShouldHaveUsage) {
-                    /* Check if there is another interval that is already in memory. */
-                    if (reg == null || interval.inMemoryAt(currentPosition) || !isInMemory.get(reg.number)) {
-                        if (Debug.isLogEnabled()) {
-                            Debug.log("able to spill current interval. firstUsage(register): %d, usePos: %d", firstUsage, regUsePos);
-                        }
-
-                        if (firstUsage <= interval.from() + 1) {
-                            if (registerPriority.equals(RegisterPriority.LiveAtLoopEnd)) {
-                                /*
-                                 * Tool of last resort: we cannot spill the current interval, so we
-                                 * try to spill an active interval that has a usage but does not
-                                 * require a register.
-                                 */
-                                Debug.log("retry with register priority must have register");
-                                continue;
-                            }
-                            String description = "cannot spill interval (" + interval + ") that is used in first instruction (possible reason: no register found) firstUsage=" + firstUsage +
-                                            ", interval.from()=" + interval.from() + "; already used candidates: " + Arrays.toString(availableRegs);
-                            /*
-                             * Assign a spill slot so the interval has a location, then bail out
-                             * with an OutOfRegistersException.
-                             */
-                            allocator.assignSpillSlot(interval);
-                            Debug.dump(allocator.getLIR(), description);
-                            allocator.printIntervals(description);
-                            throw new OutOfRegistersException("LinearScan: no register found", description);
-                        }
-
-                        splitAndSpillInterval(interval);
-                        return;
-                    }
-                }
-                // common case: break out of the loop
-                break;
-            }
-
-            boolean needSplit = blockPos[reg.number] <= intervalTo;
-
-            int splitPos = blockPos[reg.number];
-
-            if (Debug.isLogEnabled()) {
-                Debug.log("decided to use register %d", reg.number);
-            }
-            assert splitPos > 0 : "invalid splitPos";
-            assert needSplit || splitPos > interval.from() : "splitting interval at from";
-
-            interval.assignLocation(reg.asValue(interval.kind()));
-            if (needSplit) {
-                // register not available for full interval -> split it
-                splitWhenPartialRegisterAvailable(interval, splitPos);
-            }
-
-            // perform splitting and spilling for all affected intervals
-            splitAndSpillIntersectingIntervals(reg);
-            return;
-        }
-    }
-
-    @SuppressWarnings("try")
-    private void printRegisterState() {
-        try (Indent indent2 = Debug.logAndIndent("state of registers:")) {
-            for (Register reg : availableRegs) {
-                int i = reg.number;
-                try (Indent indent3 = Debug.logAndIndent("reg %d: usePos: %d, blockPos: %d, inMemory: %b, intervals: ", i, usePos[i], blockPos[i], isInMemory.get(i))) {
-                    for (int j = 0; j < spillIntervals[i].size(); j++) {
-                        Debug.log("%s", spillIntervals[i].get(j));
-                    }
-                }
-            }
-        }
-    }
-
-    private boolean noAllocationPossible(TraceInterval interval) {
-        if (allocator.callKillsRegisters()) {
-            // fast calculation of intervals that can never get a register because
-            // the next instruction is a call that blocks all registers
-            // Note: this only works if a call kills all registers
-
-            // check if this interval is the result of a split operation
-            // (an interval got a register until this position)
-            int pos = interval.from();
-            if (isOdd(pos)) {
-                // the current instruction is a call that blocks all registers
-                if (pos < allocator.maxOpId() && allocator.hasCall(pos + 1) && interval.to() > pos + 1) {
-                    if (Debug.isLogEnabled()) {
-                        Debug.log("free register cannot be available because all registers blocked by following call");
-                    }
-
-                    // safety check that there is really no register available
-                    assert !allocFreeRegister(interval) : "found a register for this interval";
-                    return true;
-                }
-            }
-        }
-        return false;
-    }
-
-    private void initVarsForAlloc(TraceInterval interval) {
-        AllocatableRegisters allocatableRegisters = allocator.getRegisterAllocationConfig().getAllocatableRegisters(interval.kind().getPlatformKind());
-        availableRegs = allocatableRegisters.allocatableRegisters;
-        minReg = allocatableRegisters.minRegisterNumber;
-        maxReg = allocatableRegisters.maxRegisterNumber;
-    }
-
-    private static boolean isMove(LIRInstruction op, TraceInterval from, TraceInterval to) {
-        if (op instanceof ValueMoveOp) {
-            ValueMoveOp move = (ValueMoveOp) op;
-            if (isVariable(move.getInput()) && isVariable(move.getResult())) {
-                return move.getInput() != null && move.getInput().equals(from.operand) && move.getResult() != null && move.getResult().equals(to.operand);
-            }
-        }
-        return false;
-    }
-
-    // optimization (especially for phi functions of nested loops):
-    // assign same spill slot to non-intersecting intervals
-    private void combineSpilledIntervals(TraceInterval interval) {
-        if (interval.isSplitChild()) {
-            // optimization is only suitable for split parents
-            return;
-        }
-
-        IntervalHint locationHint = interval.locationHint(false);
-        if (locationHint == null || !(locationHint instanceof TraceInterval)) {
-            return;
-        }
-        TraceInterval registerHint = (TraceInterval) locationHint;
-        assert registerHint.isSplitParent() : "register hint must be split parent";
-
-        if (interval.spillState() != SpillState.NoOptimization || registerHint.spillState() != SpillState.NoOptimization) {
-            // combining the stack slots for intervals where spill move optimization is applied
-            // is not beneficial and would cause problems
-            return;
-        }
-
-        int beginPos = interval.from();
-        int endPos = interval.to();
-        if (endPos > allocator.maxOpId() || isOdd(beginPos) || isOdd(endPos)) {
-            // safety check that lirOpWithId is allowed
-            return;
-        }
-
-        if (!isMove(allocator.instructionForId(beginPos), registerHint, interval) || !isMove(allocator.instructionForId(endPos), interval, registerHint)) {
-            // cur and registerHint are not connected with two moves
-            return;
-        }
-
-        TraceInterval beginHint = registerHint.getSplitChildAtOpId(beginPos, LIRInstruction.OperandMode.USE, allocator);
-        TraceInterval endHint = registerHint.getSplitChildAtOpId(endPos, LIRInstruction.OperandMode.DEF, allocator);
-        if (beginHint == endHint || beginHint.to() != beginPos || endHint.from() != endPos) {
-            // registerHint must be split, otherwise the re-writing of use positions does not work
-            return;
-        }
-
-        assert beginHint.location() != null : "must have register assigned";
-        assert endHint.location() == null : "must not have register assigned";
-        assert interval.firstUsage(RegisterPriority.MustHaveRegister) == beginPos : "must have use position at begin of interval because of move";
-        assert endHint.firstUsage(RegisterPriority.MustHaveRegister) == endPos : "must have use position at end of interval because of move";
-
-        if (isRegister(beginHint.location())) {
-            // registerHint is not spilled at beginPos, so it would not be beneficial to
-            // immediately spill cur
-            return;
-        }
-        assert registerHint.spillSlot() != null : "must be set when part of interval was spilled";
-
-        // modify intervals such that cur gets the same stack slot as registerHint
-        // delete use positions to prevent the intervals from getting a register at the beginning
-        interval.setSpillSlot(registerHint.spillSlot());
-        interval.removeFirstUsePos();
-        endHint.removeFirstUsePos();
-    }
-
-    // allocate a physical register or memory location to an interval
-    @Override
-    @SuppressWarnings("try")
-    protected boolean activateCurrent(TraceInterval interval) {
-        if (Debug.isLogEnabled()) {
-            logCurrentStatus();
-        }
-        boolean result = true;
-
-        try (Indent indent = Debug.logAndIndent("activating interval %s,  splitParent: %d", interval, interval.splitParent().operandNumber)) {
-
-            final Value operand = interval.operand;
-            if (interval.location() != null && isStackSlotValue(interval.location())) {
-                // activating an interval that has a stack slot assigned -> split it at its first
-                // use position (used for method parameters)
-                if (Debug.isLogEnabled()) {
-                    Debug.log("interval has spill slot assigned (method parameter) . split it before first use");
-                }
-                splitStackInterval(interval);
-                result = false;
-
-            } else {
-                if (interval.location() == null) {
-                    // interval has no register assigned -> normal allocation
-                    // (this is the normal case for most intervals)
-                    if (Debug.isLogEnabled()) {
-                        Debug.log("normal allocation of register");
-                    }
-
-                    // assign same spill slot to non-intersecting intervals
-                    combineSpilledIntervals(interval);
-
-                    initVarsForAlloc(interval);
-                    if (noAllocationPossible(interval) || !allocFreeRegister(interval)) {
-                        // no free register available.
-                        // split and spill another interval so that this interval gets a register
-                        allocLockedRegister(interval);
-                    }
-
-                    // spilled intervals need not be moved to the active list
-                    if (!isRegister(interval.location())) {
-                        result = false;
-                    }
-                }
-            }
-
-            // load spilled values that become active from stack slot to register
-            if (interval.insertMoveWhenActivated()) {
-                assert interval.isSplitChild();
-                assert interval.currentSplitChild() != null;
-                assert !interval.currentSplitChild().operand.equals(operand) : "cannot insert move between same interval";
-                if (Debug.isLogEnabled()) {
-                    Debug.log("Inserting move from interval %d to %d because insertMoveWhenActivated is set", interval.currentSplitChild().operandNumber, interval.operandNumber);
-                }
-
-                insertMove(interval.from(), interval.currentSplitChild(), interval);
-            }
-            interval.makeCurrentSplitChild();
-
-        }
-
-        return result; // true = interval is moved to active list
-    }
-
-    void finishAllocation() {
-        // must be called when all intervals are allocated
-        moveResolver.resolveAndAppendMoves();
-    }
-}
--- a/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceLocalMoveResolver.java	Mon Nov 30 15:07:18 2015 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,550 +0,0 @@
-/*
- * Copyright (c) 2009, 2015, Oracle and/or its affiliates. All rights reserved.
- * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
- *
- * This code is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License version 2 only, as
- * published by the Free Software Foundation.
- *
- * This code is distributed in the hope that it will be useful, but WITHOUT
- * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
- * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
- * version 2 for more details (a copy is included in the LICENSE file that
- * accompanied this code).
- *
- * You should have received a copy of the GNU General Public License version
- * 2 along with this work; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
- *
- * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
- * or visit www.oracle.com if you need additional information or have any
- * questions.
- */
-package com.oracle.graal.lir.alloc.trace;
-
-import static com.oracle.graal.lir.LIRValueUtil.asVirtualStackSlot;
-import static com.oracle.graal.lir.LIRValueUtil.isStackSlotValue;
-import static com.oracle.graal.lir.LIRValueUtil.isVirtualStackSlot;
-import static jdk.vm.ci.code.ValueUtil.asRegister;
-import static jdk.vm.ci.code.ValueUtil.asStackSlot;
-import static jdk.vm.ci.code.ValueUtil.isIllegal;
-import static jdk.vm.ci.code.ValueUtil.isRegister;
-import static jdk.vm.ci.code.ValueUtil.isStackSlot;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.List;
-
-import jdk.vm.ci.code.StackSlot;
-import jdk.vm.ci.common.JVMCIError;
-import jdk.vm.ci.meta.AllocatableValue;
-import jdk.vm.ci.meta.Constant;
-import jdk.vm.ci.meta.JavaConstant;
-import jdk.vm.ci.meta.LIRKind;
-import jdk.vm.ci.meta.Value;
-
-import com.oracle.graal.debug.Debug;
-import com.oracle.graal.debug.Indent;
-import com.oracle.graal.lir.LIRInsertionBuffer;
-import com.oracle.graal.lir.LIRInstruction;
-import com.oracle.graal.lir.VirtualStackSlot;
-import com.oracle.graal.lir.framemap.FrameMap;
-import com.oracle.graal.lir.framemap.FrameMapBuilderTool;
-
-/**
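- * Resolves a set of move mappings (from intervals or constants to intervals) at a given insertion
- * position: moves are emitted in an order that respects dependencies, and cycles are broken by
- * spilling one of the participating intervals.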
- */
-public class TraceLocalMoveResolver {
-
-    private static final int STACK_SLOT_IN_CALLER_FRAME_IDX = -1;
-    private final TraceLinearScan allocator;
-
-    private int insertIdx;
-    private LIRInsertionBuffer insertionBuffer; // buffer where moves are inserted
-
-    private final List<TraceInterval> mappingFrom;
-    private final List<Constant> mappingFromOpr;
-    private final List<TraceInterval> mappingTo;
-    private final int[] registerBlocked;
-
-    private int[] stackBlocked;
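-    // index into stackBlocked where virtual stack slots start; real slots are indexed by frame offset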
-    private final int firstVirtualStackIndex;
-
-    private int getStackArrayIndex(Value stackSlotValue) {
-        if (isStackSlot(stackSlotValue)) {
-            return getStackArrayIndex(asStackSlot(stackSlotValue));
-        }
-        if (isVirtualStackSlot(stackSlotValue)) {
-            return getStackArrayIndex(asVirtualStackSlot(stackSlotValue));
-        }
-        throw JVMCIError.shouldNotReachHere("value is not a stack slot: " + stackSlotValue);
-    }
-
-    private int getStackArrayIndex(StackSlot stackSlot) {
-        int stackIdx;
-        if (stackSlot.isInCallerFrame()) {
-            // incoming stack arguments can be ignored
-            stackIdx = STACK_SLOT_IN_CALLER_FRAME_IDX;
-        } else {
-            assert stackSlot.getRawAddFrameSize() : "Unexpected stack slot: " + stackSlot;
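-            // slots in the current frame have a negative raw offset; negate it to get a
-            // non-negative index into the stackBlocked array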
-            int offset = -stackSlot.getRawOffset();
-            assert 0 <= offset && offset < firstVirtualStackIndex : String.format("Wrong stack slot offset: %d (first virtual stack slot index: %d)", offset, firstVirtualStackIndex);
-            stackIdx = offset;
-        }
-        return stackIdx;
-    }
-
-    private int getStackArrayIndex(VirtualStackSlot virtualStackSlot) {
-        return firstVirtualStackIndex + virtualStackSlot.getId();
-    }
-
-    protected void setValueBlocked(Value location, int direction) {
-        assert direction == 1 || direction == -1 : "out of bounds";
-        if (isStackSlotValue(location)) {
-            int stackIdx = getStackArrayIndex(location);
-            if (stackIdx == STACK_SLOT_IN_CALLER_FRAME_IDX) {
-                // incoming stack arguments can be ignored
-                return;
-            }
-            if (stackIdx >= stackBlocked.length) {
-                stackBlocked = Arrays.copyOf(stackBlocked, stackIdx + 1);
-            }
-            stackBlocked[stackIdx] += direction;
-        } else {
-            assert direction == 1 || direction == -1 : "out of bounds";
-            if (isRegister(location)) {
-                registerBlocked[asRegister(location).number] += direction;
-            } else {
-                throw JVMCIError.shouldNotReachHere("unhandled value " + location);
-            }
-        }
-    }
-
-    protected TraceInterval getMappingFrom(int i) {
-        return mappingFrom.get(i);
-    }
-
-    protected int mappingFromSize() {
-        return mappingFrom.size();
-    }
-
-    protected int valueBlocked(Value location) {
-        if (isStackSlotValue(location)) {
-            int stackIdx = getStackArrayIndex(location);
-            if (stackIdx == STACK_SLOT_IN_CALLER_FRAME_IDX) {
-                // incoming stack arguments are always blocked (i.e. they cannot be written)
-                return 1;
-            }
-            if (stackIdx >= stackBlocked.length) {
-                return 0;
-            }
-            return stackBlocked[stackIdx];
-        }
-        if (isRegister(location)) {
-            return registerBlocked[asRegister(location).number];
-        }
-        throw JVMCIError.shouldNotReachHere("unhandled value " + location);
-    }
-
-    protected boolean areMultipleReadsAllowed() {
-        return true;
-    }
-
-    boolean hasMappings() {
-        return mappingFrom.size() > 0;
-    }
-
-    protected TraceLinearScan getAllocator() {
-        return allocator;
-    }
-
-    protected TraceLocalMoveResolver(TraceLinearScan allocator) {
-
-        this.allocator = allocator;
-        this.mappingFrom = new ArrayList<>(8);
-        this.mappingFromOpr = new ArrayList<>(8);
-        this.mappingTo = new ArrayList<>(8);
-        this.insertIdx = -1;
-        this.insertionBuffer = new LIRInsertionBuffer();
-        this.registerBlocked = new int[allocator.getRegisters().length];
-        FrameMapBuilderTool frameMapBuilderTool = (FrameMapBuilderTool) allocator.getFrameMapBuilder();
-        FrameMap frameMap = frameMapBuilderTool.getFrameMap();
-        this.stackBlocked = new int[frameMapBuilderTool.getNumberOfStackSlots()];
-        this.firstVirtualStackIndex = !frameMap.frameNeedsAllocating() ? 0 : frameMap.currentFrameSize() + 1;
-    }
-
-    protected boolean checkEmpty() {
-        assert mappingFrom.size() == 0 && mappingFromOpr.size() == 0 && mappingTo.size() == 0 : "list must be empty before and after processing";
-        for (int i = 0; i < stackBlocked.length; i++) {
-            assert stackBlocked[i] == 0 : "stack map must be empty before and after processing";
-        }
-        for (int i = 0; i < getAllocator().getRegisters().length; i++) {
-            assert registerBlocked[i] == 0 : "register map must be empty before and after processing";
-        }
-        checkMultipleReads();
-        return true;
-    }
-
-    protected void checkMultipleReads() {
-        // multiple reads are allowed in SSA LSRA
-    }
-
-    private boolean verifyBeforeResolve() {
-        assert mappingFrom.size() == mappingFromOpr.size() : "length must be equal";
-        assert mappingFrom.size() == mappingTo.size() : "length must be equal";
-        assert insertIdx != -1 : "insert position not set";
-
-        int i;
-        int j;
-        if (!areMultipleReadsAllowed()) {
-            for (i = 0; i < mappingFrom.size(); i++) {
-                for (j = i + 1; j < mappingFrom.size(); j++) {
-                    assert mappingFrom.get(i) == null || mappingFrom.get(i) != mappingFrom.get(j) : "cannot read from same interval twice";
-                }
-            }
-        }
-
-        for (i = 0; i < mappingTo.size(); i++) {
-            for (j = i + 1; j < mappingTo.size(); j++) {
-                assert mappingTo.get(i) != mappingTo.get(j) : "cannot write to same interval twice";
-            }
-        }
-
-        HashSet<Value> usedRegs = new HashSet<>();
-        if (!areMultipleReadsAllowed()) {
-            for (i = 0; i < mappingFrom.size(); i++) {
-                TraceInterval interval = mappingFrom.get(i);
-                if (interval != null && !isIllegal(interval.location())) {
-                    boolean unique = usedRegs.add(interval.location());
-                    assert unique : "cannot read from same register twice";
-                }
-            }
-        }
-
-        usedRegs.clear();
-        for (i = 0; i < mappingTo.size(); i++) {
-            TraceInterval interval = mappingTo.get(i);
-            if (isIllegal(interval.location())) {
-                // After insertion the location may become illegal, so don't check it since multiple
-                // intervals might be illegal.
-                continue;
-            }
-            boolean unique = usedRegs.add(interval.location());
-            assert unique : "cannot write to same register twice";
-        }
-
-        verifyStackSlotMapping();
-
-        return true;
-    }
-
-    protected void verifyStackSlotMapping() {
-        // relax disjoint stack maps invariant
-    }
-
-    // mark assignedReg and assignedRegHi of the interval as blocked
-    private void blockRegisters(TraceInterval interval) {
-        Value location = interval.location();
-        if (mightBeBlocked(location)) {
-            assert areMultipleReadsAllowed() || valueBlocked(location) == 0 : "location already marked as used: " + location;
-            int direction = 1;
-            setValueBlocked(location, direction);
-            Debug.log("block %s", location);
-        }
-    }
-
-    // mark assignedReg and assignedRegHi of the interval as unblocked
-    private void unblockRegisters(TraceInterval interval) {
-        Value location = interval.location();
-        if (mightBeBlocked(location)) {
-            assert valueBlocked(location) > 0 : "location already marked as unused: " + location;
-            setValueBlocked(location, -1);
-            Debug.log("unblock %s", location);
-        }
-    }
-
-    /**
-     * Checks if the {@linkplain TraceInterval#location() location} of {@code to} is not blocked or
-     * is only blocked by {@code from}.
-     */
-    private boolean safeToProcessMove(TraceInterval from, TraceInterval to) {
-        Value fromReg = from != null ? from.location() : null;
-
-        Value location = to.location();
-        if (mightBeBlocked(location)) {
-            if ((valueBlocked(location) > 1 || (valueBlocked(location) == 1 && !isMoveToSelf(fromReg, location)))) {
-                return false;
-            }
-        }
-
-        return true;
-    }
-
-    protected boolean isMoveToSelf(Value from, Value to) {
-        assert to != null;
-        if (to.equals(from)) {
-            return true;
-        }
-        if (from != null && isRegister(from) && isRegister(to) && asRegister(from).equals(asRegister(to))) {
-            assert LIRKind.verifyMoveKinds(to.getLIRKind(), from.getLIRKind()) : String.format("Same register but Kind mismatch %s <- %s", to, from);
-            return true;
-        }
-        return false;
-    }
-
-    protected boolean mightBeBlocked(Value location) {
-        if (isRegister(location)) {
-            return true;
-        }
-        if (isStackSlotValue(location)) {
-            return true;
-        }
-        return false;
-    }
-
-    private void createInsertionBuffer(List<LIRInstruction> list) {
-        assert !insertionBuffer.initialized() : "overwriting existing buffer";
-        insertionBuffer.init(list);
-    }
-
-    private void appendInsertionBuffer() {
-        if (insertionBuffer.initialized()) {
-            insertionBuffer.finish();
-        }
-        assert !insertionBuffer.initialized() : "must be uninitialized now";
-
-        insertIdx = -1;
-    }
-
-    private void insertMove(TraceInterval fromInterval, TraceInterval toInterval) {
-        assert !fromInterval.operand.equals(toInterval.operand) : "from and to interval equal: " + fromInterval;
-        assert LIRKind.verifyMoveKinds(toInterval.kind(), fromInterval.kind()) : "move between different types";
-        assert insertIdx != -1 : "must setup insert position first";
-
-        insertionBuffer.append(insertIdx, createMove(fromInterval.operand, toInterval.operand, fromInterval.location(), toInterval.location()));
-
-        if (Debug.isLogEnabled()) {
-            Debug.log("insert move from %s to %s at %d", fromInterval, toInterval, insertIdx);
-        }
-    }
-
-    /**
-     * @param fromOpr {@link TraceInterval#operand operand} of the {@code from} interval
-     * @param toOpr {@link TraceInterval#operand operand} of the {@code to} interval
-     * @param fromLocation {@link TraceInterval#location() location} of the {@code from} interval
-     * @param toLocation {@link TraceInterval#location() location} of the {@code to} interval
-     */
-    protected LIRInstruction createMove(AllocatableValue fromOpr, AllocatableValue toOpr, AllocatableValue fromLocation, AllocatableValue toLocation) {
-        if (isStackSlotValue(toLocation) && isStackSlotValue(fromLocation)) {
-            return getAllocator().getSpillMoveFactory().createStackMove(toOpr, fromOpr);
-        }
-        return getAllocator().getSpillMoveFactory().createMove(toOpr, fromOpr);
-    }
-
-    private void insertMove(Constant fromOpr, TraceInterval toInterval) {
-        assert insertIdx != -1 : "must setup insert position first";
-
-        AllocatableValue toOpr = toInterval.operand;
-        LIRInstruction move = getAllocator().getSpillMoveFactory().createLoad(toOpr, fromOpr);
-        insertionBuffer.append(insertIdx, move);
-
-        if (Debug.isLogEnabled()) {
-            Debug.log("insert move from value %s to %s at %d", fromOpr, toInterval, insertIdx);
-        }
-    }
-
-    @SuppressWarnings("try")
-    private void resolveMappings() {
-        try (Indent indent = Debug.logAndIndent("resolveMapping")) {
-            assert verifyBeforeResolve();
-            if (Debug.isLogEnabled()) {
-                printMapping();
-            }
-
-            // Block all registers that are used as input operands of a move.
-            // When a register is blocked, no move to this register is emitted.
-            // This is necessary for detecting cycles in moves.
-            int i;
-            for (i = mappingFrom.size() - 1; i >= 0; i--) {
-                TraceInterval fromInterval = mappingFrom.get(i);
-                if (fromInterval != null) {
-                    blockRegisters(fromInterval);
-                }
-            }
-
-            int spillCandidate = -1;
-            while (mappingFrom.size() > 0) {
-                boolean processedInterval = false;
-
-                for (i = mappingFrom.size() - 1; i >= 0; i--) {
-                    TraceInterval fromInterval = mappingFrom.get(i);
-                    TraceInterval toInterval = mappingTo.get(i);
-
-                    if (safeToProcessMove(fromInterval, toInterval)) {
-                        // this interval can be processed because target is free
-                        if (fromInterval != null) {
-                            insertMove(fromInterval, toInterval);
-                            unblockRegisters(fromInterval);
-                        } else {
-                            insertMove(mappingFromOpr.get(i), toInterval);
-                        }
-                        mappingFrom.remove(i);
-                        mappingFromOpr.remove(i);
-                        mappingTo.remove(i);
-
-                        processedInterval = true;
-                    } else if (fromInterval != null && isRegister(fromInterval.location())) {
-                        // this interval cannot be processed now because target is not free
-                        // it starts in a register, so it is a possible candidate for spilling
-                        spillCandidate = i;
-                    }
-                }
-
-                if (!processedInterval) {
-                    breakCycle(spillCandidate);
-                }
-            }
-        }
-
-        // check that all intervals have been processed
-        assert checkEmpty();
-    }
-
-    protected void breakCycle(int spillCandidate) {
-        if (spillCandidate != -1) {
-            // no move could be processed because there is a cycle in the move list
-            // (e.g. r1 -> r2, r2 -> r1), so one interval must be spilled to memory
-            assert spillCandidate != -1 : "no interval in register for spilling found";
-
-            // create a new spill interval and assign a stack slot to it
-            TraceInterval fromInterval1 = mappingFrom.get(spillCandidate);
-            // do not allocate a new spill slot for the temporary interval, but
-            // use the spill slot assigned to fromInterval. Otherwise moves from
-            // one stack slot to another can happen (not allowed by the LIRAssembler).
-            AllocatableValue spillSlot1 = fromInterval1.spillSlot();
-            if (spillSlot1 == null) {
-                spillSlot1 = getAllocator().getFrameMapBuilder().allocateSpillSlot(fromInterval1.kind());
-                fromInterval1.setSpillSlot(spillSlot1);
-            }
-            spillInterval(spillCandidate, fromInterval1, spillSlot1);
-            return;
-        }
-        assert mappingFromSize() > 1;
-        // Arbitrarily select the first entry for spilling.
-        int stackSpillCandidate = 0;
-        TraceInterval fromInterval = getMappingFrom(stackSpillCandidate);
-        assert isStackSlotValue(fromInterval.location());
-        // allocate new stack slot
-        VirtualStackSlot spillSlot = getAllocator().getFrameMapBuilder().allocateSpillSlot(fromInterval.kind());
-        spillInterval(stackSpillCandidate, fromInterval, spillSlot);
-    }
-
-    protected void spillInterval(int spillCandidate, TraceInterval fromInterval, AllocatableValue spillSlot) {
-        assert mappingFrom.get(spillCandidate).equals(fromInterval);
-        TraceInterval spillInterval = getAllocator().createDerivedInterval(fromInterval);
-        spillInterval.setKind(fromInterval.kind());
-
-        // add a dummy range because real position is difficult to calculate
-        // Note: this range is a special case when the integrity of the allocation is
-        // checked
-        spillInterval.addRange(1, 2);
-
-        spillInterval.assignLocation(spillSlot);
-
-        if (Debug.isLogEnabled()) {
-            Debug.log("created new Interval for spilling: %s", spillInterval);
-        }
-        blockRegisters(spillInterval);
-
-        // insert a move from register to stack and update the mapping
-        insertMove(fromInterval, spillInterval);
-        mappingFrom.set(spillCandidate, spillInterval);
-        unblockRegisters(fromInterval);
-    }
-
-    @SuppressWarnings("try")
-    private void printMapping() {
-        try (Indent indent = Debug.logAndIndent("Mapping")) {
-            for (int i = mappingFrom.size() - 1; i >= 0; i--) {
-                TraceInterval fromInterval = mappingFrom.get(i);
-                TraceInterval toInterval = mappingTo.get(i);
-                String from;
-                Value to = toInterval.location();
-                if (fromInterval == null) {
-                    from = mappingFromOpr.get(i).toString();
-                } else {
-                    from = fromInterval.location().toString();
-                }
-                Debug.log("move %s <- %s", from, to);
-            }
-        }
-    }
-
-    void setInsertPosition(List<LIRInstruction> insertList, int insertIdx) {
-        assert this.insertIdx == -1 : "use moveInsertPosition instead of setInsertPosition when data already set";
-
-        createInsertionBuffer(insertList);
-        this.insertIdx = insertIdx;
-    }
-
-    void moveInsertPosition(List<LIRInstruction> newInsertList, int newInsertIdx) {
-        if (insertionBuffer.lirList() != null && (insertionBuffer.lirList() != newInsertList || this.insertIdx != newInsertIdx)) {
-            // insert position changed -> resolve current mappings
-            resolveMappings();
-        }
-
-        assert insertionBuffer.lirList() != newInsertList || newInsertIdx >= insertIdx : String.format("Decreasing insert index: old=%d new=%d", insertIdx, newInsertIdx);
-
-        if (insertionBuffer.lirList() != newInsertList) {
-            // block changed -> append insertionBuffer because it is
-            // bound to a specific block and create a new insertionBuffer
-            appendInsertionBuffer();
-            createInsertionBuffer(newInsertList);
-        }
-
-        this.insertIdx = newInsertIdx;
-    }
-
-    public void addMapping(TraceInterval fromInterval, TraceInterval toInterval) {
-
-        if (isIllegal(toInterval.location()) && toInterval.canMaterialize()) {
-            if (Debug.isLogEnabled()) {
-                Debug.log("no store to rematerializable interval %s needed", toInterval);
-            }
-            return;
-        }
-        if (isIllegal(fromInterval.location()) && fromInterval.canMaterialize()) {
-            // Instead of a reload, re-materialize the value
-            JavaConstant rematValue = fromInterval.getMaterializedValue();
-            addMapping(rematValue, toInterval);
-            return;
-        }
-        if (Debug.isLogEnabled()) {
-            Debug.log("add move mapping from %s to %s", fromInterval, toInterval);
-        }
-
-        assert !fromInterval.operand.equals(toInterval.operand) : "from and to interval equal: " + fromInterval;
-        assert LIRKind.verifyMoveKinds(toInterval.kind(), fromInterval.kind()) : String.format("Kind mismatch: %s vs. %s, from=%s, to=%s", fromInterval.kind(), toInterval.kind(), fromInterval,
-                        toInterval);
-        mappingFrom.add(fromInterval);
-        mappingFromOpr.add(null);
-        mappingTo.add(toInterval);
-    }
-
-    public void addMapping(Constant fromOpr, TraceInterval toInterval) {
-        if (Debug.isLogEnabled()) {
-            Debug.log("add move mapping from %s to %s", fromOpr, toInterval);
-        }
-
-        mappingFrom.add(null);
-        mappingFromOpr.add(fromOpr);
-        mappingTo.add(toInterval);
-    }
-
-    void resolveAndAppendMoves() {
-        if (hasMappings()) {
-            resolveMappings();
-        }
-        appendInsertionBuffer();
-    }
-}
--- a/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceRegisterAllocationPhase.java	Mon Nov 30 15:07:18 2015 +0100
+++ b/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceRegisterAllocationPhase.java	Mon Nov 30 17:18:36 2015 +0100
@@ -43,6 +43,7 @@
 import com.oracle.graal.lir.StandardOp.JumpOp;
 import com.oracle.graal.lir.StandardOp.LabelOp;
 import com.oracle.graal.lir.alloc.trace.TraceAllocationPhase.TraceAllocationContext;
+import com.oracle.graal.lir.alloc.trace.lsra.TraceLinearScan;
 import com.oracle.graal.lir.gen.LIRGenerationResult;
 import com.oracle.graal.lir.gen.LIRGeneratorTool.MoveFactory;
 import com.oracle.graal.lir.phases.AllocationPhase;
@@ -68,7 +69,7 @@
     private static final TraceGlobalMoveResolutionPhase TRACE_GLOBAL_MOVE_RESOLUTION_PHASE = new TraceGlobalMoveResolutionPhase();
     private static final TraceTrivialAllocator TRACE_TRIVIAL_ALLOCATOR = new TraceTrivialAllocator();
 
-    static final int TRACE_DUMP_LEVEL = 3;
+    public static final int TRACE_DUMP_LEVEL = 3;
     private static final DebugMetric trivialTracesMetric = Debug.metric("TraceRA[trivialTraces]");
     private static final DebugMetric tracesMetric = Debug.metric("TraceRA[traces]");
 
@@ -116,7 +117,7 @@
 
     /**
      * Remove Phi/Sigma In/Out.
-     * 
+     *
      * Note: Incoming Values are needed for the RegisterVerifier, otherwise SIGMAs/PHIs where the
      * Out and In value matches (ie. there is no resolution move) are falsely detected as errors.
      */
--- a/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceUtil.java	Mon Nov 30 15:07:18 2015 +0100
+++ b/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/TraceUtil.java	Mon Nov 30 17:18:36 2015 +0100
@@ -27,9 +27,9 @@
 import com.oracle.graal.compiler.common.alloc.TraceBuilder.TraceBuilderResult;
 import com.oracle.graal.compiler.common.cfg.AbstractBlockBase;
 
-class TraceUtil {
+public class TraceUtil {
 
-    static AbstractBlockBase<?> getBestTraceInterPredecessor(TraceBuilderResult<?> traceResult, AbstractBlockBase<?> block) {
+    public static AbstractBlockBase<?> getBestTraceInterPredecessor(TraceBuilderResult<?> traceResult, AbstractBlockBase<?> block) {
         AbstractBlockBase<?> bestPred = null;
         int bestTraceId = traceResult.getTraceForBlock(block);
         for (AbstractBlockBase<?> pred : block.getPredecessors()) {
@@ -42,12 +42,12 @@
         return bestPred;
     }
 
-    static boolean isShadowedRegisterValue(Value value) {
+    public static boolean isShadowedRegisterValue(Value value) {
         assert value != null;
         return value instanceof ShadowedRegisterValue;
     }
 
-    static ShadowedRegisterValue asShadowedRegisterValue(Value value) {
+    public static ShadowedRegisterValue asShadowedRegisterValue(Value value) {
         assert isShadowedRegisterValue(value);
         return (ShadowedRegisterValue) value;
     }
--- a/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/UsePosList.java	Mon Nov 30 15:07:18 2015 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,125 +0,0 @@
-/*
- * Copyright (c) 2015, 2015, Oracle and/or its affiliates. All rights reserved.
- * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
- *
- * This code is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License version 2 only, as
- * published by the Free Software Foundation.
- *
- * This code is distributed in the hope that it will be useful, but WITHOUT
- * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
- * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
- * version 2 for more details (a copy is included in the LICENSE file that
- * accompanied this code).
- *
- * You should have received a copy of the GNU General Public License version
- * 2 along with this work; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
- *
- * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
- * or visit www.oracle.com if you need additional information or have any
- * questions.
- */
-package com.oracle.graal.lir.alloc.trace;
-
-import com.oracle.graal.compiler.common.util.IntList;
-import com.oracle.graal.lir.alloc.trace.TraceInterval.RegisterPriority;
-
-/**
- * List of use positions. Each entry in the list records the use position and register priority
- * associated with the use position. The entries in the list are in descending order of use
- * position.
- *
- */
-public final class UsePosList {
-
-    private IntList list;
-
-    /**
-     * Creates a use list.
-     *
-     * @param initialCapacity the initial capacity of the list in terms of entries
-     */
-    public UsePosList(int initialCapacity) {
-        list = new IntList(initialCapacity * 2);
-    }
-
-    private UsePosList(IntList list) {
-        this.list = list;
-    }
-
-    /**
-     * Splits this list around a given position. All entries in this list with a use position
-     * greater or equal than {@code splitPos} are removed from this list and added to the returned
-     * list.
-     *
-     * @param splitPos the position for the split
-     * @return a use position list containing all entries removed from this list that have a use
-     *         position greater or equal than {@code splitPos}
-     */
-    public UsePosList splitAt(int splitPos) {
-        int i = size() - 1;
-        int len = 0;
-        while (i >= 0 && usePos(i) < splitPos) {
-            --i;
-            len += 2;
-        }
-        int listSplitIndex = (i + 1) * 2;
-        IntList childList = list;
-        list = IntList.copy(this.list, listSplitIndex, len);
-        childList.setSize(listSplitIndex);
-        UsePosList child = new UsePosList(childList);
-        return child;
-    }
-
-    /**
-     * Gets the use position at a specified index in this list.
-     *
-     * @param index the index of the entry for which the use position is returned
-     * @return the use position of entry {@code index} in this list
-     */
-    public int usePos(int index) {
-        return list.get(index << 1);
-    }
-
-    /**
-     * Gets the register priority for the use position at a specified index in this list.
-     *
-     * @param index the index of the entry for which the register priority is returned
-     * @return the register priority of entry {@code index} in this list
-     */
-    public RegisterPriority registerPriority(int index) {
-        return RegisterPriority.VALUES[list.get((index << 1) + 1)];
-    }
-
-    public void add(int usePos, RegisterPriority registerPriority) {
-        assert list.size() == 0 || usePos(size() - 1) > usePos;
-        list.add(usePos);
-        list.add(registerPriority.ordinal());
-    }
-
-    public int size() {
-        return list.size() >> 1;
-    }
-
-    public void removeLowestUsePos() {
-        list.setSize(list.size() - 2);
-    }
-
-    public void setRegisterPriority(int index, RegisterPriority registerPriority) {
-        list.set((index << 1) + 1, registerPriority.ordinal());
-    }
-
-    @Override
-    public String toString() {
-        StringBuilder buf = new StringBuilder("[");
-        for (int i = size() - 1; i >= 0; --i) {
-            if (buf.length() != 1) {
-                buf.append(", ");
-            }
-            RegisterPriority prio = registerPriority(i);
-            buf.append(usePos(i)).append(" -> ").append(prio.ordinal()).append(':').append(prio);
-        }
-        return buf.append("]").toString();
-    }
-}
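
The UsePosList deleted here moves unchanged into the lsra sub-package. For readers new to the data structure: it packs each (use position, register priority) entry into two consecutive ints of a flat list, kept in descending order of use position. The following is a minimal sketch of that layout and of the splitAt behaviour, using java.util.ArrayList in place of the internal IntList; the sketch class name is made up.

    import java.util.ArrayList;
    import java.util.List;

    // Sketch of the paired-int layout: entry i occupies slots 2*i (use position) and
    // 2*i + 1 (priority ordinal); entries are kept in descending order of use position,
    // so the lowest position sits at the end of the list.
    public class UsePosListSketch {
        private List<Integer> list = new ArrayList<>();

        public void add(int usePos, int priorityOrdinal) {
            // mirrors UsePosList.add: callers append positions in descending order
            list.add(usePos);
            list.add(priorityOrdinal);
        }

        public int size() {
            return list.size() >> 1;
        }

        public int usePos(int index) {
            return list.get(index << 1);
        }

        // All entries with usePos >= splitPos move to the returned list; this list keeps
        // the rest (cf. UsePosList.splitAt).
        public UsePosListSketch splitAt(int splitPos) {
            int i = size() - 1;
            while (i >= 0 && usePos(i) < splitPos) {
                --i;
            }
            int splitIndex = (i + 1) * 2;
            UsePosListSketch child = new UsePosListSketch();
            child.list = new ArrayList<>(list.subList(0, splitIndex));
            list = new ArrayList<>(list.subList(splitIndex, list.size()));
            return child;
        }

        public static void main(String[] args) {
            UsePosListSketch l = new UsePosListSketch();
            l.add(40, 3);
            l.add(20, 1);
            l.add(8, 2); // descending use positions: 40, 20, 8
            UsePosListSketch child = l.splitAt(20);
            System.out.println("moved to child (usePos >= 20): " + child.size()); // 2
            System.out.println("kept (usePos < 20): " + l.size());                // 1
        }
    }
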
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/lsra/FixedInterval.java	Mon Nov 30 17:18:36 2015 +0100
@@ -0,0 +1,310 @@
+/*
+ * Copyright (c) 2015, 2015, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+package com.oracle.graal.lir.alloc.trace.lsra;
+
+import static jdk.vm.ci.code.ValueUtil.asRegister;
+import static jdk.vm.ci.code.ValueUtil.isRegister;
+import jdk.vm.ci.meta.AllocatableValue;
+import jdk.vm.ci.meta.Value;
+
+import com.oracle.graal.lir.LIRInstruction;
+
+/**
+ * Represents a fixed interval.
+ */
+final class FixedInterval extends IntervalHint {
+
+    static final class FixedList {
+
+        public FixedInterval fixed;
+
+        public FixedList(FixedInterval fixed) {
+            this.fixed = fixed;
+        }
+
+        /**
+         * Gets the fixed list.
+         */
+        public FixedInterval getFixed() {
+            return fixed;
+        }
+
+        /**
+         * Sets the fixed list.
+         */
+        public void setFixed(FixedInterval list) {
+            fixed = list;
+        }
+
+        /**
+         * Adds an interval to a list sorted by {@linkplain FixedInterval#currentFrom() current
+         * from} positions.
+         *
+         * @param interval the interval to add
+         */
+        public void addToListSortedByCurrentFromPositions(FixedInterval interval) {
+            FixedInterval list = getFixed();
+            FixedInterval prev = null;
+            FixedInterval cur = list;
+            while (cur.currentFrom() < interval.currentFrom()) {
+                prev = cur;
+                cur = cur.next;
+            }
+            FixedInterval result = list;
+            if (prev == null) {
+                // add to head of list
+                result = interval;
+            } else {
+                // add before 'cur'
+                prev.next = interval;
+            }
+            interval.next = cur;
+            setFixed(result);
+        }
+
+    }
+
+    /**
+     * The fixed operand of this interval.
+     */
+    public final AllocatableValue operand;
+
+    /**
+     * The head of the list of ranges describing this interval. This list is sorted by
+     * {@linkplain LIRInstruction#id instruction ids}.
+     */
+    private FixedRange first;
+
+    /**
+     * Iterator used to traverse the ranges of an interval.
+     */
+    private FixedRange current;
+
+    /**
+     * Link to next interval in a sorted list of intervals that ends with {@link #EndMarker}.
+     */
+    FixedInterval next;
+
+    private int cachedTo; // cached value: to of last range (-1: not cached)
+
+    public FixedRange first() {
+        return first;
+    }
+
+    @Override
+    public int from() {
+        return first.from;
+    }
+
+    public int to() {
+        if (cachedTo == -1) {
+            cachedTo = calcTo();
+        }
+        assert cachedTo == calcTo() : "invalid cached value";
+        return cachedTo;
+    }
+
+    // test intersection
+    boolean intersects(TraceInterval i) {
+        return first.intersects(i);
+    }
+
+    int intersectsAt(TraceInterval i) {
+        return first.intersectsAt(i);
+    }
+
+    // range iteration
+    void rewindRange() {
+        current = first;
+    }
+
+    void nextRange() {
+        assert this != EndMarker : "not allowed on sentinel";
+        current = current.next;
+    }
+
+    int currentFrom() {
+        return current.from;
+    }
+
+    int currentTo() {
+        return current.to;
+    }
+
+    boolean currentAtEnd() {
+        return current == FixedRange.EndMarker;
+    }
+
+    boolean currentIntersects(TraceInterval it) {
+        return current.intersects(it);
+    }
+
+    int currentIntersectsAt(TraceInterval it) {
+        return current.intersectsAt(it);
+    }
+
+    // range creation
+    public void setFrom(int from) {
+        assert !isEmpty();
+        first().from = from;
+    }
+
+    private boolean isEmpty() {
+        return first() == FixedRange.EndMarker;
+    }
+
+    public void addRange(int from, int to) {
+        if (isEmpty()) {
+            first = new FixedRange(from, to, first());
+            return;
+        }
+        if (to <= to() && from >= from()) {
+            return;
+        }
+        if (from() == to) {
+            first().from = from;
+        } else {
+            first = new FixedRange(from, to, first());
+        }
+    }
+
+    @Override
+    public AllocatableValue location() {
+        return operand;
+    }
+
+    /**
+     * Sentinel interval to denote the end of an interval list.
+     */
+    static final FixedInterval EndMarker = new FixedInterval(Value.ILLEGAL);
+
+    FixedInterval(AllocatableValue operand) {
+        assert operand != null;
+        this.operand = operand;
+        this.first = FixedRange.EndMarker;
+        this.current = FixedRange.EndMarker;
+        this.next = FixedInterval.EndMarker;
+        this.cachedTo = -1;
+    }
+
+    int calcTo() {
+        assert first != FixedRange.EndMarker : "interval has no range";
+
+        FixedRange r = first;
+        while (r.next != FixedRange.EndMarker) {
+            r = r.next;
+        }
+        return r.to;
+    }
+
+    // returns true if the opId is inside the interval
+    boolean covers(int opId, LIRInstruction.OperandMode mode) {
+        FixedRange cur = first;
+
+        while (cur != FixedRange.EndMarker && cur.to < opId) {
+            cur = cur.next;
+        }
+        if (cur != FixedRange.EndMarker) {
+            assert cur.to != cur.next.from : "ranges not separated";
+
+            if (mode == LIRInstruction.OperandMode.DEF) {
+                return cur.from <= opId && opId < cur.to;
+            } else {
+                return cur.from <= opId && opId <= cur.to;
+            }
+        }
+        return false;
+    }
+
+    // returns true if the interval has any hole between holeFrom and holeTo
+    // (even if the hole has only the length 1)
+    boolean hasHoleBetween(int holeFrom, int holeTo) {
+        assert holeFrom < holeTo : "check";
+        assert from() <= holeFrom && holeTo <= to() : "index out of interval";
+
+        FixedRange cur = first;
+        while (cur != FixedRange.EndMarker) {
+            assert cur.to < cur.next.from : "no space between ranges";
+
+            // hole-range starts before this range -> hole
+            if (holeFrom < cur.from) {
+                return true;
+
+                // hole-range completely inside this range -> no hole
+            } else {
+                if (holeTo <= cur.to) {
+                    return false;
+
+                    // overlapping of hole-range with this range -> hole
+                } else {
+                    if (holeFrom <= cur.to) {
+                        return true;
+                    }
+                }
+            }
+
+            cur = cur.next;
+        }
+
+        return false;
+    }
+
+    @Override
+    public String toString() {
+        String from = "?";
+        String to = "?";
+        if (first != null && first != FixedRange.EndMarker) {
+            from = String.valueOf(from());
+            // to() may cache a computed value, modifying the current object, which is a bad idea
+            // for a printing function. Compute it directly instead.
+            to = String.valueOf(calcTo());
+        }
+        String locationString = "@" + this.operand;
+        return asRegister(operand).number + ":" + operand + (isRegister(operand) ? "" : locationString) + "[" + from + "," + to + "]";
+    }
+
+    /**
+     * Gets a single line string for logging the details of this interval to a log stream.
+     */
+    @Override
+    public String logString(TraceLinearScan allocator) {
+        StringBuilder buf = new StringBuilder(100);
+        buf.append("fix ").append(asRegister(operand).number).append(':').append(operand).append(' ');
+
+        buf.append(" ranges{");
+
+        // print ranges
+        FixedRange cur = first;
+        while (cur != FixedRange.EndMarker) {
+            if (cur != first) {
+                buf.append(", ");
+            }
+            buf.append(cur);
+            cur = cur.next;
+            assert cur != null : "range list not closed with range sentinel";
+        }
+        buf.append("}");
+        return buf.toString();
+    }
+
+}
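
One detail worth noting in FixedList.addToListSortedByCurrentFromPositions above: the insertion walk needs no explicit end-of-list or null check because the list is terminated by the EndMarker sentinel, whose ranges start at Integer.MAX_VALUE, so the loop condition always fails there. A self-contained sketch of the same sentinel-terminated sorted insertion, with a hypothetical Node class and an int key standing in for FixedInterval and currentFrom():

    // Sketch of sentinel-terminated sorted insertion as used by FixedInterval.FixedList.
    // 'Node' and its 'key' field are stand-ins for FixedInterval and currentFrom().
    final class Node {
        final int key;
        Node next;

        Node(int key) {
            this.key = key;
        }

        // Sentinel: its key is larger than any real key, so insertion never runs off the list.
        static final Node END_MARKER = new Node(Integer.MAX_VALUE);

        static Node insertSorted(Node head, Node n) {
            Node prev = null;
            Node cur = head;
            while (cur.key < n.key) { // terminates at END_MARKER without a null check
                prev = cur;
                cur = cur.next;
            }
            n.next = cur;
            if (prev == null) {
                return n;             // new head of the list
            }
            prev.next = n;
            return head;
        }

        public static void main(String[] args) {
            Node head = END_MARKER;
            for (int k : new int[]{30, 10, 20}) {
                head = insertSorted(head, new Node(k));
            }
            for (Node cur = head; cur != END_MARKER; cur = cur.next) {
                System.out.println(cur.key); // prints 10, 20, 30
            }
        }
    }
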
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/lsra/FixedRange.java	Mon Nov 30 17:18:36 2015 +0100
@@ -0,0 +1,109 @@
+/*
+ * Copyright (c) 2015, 2015, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+package com.oracle.graal.lir.alloc.trace.lsra;
+
+/**
+ * Represents a range of integers from a start (inclusive) to an end (exclusive).
+ */
+final class FixedRange {
+
+    public static final FixedRange EndMarker = new FixedRange(Integer.MAX_VALUE, Integer.MAX_VALUE, null);
+
+    /**
+     * The start of the range, inclusive.
+     */
+    public int from;
+
+    /**
+     * The end of the range, exclusive.
+     */
+    public int to;
+
+    /**
+     * A link to allow the range to be put into a singly linked list.
+     */
+    public FixedRange next;
+
+    boolean intersects(TraceInterval i) {
+        return intersectsAt(i) != -1;
+    }
+
+    /**
+     * Creates a new range.
+     *
+     * @param from the start of the range, inclusive
+     * @param to the end of the range, exclusive
+     * @param next link to the next range in a linked list
+     */
+    FixedRange(int from, int to, FixedRange next) {
+        this.from = from;
+        this.to = to;
+        this.next = next;
+    }
+
+    int intersectsAt(TraceInterval other) {
+        FixedRange range = this;
+        assert other != null : "null ranges not allowed";
+        assert range != EndMarker && other != TraceInterval.EndMarker : "empty ranges not allowed";
+        int intervalFrom = other.from();
+        int intervalTo = other.to();
+
+        do {
+            if (range.from < intervalFrom) {
+                if (range.to <= intervalFrom) {
+                    range = range.next;
+                    if (range == EndMarker) {
+                        return -1;
+                    }
+                } else {
+                    return intervalFrom;
+                }
+            } else {
+                if (intervalFrom < range.from) {
+                    if (intervalTo <= range.from) {
+                        return -1;
+                    }
+                    return range.from;
+                } else {
+                    assert range.from == intervalFrom;
+                    if (range.from == range.to) {
+                        range = range.next;
+                        if (range == EndMarker) {
+                            return -1;
+                        }
+                    } else {
+                        if (intervalFrom == intervalTo) {
+                            return -1;
+                        }
+                        return range.from;
+                    }
+                }
+            }
+        } while (true);
+    }
+
+    @Override
+    public String toString() {
+        return "[" + from + ", " + to + "]";
+    }
+}
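
Since ranges are half-open ([from, to)), two ranges that merely touch do not intersect, and intersectsAt reports the first common position. The following single-range simplification (plain ints rather than the production classes) illustrates the convention; the list walking and the empty-range special cases of the real method are omitted:

    // Worked illustration of half-open range intersection: returns the first common
    // position of [from1, to1) and [from2, to2), or -1 if they do not overlap.
    public class RangeIntersection {
        static int intersectsAt(int from1, int to1, int from2, int to2) {
            int lo = Math.max(from1, from2);
            int hi = Math.min(to1, to2);
            return lo < hi ? lo : -1; // empty or merely touching ranges do not intersect
        }

        public static void main(String[] args) {
            System.out.println(intersectsAt(4, 8, 6, 10)); // 6: [4,8) and [6,10) overlap at 6 and 7
            System.out.println(intersectsAt(4, 8, 8, 12)); // -1: [4,8) and [8,12) only touch
        }
    }
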
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/lsra/IntervalHint.java	Mon Nov 30 17:18:36 2015 +0100
@@ -0,0 +1,37 @@
+/*
+ * Copyright (c) 2015, 2015, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+package com.oracle.graal.lir.alloc.trace.lsra;
+
+import jdk.vm.ci.meta.AllocatableValue;
+
+/**
+ * An interval that is a hint for a {@code TraceInterval}.
+ */
+abstract class IntervalHint {
+
+    public abstract AllocatableValue location();
+
+    public abstract int from();
+
+    public abstract String logString(TraceLinearScan allocator);
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/lsra/RegisterVerifier.java	Mon Nov 30 17:18:36 2015 +0100
@@ -0,0 +1,264 @@
+/*
+ * Copyright (c) 2009, 2012, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+package com.oracle.graal.lir.alloc.trace.lsra;
+
+import static jdk.vm.ci.code.ValueUtil.asRegister;
+import static jdk.vm.ci.code.ValueUtil.isRegister;
+
+import java.util.ArrayList;
+import java.util.EnumSet;
+import java.util.List;
+
+import jdk.vm.ci.code.Register;
+import jdk.vm.ci.common.JVMCIError;
+import jdk.vm.ci.meta.Value;
+
+import com.oracle.graal.compiler.common.cfg.AbstractBlockBase;
+import com.oracle.graal.compiler.common.util.ArrayMap;
+import com.oracle.graal.debug.Debug;
+import com.oracle.graal.debug.Debug.Scope;
+import com.oracle.graal.debug.Indent;
+import com.oracle.graal.lir.InstructionValueConsumer;
+import com.oracle.graal.lir.LIRInstruction;
+import com.oracle.graal.lir.LIRInstruction.OperandFlag;
+import com.oracle.graal.lir.LIRInstruction.OperandMode;
+
+/**
+ * Verifies that, along every control-flow path, each register holds the interval that the
+ * allocation data flow expects it to hold (see checkState).
+ */
+final class RegisterVerifier {
+
+    TraceLinearScan allocator;
+    List<AbstractBlockBase<?>> workList; // all blocks that must be processed
+    ArrayMap<TraceInterval[]> savedStates; // saved information of previous check
+
+    // simplified access to methods of LinearScan
+    TraceInterval intervalAt(Value operand) {
+        return allocator.intervalFor(operand);
+    }
+
+    // currently, only registers are processed
+    int stateSize() {
+        return allocator.numRegisters();
+    }
+
+    // accessors
+    TraceInterval[] stateForBlock(AbstractBlockBase<?> block) {
+        return savedStates.get(block.getId());
+    }
+
+    void setStateForBlock(AbstractBlockBase<?> block, TraceInterval[] savedState) {
+        savedStates.put(block.getId(), savedState);
+    }
+
+    void addToWorkList(AbstractBlockBase<?> block) {
+        if (!workList.contains(block)) {
+            workList.add(block);
+        }
+    }
+
+    RegisterVerifier(TraceLinearScan allocator) {
+        this.allocator = allocator;
+        workList = new ArrayList<>(16);
+        this.savedStates = new ArrayMap<>();
+
+    }
+
+    @SuppressWarnings("try")
+    void verify(AbstractBlockBase<?> start) {
+        try (Scope s = Debug.scope("RegisterVerifier")) {
+            // setup input registers (method arguments) for first block
+            TraceInterval[] inputState = new TraceInterval[stateSize()];
+            setStateForBlock(start, inputState);
+            addToWorkList(start);
+
+            // main loop for verification
+            do {
+                AbstractBlockBase<?> block = workList.get(0);
+                workList.remove(0);
+
+                processBlock(block);
+            } while (!workList.isEmpty());
+        }
+    }
+
+    @SuppressWarnings("try")
+    private void processBlock(AbstractBlockBase<?> block) {
+        try (Indent indent = Debug.logAndIndent("processBlock B%d", block.getId())) {
+            // must copy state because it is modified
+            TraceInterval[] inputState = copy(stateForBlock(block));
+
+            try (Indent indent2 = Debug.logAndIndent("Input-State of intervals:")) {
+                printState(inputState);
+            }
+
+            // process all operations of the block
+            processOperations(block, inputState);
+
+            try (Indent indent2 = Debug.logAndIndent("Output-State of intervals:")) {
+                printState(inputState);
+            }
+
+            // iterate all successors
+            for (AbstractBlockBase<?> succ : block.getSuccessors()) {
+                processSuccessor(succ, inputState);
+            }
+        }
+    }
+
+    protected void printState(TraceInterval[] inputState) {
+        for (int i = 0; i < stateSize(); i++) {
+            Register reg = allocator.getRegisters()[i];
+            assert reg.number == i;
+            if (inputState[i] != null) {
+                Debug.log(" %6s %4d  --  %s", reg, inputState[i].operandNumber, inputState[i]);
+            } else {
+                Debug.log(" %6s   __", reg);
+            }
+        }
+    }
+
+    private void processSuccessor(AbstractBlockBase<?> block, TraceInterval[] inputState) {
+        TraceInterval[] savedState = stateForBlock(block);
+
+        if (savedState != null) {
+            // this block was already processed before.
+            // check if new inputState is consistent with savedState
+
+            boolean savedStateCorrect = true;
+            for (int i = 0; i < stateSize(); i++) {
+                if (inputState[i] != savedState[i]) {
+                    // current inputState and previous savedState assume a different
+                    // interval in this register -> assume that this register is invalid
+                    if (savedState[i] != null) {
+                        // invalidate old calculation only if it assumed that
+                        // register was valid. when the register was already invalid,
+                        // then the old calculation was correct.
+                        savedStateCorrect = false;
+                        savedState[i] = null;
+
+                        Debug.log("processSuccessor B%d: invalidating slot %d", block.getId(), i);
+                    }
+                }
+            }
+
+            if (savedStateCorrect) {
+                // already processed block with correct inputState
+                Debug.log("processSuccessor B%d: previous visit already correct", block.getId());
+            } else {
+                // must re-visit this block
+                Debug.log("processSuccessor B%d: must re-visit because input state changed", block.getId());
+                addToWorkList(block);
+            }
+
+        } else {
+            // block was not processed before, so set initial inputState
+            Debug.log("processSuccessor B%d: initial visit", block.getId());
+
+            setStateForBlock(block, copy(inputState));
+            addToWorkList(block);
+        }
+    }
+
+    static TraceInterval[] copy(TraceInterval[] inputState) {
+        return inputState.clone();
+    }
+
+    static void statePut(TraceInterval[] inputState, Value location, TraceInterval interval) {
+        if (location != null && isRegister(location)) {
+            Register reg = asRegister(location);
+            int regNum = reg.number;
+            if (interval != null) {
+                Debug.log("%s = %s", reg, interval.operand);
+            } else if (inputState[regNum] != null) {
+                Debug.log("%s = null", reg);
+            }
+
+            inputState[regNum] = interval;
+        }
+    }
+
+    static boolean checkState(AbstractBlockBase<?> block, LIRInstruction op, TraceInterval[] inputState, Value operand, Value reg, TraceInterval interval) {
+        if (reg != null && isRegister(reg)) {
+            if (inputState[asRegister(reg).number] != interval) {
+                throw new JVMCIError(
+                                "Error in register allocation: operation (%s) in block %s expected register %s (operand %s) to contain the value of interval %s but data-flow says it contains interval %s",
+                                op, block, reg, operand, interval, inputState[asRegister(reg).number]);
+            }
+        }
+        return true;
+    }
+
+    void processOperations(AbstractBlockBase<?> block, final TraceInterval[] inputState) {
+        List<LIRInstruction> ops = allocator.getLIR().getLIRforBlock(block);
+        InstructionValueConsumer useConsumer = new InstructionValueConsumer() {
+
+            @Override
+            public void visitValue(LIRInstruction op, Value operand, OperandMode mode, EnumSet<OperandFlag> flags) {
+                // we skip spill moves inserted by the spill position optimization
+                if (TraceLinearScan.isVariableOrRegister(operand) && allocator.isProcessed(operand) && op.id() != TraceLinearScan.DOMINATOR_SPILL_MOVE_ID) {
+                    TraceInterval interval = intervalAt(operand);
+                    if (op.id() != -1) {
+                        interval = interval.getSplitChildAtOpId(op.id(), mode, allocator);
+                    }
+
+                    assert checkState(block, op, inputState, interval.operand, interval.location(), interval.splitParent());
+                }
+            }
+        };
+
+        InstructionValueConsumer defConsumer = (op, operand, mode, flags) -> {
+            if (TraceLinearScan.isVariableOrRegister(operand) && allocator.isProcessed(operand)) {
+                TraceInterval interval = intervalAt(operand);
+                if (op.id() != -1) {
+                    interval = interval.getSplitChildAtOpId(op.id(), mode, allocator);
+                }
+
+                statePut(inputState, interval.location(), interval.splitParent());
+            }
+        };
+
+        // visit all instructions of the block
+        for (int i = 0; i < ops.size(); i++) {
+            final LIRInstruction op = ops.get(i);
+
+            if (Debug.isLogEnabled()) {
+                Debug.log("%s", op.toStringWithIdPrefix());
+            }
+
+            // check if input operands are correct
+            op.visitEachInput(useConsumer);
+            // invalidate all caller save registers at calls
+            if (op.destroysCallerSavedRegisters()) {
+                for (Register r : allocator.getRegisterAllocationConfig().getRegisterConfig().getCallerSaveRegisters()) {
+                    statePut(inputState, r.asValue(), null);
+                }
+            }
+            op.visitEachAlive(useConsumer);
+            // set temp operands (some operations use temp operands also as output operands, so
+            // can't set them null)
+            op.visitEachTemp(defConsumer);
+            // set output operands
+            op.visitEachOutput(defConsumer);
+        }
+    }
+}
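
RegisterVerifier is, in effect, a forward data-flow fixed-point computation: a block's incoming register state is replayed through its instructions, propagated to the successors, compared against any previously saved state, and conflicting slots are invalidated, re-queuing the successor until nothing changes. A generic sketch of that worklist pattern follows, with a hypothetical Block class and an Integer[] state standing in for AbstractBlockBase and the per-register interval state (null meaning "unknown/invalid"); it is not the allocator's own code.

    import java.util.ArrayDeque;
    import java.util.Deque;
    import java.util.HashMap;
    import java.util.Map;

    // Generic sketch of the verification pattern: per-block saved states, invalidation of
    // conflicting slots, and re-queuing until a fixed point is reached.
    final class WorklistVerifierSketch {
        static final class Block {
            final int id;
            Block[] successors = new Block[0];
            Block(int id) { this.id = id; }
        }

        private final Map<Block, Integer[]> savedStates = new HashMap<>();
        private final Deque<Block> workList = new ArrayDeque<>();

        void verify(Block start, int numRegs) {
            savedStates.put(start, new Integer[numRegs]);
            workList.add(start);
            while (!workList.isEmpty()) {
                Block block = workList.poll();
                Integer[] state = savedStates.get(block).clone(); // copy: processing mutates it
                processBlock(block, state);
                for (Block succ : block.successors) {
                    mergeInto(succ, state);
                }
            }
        }

        private void processBlock(Block block, Integer[] state) {
            // the real verifier replays the block's LIR instructions here, checking each use
            // against 'state' and recording each definition into it
        }

        private void mergeInto(Block succ, Integer[] state) {
            Integer[] saved = savedStates.get(succ);
            if (saved == null) {
                savedStates.put(succ, state.clone()); // first visit: remember incoming state
                workList.add(succ);
                return;
            }
            boolean changed = false;
            for (int i = 0; i < saved.length; i++) {
                if (saved[i] != null && !saved[i].equals(state[i])) {
                    saved[i] = null;                  // conflicting expectations: invalidate slot
                    changed = true;
                }
            }
            if (changed) {
                workList.add(succ);                   // weaker state: successor must be re-visited
            }
        }

        public static void main(String[] args) {
            Block a = new Block(0);
            Block b = new Block(1);
            a.successors = new Block[]{b};
            new WorklistVerifierSketch().verify(a, 4);
            System.out.println("sketch reached a fixed point");
        }
    }
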
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/lsra/TraceInterval.java	Mon Nov 30 17:18:36 2015 +0100
@@ -0,0 +1,1091 @@
+/*
+ * Copyright (c) 2009, 2015, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+package com.oracle.graal.lir.alloc.trace.lsra;
+
+import static com.oracle.graal.compiler.common.GraalOptions.DetailedAsserts;
+import static com.oracle.graal.lir.LIRValueUtil.isStackSlotValue;
+import static com.oracle.graal.lir.LIRValueUtil.isVariable;
+import static com.oracle.graal.lir.LIRValueUtil.isVirtualStackSlot;
+import static jdk.vm.ci.code.ValueUtil.asRegister;
+import static jdk.vm.ci.code.ValueUtil.isIllegal;
+import static jdk.vm.ci.code.ValueUtil.isRegister;
+import static jdk.vm.ci.code.ValueUtil.isStackSlot;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.EnumSet;
+import java.util.List;
+
+import jdk.vm.ci.code.BailoutException;
+import jdk.vm.ci.code.RegisterValue;
+import jdk.vm.ci.code.StackSlot;
+import jdk.vm.ci.common.JVMCIError;
+import jdk.vm.ci.meta.AllocatableValue;
+import jdk.vm.ci.meta.JavaConstant;
+import jdk.vm.ci.meta.LIRKind;
+import jdk.vm.ci.meta.Value;
+
+import com.oracle.graal.compiler.common.util.Util;
+import com.oracle.graal.debug.TTY;
+import com.oracle.graal.lir.LIRInstruction;
+import com.oracle.graal.lir.Variable;
+
+/**
+ * Represents an interval in the {@linkplain TraceLinearScan linear scan register allocator}.
+ */
+final class TraceInterval extends IntervalHint {
+
+    static final class AnyList {
+
+        /**
+         * List of intervals whose binding is currently {@link RegisterBinding#Any}.
+         */
+        public TraceInterval any;
+
+        public AnyList(TraceInterval any) {
+            this.any = any;
+        }
+
+        /**
+         * Gets the any list.
+         */
+        public TraceInterval getAny() {
+            return any;
+        }
+
+        /**
+         * Sets the any list.
+         */
+        public void setAny(TraceInterval list) {
+            any = list;
+        }
+
+        /**
+         * Adds an interval to a list sorted by {@linkplain TraceInterval#from() from}
+         * positions.
+         *
+         * @param interval the interval to add
+         */
+        public void addToListSortedByFromPositions(TraceInterval interval) {
+            TraceInterval list = getAny();
+            TraceInterval prev = null;
+            TraceInterval cur = list;
+            while (cur.from() < interval.from()) {
+                prev = cur;
+                cur = cur.next;
+            }
+            TraceInterval result = list;
+            if (prev == null) {
+                // add to head of list
+                result = interval;
+            } else {
+                // add before 'cur'
+                prev.next = interval;
+            }
+            interval.next = cur;
+            setAny(result);
+        }
+
+        /**
+         * Adds an interval to a list sorted by {@linkplain TraceInterval#from() start} positions
+         * and {@linkplain TraceInterval#firstUsage(RegisterPriority) first usage} positions.
+         *
+         * @param interval the interval to add
+         */
+        public void addToListSortedByStartAndUsePositions(TraceInterval interval) {
+            TraceInterval list = getAny();
+            TraceInterval prev = null;
+            TraceInterval cur = list;
+            while (cur.from() < interval.from() || (cur.from() == interval.from() && cur.firstUsage(RegisterPriority.None) < interval.firstUsage(RegisterPriority.None))) {
+                prev = cur;
+                cur = cur.next;
+            }
+            if (prev == null) {
+                list = interval;
+            } else {
+                prev.next = interval;
+            }
+            interval.next = cur;
+            setAny(list);
+        }
+
+        /**
+         * Removes an interval from a list.
+         *
+         * @param i the interval to remove
+         */
+        public void removeAny(TraceInterval i) {
+            TraceInterval list = getAny();
+            TraceInterval prev = null;
+            TraceInterval cur = list;
+            while (cur != i) {
+                assert cur != null && cur != TraceInterval.EndMarker : "interval has not been found in list: " + i;
+                prev = cur;
+                cur = cur.next;
+            }
+            if (prev == null) {
+                setAny(cur.next);
+            } else {
+                prev.next = cur.next;
+            }
+        }
+    }
+
+    /**
+     * Constants denoting the register usage priority for an interval. The constants are declared in
+     * increasing order of priority and are used to optimize spilling when multiple overlapping
+     * intervals compete for limited registers.
+     */
+    public enum RegisterPriority {
+        /**
+         * No special reason for an interval to be allocated a register.
+         */
+        None,
+
+        /**
+         * Priority level for intervals live at the end of a loop.
+         */
+        LiveAtLoopEnd,
+
+        /**
+         * Priority level for intervals that should be allocated to a register.
+         */
+        ShouldHaveRegister,
+
+        /**
+         * Priority level for intervals that must be allocated to a register.
+         */
+        MustHaveRegister;
+
+        public static final RegisterPriority[] VALUES = values();
+
+        /**
+         * Determines if this priority is higher than or equal to a given priority.
+         */
+        public boolean greaterEqual(RegisterPriority other) {
+            return ordinal() >= other.ordinal();
+        }
+
+        /**
+         * Determines if this priority is lower than a given priority.
+         */
+        public boolean lessThan(RegisterPriority other) {
+            return ordinal() < other.ordinal();
+        }
+
+        public CharSequence shortName() {
+            return name().subSequence(0, 1);
+        }
+    }
+
+    /**
+     * Constants denoting whether an interval is bound to a specific register. This models platform
+     * dependencies on register usage for certain instructions.
+     */
+    enum RegisterBinding {
+        /**
+         * Interval is bound to a specific register as required by the platform.
+         */
+        Fixed,
+
+        /**
+         * Interval has no specific register requirements.
+         */
+        Any,
+
+        /**
+         * Interval is bound to a stack slot.
+         */
+        Stack;
+
+        public static final RegisterBinding[] VALUES = values();
+    }
+
+    /**
+     * Constants denoting the linear-scan states an interval may be in with respect to the
+     * {@linkplain TraceInterval#from() start} {@code position} of the interval being processed.
+     */
+    enum State {
+        /**
+         * An interval that starts after {@code position}.
+         */
+        Unhandled,
+
+        /**
+         * An interval that {@linkplain TraceInterval#covers covers} {@code position} and has an
+         * assigned register.
+         */
+        Active,
+
+        /**
+         * An interval that starts before and ends after {@code position} but does not
+         * {@linkplain TraceInterval#covers cover} it due to a lifetime hole.
+         */
+        Inactive,
+
+        /**
+         * An interval that ends before {@code position} or is spilled to memory.
+         */
+        Handled;
+    }
+
+    /**
+     * Constants used in optimization of spilling of an interval.
+     */
+    public enum SpillState {
+        /**
+         * Starting state of calculation: no definition found yet.
+         */
+        NoDefinitionFound,
+
+        /**
+         * One definition has already been found. Two consecutive definitions are treated as one
+         * (e.g. a consecutive move and add because of two-operand LIR form). The position of this
+         * definition is given by {@link TraceInterval#spillDefinitionPos()}.
+         */
+        NoSpillStore,
+
+        /**
+         * A spill move has already been inserted.
+         */
+        SpillStore,
+
+        /**
+         * The interval starts in memory (e.g. method parameter), so a store is never necessary.
+         */
+        StartInMemory,
+
+        /**
+         * The interval has more than one definition (e.g. resulting from phi moves), so stores to
+         * memory are not optimized.
+         */
+        NoOptimization;
+
+        public static final EnumSet<SpillState> IN_MEMORY = EnumSet.of(SpillStore, StartInMemory);
+    }
+
+    /**
+     * The {@linkplain RegisterValue register} or {@linkplain Variable variable} for this interval
+     * prior to register allocation.
+     */
+    public final AllocatableValue operand;
+
+    /**
+     * The operand number for this interval's {@linkplain #operand operand}.
+     */
+    public final int operandNumber;
+
+    /**
+     * The {@linkplain RegisterValue register} or {@linkplain StackSlot spill slot} assigned to this
+     * interval. In case of a spilled interval which is re-materialized this is
+     * {@link Value#ILLEGAL}.
+     */
+    private AllocatableValue location;
+
+    /**
+     * The stack slot to which all splits of this interval are spilled if necessary.
+     */
+    private AllocatableValue spillSlot;
+
+    /**
+     * The kind of this interval.
+     */
+    private LIRKind kind;
+
+    /**
+     * The start of the range, inclusive.
+     */
+    public int intFrom;
+
+    /**
+     * The end of the range, exclusive.
+     */
+    public int intTo;
+
+    /**
+     * List of (use-positions, register-priorities) pairs, sorted by use-positions.
+     */
+    private UsePosList usePosList;
+
+    /**
+     * Link to next interval in a sorted list of intervals that ends with {@link #EndMarker}.
+     */
+    TraceInterval next;
+
+    /**
+     * The linear-scan state of this interval.
+     */
+    State state;
+
+    /**
+     * The interval from which this one is derived. If this is a {@linkplain #isSplitParent() split
+     * parent}, it points to itself.
+     */
+    private TraceInterval splitParent;
+
+    /**
+     * List of all intervals that are split off from this interval. This is only used if this is a
+     * {@linkplain #isSplitParent() split parent}.
+     */
+    private List<TraceInterval> splitChildren = Collections.emptyList();
+
+    /**
+     * Current split child that has been active or inactive last (always stored in split parents).
+     */
+    private TraceInterval currentSplitChild;
+
+    /**
+     * Specifies if move is inserted between currentSplitChild and this interval when interval gets
+     * active the first time.
+     */
+    private boolean insertMoveWhenActivated;
+
+    /**
+     * For spill move optimization.
+     */
+    private SpillState spillState;
+
+    /**
+     * Position where this interval is defined (if defined only once).
+     */
+    private int spillDefinitionPos;
+
+    /**
+     * This interval should be assigned the same location as the hint interval.
+     */
+    private IntervalHint locationHint;
+
+    /**
+     * The value with which a spilled child interval can be re-materialized. Currently this must be
+     * a Constant.
+     */
+    private JavaConstant materializedValue;
+
+    /**
+     * The number of times {@link #addMaterializationValue(JavaConstant)} is called.
+     */
+    private int numMaterializationValuesAdded;
+
+    void assignLocation(AllocatableValue newLocation) {
+        if (isRegister(newLocation)) {
+            assert this.location == null : "cannot re-assign location for " + this;
+            if (newLocation.getLIRKind().equals(LIRKind.Illegal) && !kind.equals(LIRKind.Illegal)) {
+                this.location = asRegister(newLocation).asValue(kind);
+                return;
+            }
+        } else if (isIllegal(newLocation)) {
+            assert canMaterialize();
+        } else {
+            assert this.location == null || isRegister(this.location) || (isVirtualStackSlot(this.location) && isStackSlot(newLocation)) : "cannot re-assign location for " + this;
+            assert isStackSlotValue(newLocation);
+            assert !newLocation.getLIRKind().equals(LIRKind.Illegal);
+            assert newLocation.getLIRKind().equals(this.kind);
+        }
+        this.location = newLocation;
+    }
+
+    /**
+     * Gets the {@linkplain RegisterValue register} or {@linkplain StackSlot spill slot} assigned to
+     * this interval.
+     */
+    @Override
+    public AllocatableValue location() {
+        return location;
+    }
+
+    public LIRKind kind() {
+        assert !isRegister(operand) : "cannot access type for fixed interval";
+        return kind;
+    }
+
+    public void setKind(LIRKind kind) {
+        assert isRegister(operand) || this.kind().equals(LIRKind.Illegal) || this.kind().equals(kind) : "overwriting existing type";
+        this.kind = kind;
+    }
+
+    public boolean isEmpty() {
+        return intFrom == Integer.MAX_VALUE && intTo == Integer.MAX_VALUE;
+    }
+
+    public void setTo(int pos) {
+        assert intFrom == Integer.MAX_VALUE || intFrom < pos;
+        intTo = pos;
+    }
+
+    public void setFrom(int pos) {
+        assert intTo == Integer.MAX_VALUE || pos < intTo;
+        intFrom = pos;
+    }
+
+    @Override
+    public int from() {
+        return intFrom;
+    }
+
+    int to() {
+        return intTo;
+    }
+
+    int numUsePositions() {
+        return usePosList.size();
+    }
+
+    public void setLocationHint(IntervalHint interval) {
+        locationHint = interval;
+    }
+
+    public boolean isSplitParent() {
+        return splitParent == this;
+    }
+
+    boolean isSplitChild() {
+        return splitParent != this;
+    }
+
+    /**
+     * Gets the split parent for this interval.
+     */
+    public TraceInterval splitParent() {
+        assert splitParent.isSplitParent() : "not a split parent: " + this;
+        return splitParent;
+    }
+
+    /**
+     * Gets the canonical spill slot for this interval.
+     */
+    public AllocatableValue spillSlot() {
+        return splitParent().spillSlot;
+    }
+
+    public void setSpillSlot(AllocatableValue slot) {
+        assert isStackSlotValue(slot);
+        assert splitParent().spillSlot == null || (isVirtualStackSlot(splitParent().spillSlot) && isStackSlot(slot)) : "cannot overwrite existing spill slot";
+        splitParent().spillSlot = slot;
+    }
+
+    TraceInterval currentSplitChild() {
+        return splitParent().currentSplitChild;
+    }
+
+    void makeCurrentSplitChild() {
+        splitParent().currentSplitChild = this;
+    }
+
+    boolean insertMoveWhenActivated() {
+        return insertMoveWhenActivated;
+    }
+
+    void setInsertMoveWhenActivated(boolean b) {
+        insertMoveWhenActivated = b;
+    }
+
+    // for spill optimization
+    public SpillState spillState() {
+        return splitParent().spillState;
+    }
+
+    public int spillDefinitionPos() {
+        return splitParent().spillDefinitionPos;
+    }
+
+    public void setSpillState(SpillState state) {
+        assert state.ordinal() >= spillState().ordinal() : "state cannot decrease";
+        splitParent().spillState = state;
+    }
+
+    public void setSpillDefinitionPos(int pos) {
+        assert spillState() == SpillState.NoDefinitionFound || spillState() == SpillState.NoSpillStore || spillDefinitionPos() == -1 : "cannot set the position twice";
+        int to = to();
+        assert pos < to : String.format("Cannot spill %s at %d", this, pos);
+        splitParent().spillDefinitionPos = pos;
+    }
+
+    /**
+     * Returns true if this interval has a shadow copy on the stack that is correct after
+     * {@code opId}.
+     */
+    public boolean inMemoryAt(int opId) {
+        SpillState spillSt = spillState();
+        return spillSt == SpillState.StartInMemory || (spillSt == SpillState.SpillStore && opId > spillDefinitionPos() && !canMaterialize());
+    }
+
+    void removeFirstUsePos() {
+        usePosList.removeLowestUsePos();
+    }
+
+    // test intersection
+    boolean intersects(TraceInterval i) {
+        return intersectsAt(i) != -1;
+    }
+
+    int intersectsAt(TraceInterval i) {
+        TraceInterval i1;
+        TraceInterval i2;
+        if (i.from() < this.from()) {
+            i1 = i;
+            i2 = this;
+        } else {
+            i1 = this;
+            i2 = i;
+        }
+        assert i1.from() <= i2.from();
+
+        if (i1.to() <= i2.from()) {
+            return -1;
+        }
+        return i2.from();
+    }
+
+    /**
+     * Sentinel interval to denote the end of an interval list.
+     */
+    static final TraceInterval EndMarker = new TraceInterval(Value.ILLEGAL, -1);
+
+    TraceInterval(AllocatableValue operand, int operandNumber) {
+        assert operand != null;
+        this.operand = operand;
+        this.operandNumber = operandNumber;
+        if (isRegister(operand)) {
+            location = operand;
+        } else {
+            assert isIllegal(operand) || isVariable(operand);
+        }
+        this.kind = LIRKind.Illegal;
+        this.intFrom = Integer.MAX_VALUE;
+        this.intTo = Integer.MAX_VALUE;
+        this.usePosList = new UsePosList(4);
+        this.next = EndMarker;
+        this.spillState = SpillState.NoDefinitionFound;
+        this.spillDefinitionPos = -1;
+        splitParent = this;
+        currentSplitChild = this;
+    }
+
+    /**
+     * Sets the value which is used for re-materialization.
+     */
+    public void addMaterializationValue(JavaConstant value) {
+        if (numMaterializationValuesAdded == 0) {
+            materializedValue = value;
+        } else {
+            // Interval is defined on multiple places -> no materialization is possible.
+            materializedValue = null;
+        }
+        numMaterializationValuesAdded++;
+    }
+
+    /**
+     * Returns true if this interval can be re-materialized when spilled. This means that no
+     * spill-moves are needed. Instead of restore-moves the {@link #materializedValue} is restored.
+     */
+    public boolean canMaterialize() {
+        return getMaterializedValue() != null;
+    }
+
+    /**
+     * Returns a value which can be moved to a register instead of a restore-move from stack.
+     */
+    public JavaConstant getMaterializedValue() {
+        return splitParent().materializedValue;
+    }
+
+    // consistency check of split-children
+    boolean checkSplitChildren() {
+        if (!splitChildren.isEmpty()) {
+            assert isSplitParent() : "only split parents can have children";
+
+            for (int i = 0; i < splitChildren.size(); i++) {
+                TraceInterval i1 = splitChildren.get(i);
+
+                assert i1.splitParent() == this : "not a split child of this interval";
+                assert i1.kind().equals(kind()) : "must be equal for all split children";
+                assert (i1.spillSlot() == null && spillSlot == null) || i1.spillSlot().equals(spillSlot()) : "must be equal for all split children";
+
+                for (int j = i + 1; j < splitChildren.size(); j++) {
+                    TraceInterval i2 = splitChildren.get(j);
+
+                    assert !i1.operand.equals(i2.operand) : "same register number";
+
+                    if (i1.from() < i2.from()) {
+                        assert i1.to() <= i2.from() && i1.to() < i2.to() : "intervals overlapping";
+                    } else {
+                        assert i2.from() < i1.from() : "intervals start at same opId";
+                        assert i2.to() <= i1.from() && i2.to() < i1.to() : "intervals overlapping";
+                    }
+                }
+            }
+        }
+
+        return true;
+    }
+
+    public IntervalHint locationHint(boolean searchSplitChild) {
+        if (!searchSplitChild) {
+            return locationHint;
+        }
+
+        if (locationHint != null) {
+            assert !(locationHint instanceof TraceInterval) || ((TraceInterval) locationHint).isSplitParent() : "only split parents are valid hint registers";
+
+            if (locationHint.location() != null && isRegister(locationHint.location())) {
+                return locationHint;
+            } else if (locationHint instanceof TraceInterval) {
+                TraceInterval hint = (TraceInterval) locationHint;
+                if (!hint.splitChildren.isEmpty()) {
+                    // search the first split child that has a register assigned
+                    int len = hint.splitChildren.size();
+                    for (int i = 0; i < len; i++) {
+                        TraceInterval interval = hint.splitChildren.get(i);
+                        if (interval.location != null && isRegister(interval.location)) {
+                            return interval;
+                        }
+                    }
+                }
+            }
+        }
+
+        // no hint interval found that has a register assigned
+        return null;
+    }
+
+    TraceInterval getSplitChildAtOpId(int opId, LIRInstruction.OperandMode mode, TraceLinearScan allocator) {
+        assert isSplitParent() : "can only be called for split parents";
+        assert opId >= 0 : "invalid opId (method cannot be called for spill moves)";
+
+        if (splitChildren.isEmpty()) {
+            assert this.covers(opId, mode) : this + " does not cover " + opId;
+            return this;
+        } else {
+            TraceInterval result = null;
+            int len = splitChildren.size();
+
+            // in outputMode, the end of the interval (opId == cur.to()) is not valid
+            int toOffset = (mode == LIRInstruction.OperandMode.DEF ? 0 : 1);
+
+            int i;
+            for (i = 0; i < len; i++) {
+                TraceInterval cur = splitChildren.get(i);
+                if (cur.from() <= opId && opId < cur.to() + toOffset) {
+                    if (i > 0) {
+                        // exchange current split child to start of list (faster access for next
+                        // call)
+                        Util.atPutGrow(splitChildren, i, splitChildren.get(0), null);
+                        Util.atPutGrow(splitChildren, 0, cur, null);
+                    }
+
+                    // interval found
+                    result = cur;
+                    break;
+                }
+            }
+
+            assert checkSplitChild(result, opId, allocator, toOffset, mode);
+            return result;
+        }
+    }
+
+    private boolean checkSplitChild(TraceInterval result, int opId, TraceLinearScan allocator, int toOffset, LIRInstruction.OperandMode mode) {
+        if (result == null) {
+            // this is an error
+            StringBuilder msg = new StringBuilder(this.toString()).append(" has no child at ").append(opId);
+            if (!splitChildren.isEmpty()) {
+                TraceInterval firstChild = splitChildren.get(0);
+                TraceInterval lastChild = splitChildren.get(splitChildren.size() - 1);
+                msg.append(" (first = ").append(firstChild).append(", last = ").append(lastChild).append(")");
+            }
+            throw new JVMCIError("Linear Scan Error: %s", msg);
+        }
+
+        if (!splitChildren.isEmpty()) {
+            for (TraceInterval interval : splitChildren) {
+                if (interval != result && interval.from() <= opId && opId < interval.to() + toOffset) {
+                    TTY.println(String.format("two valid result intervals found for opId %d: %d and %d", opId, result.operandNumber, interval.operandNumber));
+                    TTY.println(result.logString(allocator));
+                    TTY.println(interval.logString(allocator));
+                    throw new BailoutException("two valid result intervals found");
+                }
+            }
+        }
+        assert result.covers(opId, mode) : "opId not covered by interval";
+        return true;
+    }
+
+    // returns the interval that covers the given opId or null if there is none
+    TraceInterval getIntervalCoveringOpId(int opId) {
+        assert opId >= 0 : "invalid opId";
+        assert opId < to() : "can only look into the past";
+
+        if (opId >= from()) {
+            return this;
+        }
+
+        TraceInterval parent = splitParent();
+        TraceInterval result = null;
+
+        assert !parent.splitChildren.isEmpty() : "no split children available";
+        int len = parent.splitChildren.size();
+
+        for (int i = len - 1; i >= 0; i--) {
+            TraceInterval cur = parent.splitChildren.get(i);
+            if (cur.from() <= opId && opId < cur.to()) {
+                assert result == null : "covered by multiple split children " + result + " and " + cur;
+                result = cur;
+            }
+        }
+
+        return result;
+    }
+
+    // returns the split child that ends last, at or before the given opId
+    TraceInterval getSplitChildBeforeOpId(int opId) {
+        assert opId >= 0 : "invalid opId";
+
+        TraceInterval parent = splitParent();
+        TraceInterval result = null;
+
+        assert !parent.splitChildren.isEmpty() : "no split children available";
+        int len = parent.splitChildren.size();
+
+        for (int i = len - 1; i >= 0; i--) {
+            TraceInterval cur = parent.splitChildren.get(i);
+            if (cur.to() <= opId && (result == null || result.to() < cur.to())) {
+                result = cur;
+            }
+        }
+
+        assert result != null : "no split child found";
+        return result;
+    }
+
+    // checks if opId is covered by any split child
+    boolean splitChildCovers(int opId, LIRInstruction.OperandMode mode) {
+        assert isSplitParent() : "can only be called for split parents";
+        assert opId >= 0 : "invalid opId (method can not be called for spill moves)";
+
+        if (splitChildren.isEmpty()) {
+            // simple case if interval was not split
+            return covers(opId, mode);
+
+        } else {
+            // extended case: check all split children
+            int len = splitChildren.size();
+            for (int i = 0; i < len; i++) {
+                TraceInterval cur = splitChildren.get(i);
+                if (cur.covers(opId, mode)) {
+                    return true;
+                }
+            }
+            return false;
+        }
+    }
+
+    private RegisterPriority adaptPriority(RegisterPriority priority) {
+        /*
+         * In case of re-materialized values we require that use-operands are registers, because we
+         * don't have the value in a stack location. (Note that ShouldHaveRegister means that the
+         * operand can also be a StackSlot).
+         */
+        if (priority == RegisterPriority.ShouldHaveRegister && canMaterialize()) {
+            return RegisterPriority.MustHaveRegister;
+        }
+        return priority;
+    }
+
+    // Note: use positions are sorted descending, i.e. the first use has the highest index
+    int firstUsage(RegisterPriority minRegisterPriority) {
+        assert isVariable(operand) : "cannot access use positions for fixed intervals";
+
+        for (int i = usePosList.size() - 1; i >= 0; --i) {
+            RegisterPriority registerPriority = adaptPriority(usePosList.registerPriority(i));
+            if (registerPriority.greaterEqual(minRegisterPriority)) {
+                return usePosList.usePos(i);
+            }
+        }
+        return Integer.MAX_VALUE;
+    }
+
+    int nextUsage(RegisterPriority minRegisterPriority, int from) {
+        assert isVariable(operand) : "cannot access use positions for fixed intervals";
+
+        for (int i = usePosList.size() - 1; i >= 0; --i) {
+            int usePos = usePosList.usePos(i);
+            if (usePos >= from && adaptPriority(usePosList.registerPriority(i)).greaterEqual(minRegisterPriority)) {
+                return usePos;
+            }
+        }
+        return Integer.MAX_VALUE;
+    }
+
+    int nextUsageExact(RegisterPriority exactRegisterPriority, int from) {
+        assert isVariable(operand) : "cannot access use positions for fixed intervals";
+
+        for (int i = usePosList.size() - 1; i >= 0; --i) {
+            int usePos = usePosList.usePos(i);
+            if (usePos >= from && adaptPriority(usePosList.registerPriority(i)) == exactRegisterPriority) {
+                return usePos;
+            }
+        }
+        return Integer.MAX_VALUE;
+    }
+
+    int previousUsage(RegisterPriority minRegisterPriority, int from) {
+        assert isVariable(operand) : "cannot access use positions for fixed intervals";
+
+        int prev = -1;
+        for (int i = usePosList.size() - 1; i >= 0; --i) {
+            int usePos = usePosList.usePos(i);
+            if (usePos > from) {
+                return prev;
+            }
+            if (adaptPriority(usePosList.registerPriority(i)).greaterEqual(minRegisterPriority)) {
+                prev = usePos;
+            }
+        }
+        return prev;
+    }
+
+    public void addUsePos(int pos, RegisterPriority registerPriority) {
+        assert isEmpty() || covers(pos, LIRInstruction.OperandMode.USE) : String.format("use position %d not covered by live range of interval %s", pos, this);
+
+        // do not add use positions for precolored intervals because they are never used
+        if (registerPriority != RegisterPriority.None && isVariable(operand)) {
+            if (DetailedAsserts.getValue()) {
+                for (int i = 0; i < usePosList.size(); i++) {
+                    assert pos <= usePosList.usePos(i) : "already added a use-position with lower position";
+                    if (i > 0) {
+                        assert usePosList.usePos(i) < usePosList.usePos(i - 1) : "not sorted descending";
+                    }
+                }
+            }
+
+            // Note: addUsePos is called with positions in descending order, so the list stays
+            // sorted automatically by just appending new use positions
+            int len = usePosList.size();
+            if (len == 0 || usePosList.usePos(len - 1) > pos) {
+                usePosList.add(pos, registerPriority);
+            } else if (usePosList.registerPriority(len - 1).lessThan(registerPriority)) {
+                assert usePosList.usePos(len - 1) == pos : "list not sorted correctly";
+                usePosList.setRegisterPriority(len - 1, registerPriority);
+            }
+        }
+    }
+
+    public void addRange(int from, int to) {
+        assert from < to : "invalid range";
+
+        if (from < intFrom) {
+            setFrom(from);
+        }
+        if (intTo == Integer.MAX_VALUE || intTo < to) {
+            setTo(to);
+        }
+    }
+
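+    /**
+     * Allocates a new interval that becomes a split child of this interval's
+     * {@linkplain #splitParent() split parent} and registers it in the parent's list of children.
+     */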
+    TraceInterval newSplitChild(TraceLinearScan allocator) {
+        // allocate new interval
+        TraceInterval parent = splitParent();
+        TraceInterval result = allocator.createDerivedInterval(parent);
+        result.setKind(kind());
+
+        result.splitParent = parent;
+        result.setLocationHint(parent);
+
+        // insert new interval in children-list of parent
+        if (parent.splitChildren.isEmpty()) {
+            assert isSplitParent() : "list must be initialized at first split";
+
+            // Create new non-shared list
+            parent.splitChildren = new ArrayList<>(4);
+            parent.splitChildren.add(this);
+        }
+        parent.splitChildren.add(result);
+
+        return result;
+    }
+
+    /**
+     * Splits this interval at a specified position and returns the remainder as a new <i>child</i>
+     * interval of this interval's {@linkplain #splitParent() parent} interval.
+     * <p>
+     * When an interval is split, a bi-directional link is established between the original
+     * <i>parent</i> interval and the <i>children</i> intervals that are split off this interval.
+     * When a split child is split again, the newly created interval is a direct child of the original
+     * parent. That is, there is no tree of split children stored, just a flat list. All split
+     * children are spilled to the same {@linkplain #spillSlot spill slot}.
+     *
+     * @param splitPos the position at which to split this interval
+     * @param allocator the register allocator context
+     * @return the child interval split off from this interval
+     */
+    TraceInterval split(int splitPos, TraceLinearScan allocator) {
+        assert isVariable(operand) : "cannot split fixed intervals";
+
+        // allocate new interval
+        TraceInterval result = newSplitChild(allocator);
+
+        // split the ranges
+        result.setTo(intTo);
+        result.setFrom(splitPos);
+        intTo = splitPos;
+
+        // split list of use positions
+        result.usePosList = usePosList.splitAt(splitPos);
+
+        if (DetailedAsserts.getValue()) {
+            for (int i = 0; i < usePosList.size(); i++) {
+                assert usePosList.usePos(i) < splitPos;
+            }
+            for (int i = 0; i < result.usePosList.size(); i++) {
+                assert result.usePosList.usePos(i) >= splitPos;
+            }
+        }
+        return result;
+    }
+
+    // returns true if the opId is inside the interval
+    boolean covers(int opId, LIRInstruction.OperandMode mode) {
+        if (mode == LIRInstruction.OperandMode.DEF) {
+            return from() <= opId && opId < to();
+        }
+        return from() <= opId && opId <= to();
+    }
+
+    @Override
+    public String toString() {
+        String from = "?";
+        String to = "?";
+        if (!isEmpty()) {
+            from = String.valueOf(from());
+            to = String.valueOf(to());
+        }
+        String locationString = this.location == null ? "" : "@" + this.location;
+        return operandNumber + ":" + operand + (isRegister(operand) ? "" : locationString) + "[" + from + "," + to + "]";
+    }
+
+    /**
+     * Gets the use position information for this interval.
+     */
+    public UsePosList usePosList() {
+        return usePosList;
+    }
+
+    /**
+     * Gets a single line string for logging the details of this interval to a log stream.
+     *
+     * @param allocator the register allocator context
+     */
+    @Override
+    public String logString(TraceLinearScan allocator) {
+        StringBuilder buf = new StringBuilder(100);
+        buf.append("any ").append(operandNumber).append(':').append(operand).append(' ');
+        if (!isRegister(operand)) {
+            if (location != null) {
+                buf.append("location{").append(location).append("} ");
+            }
+        }
+
+        buf.append("hints{").append(splitParent.operandNumber);
+        IntervalHint hint = locationHint(false);
+        if (hint != null) {
+            buf.append(", ").append(hint.location());
+        }
+        buf.append("} ranges{");
+
+        // print range
+        buf.append("[" + from() + ", " + to() + "]");
+        buf.append("} uses{");
+
+        // print use positions
+        int prev = -1;
+        for (int i = usePosList.size() - 1; i >= 0; --i) {
+            assert prev < usePosList.usePos(i) : "use positions not sorted";
+            if (i != usePosList.size() - 1) {
+                buf.append(", ");
+            }
+            buf.append(usePosList.usePos(i)).append(':').append(usePosList.registerPriority(i).shortName());
+            prev = usePosList.usePos(i);
+        }
+        buf.append("} spill-state{").append(spillState()).append("}");
+        if (canMaterialize()) {
+            buf.append(" (remat:").append(getMaterializedValue().toString()).append(")");
+        }
+        return buf.toString();
+    }
+
+    List<TraceInterval> getSplitChildren() {
+        return Collections.unmodifiableList(splitChildren);
+    }
+
+    boolean isFixedInterval() {
+        return isRegister(operand);
+    }
+
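+    // A use position with the low bit set marks a definition (LIR instruction ids are always even).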
+    private static boolean isDefinitionPosition(int usePos) {
+        return (usePos & 1) == 1;
+    }
+
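+    // Returns the closest definition position of this fixed interval at or before currentPosition,
+    // or Integer.MAX_VALUE if there is none.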
+    int currentFrom(int currentPosition) {
+        assert isFixedInterval();
+        for (int i = 0; i < usePosList.size(); i++) {
+            int usePos = usePosList.usePos(i);
+            if (usePos <= currentPosition && isDefinitionPosition(usePos)) {
+                return usePos;
+            }
+
+        }
+        return Integer.MAX_VALUE;
+    }
+
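+    // Reconstructs the range of this fixed interval around currentPosition from its use positions
+    // (a definition starts a range, uses extend it) and returns the position at which that range
+    // begins to intersect the variable interval 'current', or -1 if the two do not overlap.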
+    int currentIntersectsAt(int currentPosition, TraceInterval current) {
+        assert isFixedInterval();
+        assert !current.isFixedInterval();
+        int from = Integer.MAX_VALUE;
+        int to = Integer.MIN_VALUE;
+
+        for (int i = 0; i < usePosList.size(); i++) {
+            int usePos = usePosList.usePos(i);
+            if (isDefinitionPosition(usePos)) {
+                if (usePos <= currentPosition) {
+                    from = usePos;
+                    break;
+                }
+                to = Integer.MIN_VALUE;
+            } else {
+                if (to < usePos) {
+                    to = usePos;
+                }
+            }
+        }
+        if (from < current.from()) {
+            if (to <= current.from()) {
+                return -1;
+            }
+            return current.from();
+        } else {
+            if (current.to() <= from) {
+                return -1;
+            }
+            return from;
+        }
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/lsra/TraceIntervalDumper.java	Mon Nov 30 17:18:36 2015 +0100
@@ -0,0 +1,93 @@
+/*
+ * Copyright (c) 2015, 2015, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+package com.oracle.graal.lir.alloc.trace.lsra;
+
+import static jdk.vm.ci.code.ValueUtil.isRegister;
+import jdk.vm.ci.meta.AllocatableValue;
+import jdk.vm.ci.meta.Value;
+
+import com.oracle.graal.lir.debug.IntervalDumper;
+
+final class TraceIntervalDumper implements IntervalDumper {
+    private final FixedInterval[] fixedIntervals;
+    private final TraceInterval[] intervals;
+
+    public TraceIntervalDumper(FixedInterval[] fixedIntervals, TraceInterval[] intervals) {
+        this.fixedIntervals = fixedIntervals;
+        this.intervals = intervals;
+    }
+
+    public void visitIntervals(IntervalVisitor visitor) {
+        for (FixedInterval interval : fixedIntervals) {
+            if (interval != null) {
+                printFixedInterval(interval, visitor);
+            }
+        }
+        for (TraceInterval interval : intervals) {
+            if (interval != null) {
+                printInterval(interval, visitor);
+            }
+        }
+    }
+
+    private static void printFixedInterval(FixedInterval interval, IntervalVisitor visitor) {
+        Value hint = null;
+        AllocatableValue operand = interval.operand;
+        String type = "fixed";
+        char typeChar = operand.getPlatformKind().getTypeChar();
+        visitor.visitIntervalStart(operand, operand, operand, hint, type, typeChar);
+
+        // print ranges
+        for (FixedRange range = interval.first(); range != FixedRange.EndMarker; range = range.next) {
+            visitor.visitRange(range.from, range.to);
+        }
+
+        // no use positions
+
+        visitor.visitIntervalEnd("NOT_SUPPORTED");
+
+    }
+
+    private static void printInterval(TraceInterval interval, IntervalVisitor visitor) {
+        Value hint = interval.locationHint(false) != null ? interval.locationHint(false).location() : null;
+        AllocatableValue operand = interval.operand;
+        String type = isRegister(operand) ? "fixed" : operand.getLIRKind().getPlatformKind().toString();
+        char typeChar = operand.getPlatformKind().getTypeChar();
+        visitor.visitIntervalStart(interval.splitParent().operand, operand, interval.location(), hint, type, typeChar);
+
+        // print ranges
+        visitor.visitRange(interval.from(), interval.to());
+
+        // print use positions
+        int prev = -1;
+        UsePosList usePosList = interval.usePosList();
+        for (int i = usePosList.size() - 1; i >= 0; --i) {
+            assert prev < usePosList.usePos(i) : "use positions not sorted";
+            visitor.visitUsePos(usePosList.usePos(i), usePosList.registerPriority(i));
+            prev = usePosList.usePos(i);
+        }
+
+        visitor.visitIntervalEnd(interval.spillState());
+    }
+
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/lsra/TraceIntervalWalker.java	Mon Nov 30 17:18:36 2015 +0100
@@ -0,0 +1,323 @@
+/*
+ * Copyright (c) 2009, 2015, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+package com.oracle.graal.lir.alloc.trace.lsra;
+
+import jdk.vm.ci.common.JVMCIError;
+
+import com.oracle.graal.debug.Debug;
+import com.oracle.graal.debug.Indent;
+import com.oracle.graal.lir.alloc.trace.lsra.FixedInterval.FixedList;
+import com.oracle.graal.lir.alloc.trace.lsra.TraceInterval.AnyList;
+import com.oracle.graal.lir.alloc.trace.lsra.TraceInterval.RegisterBinding;
+import com.oracle.graal.lir.alloc.trace.lsra.TraceInterval.State;
+
+/**
+ * Walks the sorted interval lists of a trace: advances the current position, activates unhandled
+ * intervals and keeps the fixed interval lists up to date during allocation.
+ */
+class TraceIntervalWalker {
+
+    protected final TraceLinearScan allocator;
+
+    /**
+     * Sorted list of intervals that are not yet live at the current position (unhandled).
+     */
+    protected AnyList unhandledAnyList;
+
+    /**
+     * Sorted list of intervals, live at the current position.
+     */
+    protected AnyList activeAnyList;
+    protected FixedList activeFixedList;
+
+    /**
+     * Sorted list of intervals that are in a lifetime hole at the current position.
+     */
+    protected FixedList inactiveFixedList;
+
+    /**
+     * The current position (intercept point through the intervals).
+     */
+    protected int currentPosition;
+
+    /**
+     * Processes the {@code currentInterval} interval in an attempt to allocate a physical register
+     * to it and thus allow it to be moved to a list of {@linkplain #activeAnyList active}
+     * intervals.
+     *
+     * @param currentInterval The interval to be activated.
+     *
+     * @return {@code true} if a register was allocated to the {@code currentInterval} interval
+     */
+    protected boolean activateCurrent(TraceInterval currentInterval) {
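+        // The base implementation only logs the current status; overriding walkers are expected to
+        // do the actual allocation work.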
+        if (Debug.isLogEnabled()) {
+            logCurrentStatus();
+        }
+        return true;
+    }
+
+    @SuppressWarnings("try")
+    protected void logCurrentStatus() {
+        try (Indent i = Debug.logAndIndent("active:")) {
+            logList(activeFixedList.getFixed());
+            logList(activeAnyList.getAny());
+        }
+        try (Indent i = Debug.logAndIndent("inactive(fixed):")) {
+            logList(inactiveFixedList.getFixed());
+        }
+    }
+
+    private void logList(FixedInterval i) {
+        for (FixedInterval interval = i; interval != FixedInterval.EndMarker; interval = interval.next) {
+            Debug.log("%s", interval.logString(allocator));
+        }
+    }
+
+    private void logList(TraceInterval i) {
+        for (TraceInterval interval = i; interval != TraceInterval.EndMarker; interval = interval.next) {
+            Debug.log("%s", interval.logString(allocator));
+        }
+    }
+
+    void walkBefore(int lirOpId) {
+        walkTo(lirOpId - 1);
+    }
+
+    void walk() {
+        walkTo(Integer.MAX_VALUE);
+    }
+
+    /**
+     * Creates a new interval walker.
+     *
+     * @param allocator the register allocator context
+     * @param unhandledFixed the list of unhandled {@linkplain RegisterBinding#Fixed fixed}
+     *            intervals
+     * @param unhandledAny the list of unhandled {@linkplain RegisterBinding#Any non-fixed}
+     *            intervals
+     */
+    TraceIntervalWalker(TraceLinearScan allocator, FixedInterval unhandledFixed, TraceInterval unhandledAny) {
+        this.allocator = allocator;
+
+        unhandledAnyList = new AnyList(unhandledAny);
+        activeAnyList = new AnyList(TraceInterval.EndMarker);
+        activeFixedList = new FixedList(FixedInterval.EndMarker);
+        // we don't need a separate unhandled list for fixed.
+        inactiveFixedList = new FixedList(unhandledFixed);
+        currentPosition = -1;
+    }
+
+    protected void removeFromList(TraceInterval interval) {
+        if (interval.state == State.Active) {
+            activeAnyList.removeAny(interval);
+        } else {
+            assert interval.state == State.Inactive : "invalid state";
+            // inactiveAnyLists.removeAny(interval);
+            throw JVMCIError.shouldNotReachHere();
+        }
+    }
+
+    /**
+     * Walks up to {@code from} and updates the state of {@link FixedInterval fixed intervals}.
+     *
+     * Fixed intervals can switch back and forth between the states {@link State#Active} and
+     * {@link State#Inactive} (and eventually to {@link State#Handled} but handled intervals are not
+     * managed).
+     */
+    @SuppressWarnings("try")
+    private void walkToFixed(State state, int from) {
+        assert state == State.Active || state == State.Inactive : "wrong state";
+        FixedInterval prevprev = null;
+        FixedInterval prev = (state == State.Active) ? activeFixedList.getFixed() : inactiveFixedList.getFixed();
+        FixedInterval next = prev;
+        if (Debug.isLogEnabled()) {
+            try (Indent i = Debug.logAndIndent("walkToFixed(%s, %d):", state, from)) {
+                logList(next);
+            }
+        }
+        while (next.currentFrom() <= from) {
+            FixedInterval cur = next;
+            next = cur.next;
+
+            boolean rangeHasChanged = false;
+            while (cur.currentTo() <= from) {
+                cur.nextRange();
+                rangeHasChanged = true;
+            }
+
+            // also handle move from inactive list to active list
+            rangeHasChanged = rangeHasChanged || (state == State.Inactive && cur.currentFrom() <= from);
+
+            if (rangeHasChanged) {
+                // remove cur from list
+                if (prevprev == null) {
+                    if (state == State.Active) {
+                        activeFixedList.setFixed(next);
+                    } else {
+                        inactiveFixedList.setFixed(next);
+                    }
+                } else {
+                    prevprev.next = next;
+                }
+                prev = next;
+                TraceInterval.State newState;
+                if (cur.currentAtEnd()) {
+                    // move to handled state (not maintained as a list)
+                    newState = State.Handled;
+                } else {
+                    if (cur.currentFrom() <= from) {
+                        // sort into active list
+                        activeFixedList.addToListSortedByCurrentFromPositions(cur);
+                        newState = State.Active;
+                    } else {
+                        // sort into inactive list
+                        inactiveFixedList.addToListSortedByCurrentFromPositions(cur);
+                        newState = State.Inactive;
+                    }
+                    if (prev == cur) {
+                        assert state == newState;
+                        prevprev = prev;
+                        prev = cur.next;
+                    }
+                }
+                intervalMoved(cur, state, newState);
+            } else {
+                prevprev = prev;
+                prev = cur.next;
+            }
+        }
+    }
+
+    /**
+     * Walks up to {@code from} and updates the state of {@link TraceInterval intervals}.
+     *
+     * Trace intervals can switch once from {@link State#Unhandled} to {@link State#Active} and then
+     * to {@link State#Handled} but handled intervals are not managed.
+     */
+    @SuppressWarnings("try")
+    private void walkToAny(int from) {
+        TraceInterval prevprev = null;
+        TraceInterval prev = activeAnyList.getAny();
+        TraceInterval next = prev;
+        if (Debug.isLogEnabled()) {
+            try (Indent i = Debug.logAndIndent("walkToAny(%d):", from)) {
+                logList(next);
+            }
+        }
+        while (next.from() <= from) {
+            TraceInterval cur = next;
+            next = cur.next;
+
+            if (cur.to() <= from) {
+                // remove cur from list
+                if (prevprev == null) {
+                    activeAnyList.setAny(next);
+                } else {
+                    prevprev.next = next;
+                }
+                intervalMoved(cur, State.Active, State.Handled);
+            } else {
+                prevprev = prev;
+            }
+            prev = next;
+        }
+    }
+
+    /**
+     * Get the next interval from {@linkplain #unhandledAnyList} which starts before or at
+     * {@code toOpId}. The returned interval is removed.
+     *
+     * @postcondition all intervals in {@linkplain #unhandledAnyList} start after {@code toOpId}.
+     *
+     * @return The next interval or null if there is no {@linkplain #unhandledAnyList unhandled}
+     *         interval at position {@code toOpId}.
+     */
+    private TraceInterval nextInterval(int toOpId) {
+        TraceInterval any = unhandledAnyList.getAny();
+
+        if (any != TraceInterval.EndMarker) {
+            TraceInterval currentInterval = unhandledAnyList.getAny();
+            if (toOpId < currentInterval.from()) {
+                return null;
+            }
+
+            unhandledAnyList.setAny(currentInterval.next);
+            currentInterval.next = TraceInterval.EndMarker;
+            return currentInterval;
+        }
+        return null;
+
+    }
+
+    /**
+     * Walk up to {@code toOpId}.
+     *
+     * @postcondition {@link #currentPosition} is set to {@code toOpId}, {@link #activeFixedList}
+     *                and {@link #inactiveFixedList} are populated and {@link TraceInterval#state}s
+     *                are up to date.
+     */
+    @SuppressWarnings("try")
+    protected void walkTo(int toOpId) {
+        assert currentPosition <= toOpId : "cannot walk backwards";
+        for (TraceInterval currentInterval = nextInterval(toOpId); currentInterval != null; currentInterval = nextInterval(toOpId)) {
+            int opId = currentInterval.from();
+
+            // set currentPosition prior to call of walkTo
+            currentPosition = opId;
+
+            // update unhandled stack intervals
+            // updateUnhandledStackIntervals(opId);
+
+            // call walkTo even if currentPosition == id
+            walkToFixed(State.Active, opId);
+            walkToFixed(State.Inactive, opId);
+            walkToAny(opId);
+
+            try (Indent indent = Debug.logAndIndent("walk to op %d", opId)) {
+                currentInterval.state = State.Active;
+                if (activateCurrent(currentInterval)) {
+                    activeAnyList.addToListSortedByFromPositions(currentInterval);
+                    intervalMoved(currentInterval, State.Unhandled, State.Active);
+                }
+            }
+        }
+        // set currentPosition prior to call of walkTo
+        currentPosition = toOpId;
+
+        if (currentPosition <= allocator.maxOpId()) {
+            // update unhandled stack intervals
+            // updateUnhandledStackIntervals(toOpId);
+
+            // call walkTo if still in range
+            walkToFixed(State.Active, toOpId);
+            walkToFixed(State.Inactive, toOpId);
+            walkToAny(toOpId);
+        }
+    }
+
+    private void intervalMoved(IntervalHint interval, State from, State to) {
+        // intervalMoved() is called whenever an interval moves from one interval list to another.
+        // In the implementation of this method it is prohibited to move the interval to any list.
+        if (Debug.isLogEnabled()) {
+            Debug.log("interval moved from %s to %s: %s", from, to, interval.logString(allocator));
+        }
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/lsra/TraceLinearScan.java	Mon Nov 30 17:18:36 2015 +0100
@@ -0,0 +1,1045 @@
+/*
+ * Copyright (c) 2009, 2015, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+package com.oracle.graal.lir.alloc.trace.lsra;
+
+import static com.oracle.graal.compiler.common.GraalOptions.DetailedAsserts;
+import static com.oracle.graal.lir.LIRValueUtil.isVariable;
+import static jdk.vm.ci.code.CodeUtil.isEven;
+import static jdk.vm.ci.code.ValueUtil.asRegister;
+import static jdk.vm.ci.code.ValueUtil.asRegisterValue;
+import static jdk.vm.ci.code.ValueUtil.isIllegal;
+import static jdk.vm.ci.code.ValueUtil.isLegal;
+import static jdk.vm.ci.code.ValueUtil.isRegister;
+
+import java.util.Arrays;
+import java.util.BitSet;
+import java.util.EnumSet;
+import java.util.List;
+
+import jdk.vm.ci.code.BailoutException;
+import jdk.vm.ci.code.Register;
+import jdk.vm.ci.code.RegisterAttributes;
+import jdk.vm.ci.code.RegisterValue;
+import jdk.vm.ci.code.TargetDescription;
+import jdk.vm.ci.common.JVMCIError;
+import jdk.vm.ci.meta.AllocatableValue;
+import jdk.vm.ci.meta.LIRKind;
+import jdk.vm.ci.meta.Value;
+import jdk.vm.ci.options.NestedBooleanOptionValue;
+import jdk.vm.ci.options.Option;
+import jdk.vm.ci.options.OptionType;
+import jdk.vm.ci.options.OptionValue;
+
+import com.oracle.graal.compiler.common.alloc.RegisterAllocationConfig;
+import com.oracle.graal.compiler.common.alloc.TraceBuilder.TraceBuilderResult;
+import com.oracle.graal.compiler.common.cfg.AbstractBlockBase;
+import com.oracle.graal.compiler.common.cfg.BlockMap;
+import com.oracle.graal.debug.Debug;
+import com.oracle.graal.debug.Debug.Scope;
+import com.oracle.graal.debug.Indent;
+import com.oracle.graal.lir.LIR;
+import com.oracle.graal.lir.LIRInstruction;
+import com.oracle.graal.lir.LIRInstruction.OperandFlag;
+import com.oracle.graal.lir.LIRInstruction.OperandMode;
+import com.oracle.graal.lir.StandardOp.BlockEndOp;
+import com.oracle.graal.lir.ValueConsumer;
+import com.oracle.graal.lir.Variable;
+import com.oracle.graal.lir.VirtualStackSlot;
+import com.oracle.graal.lir.alloc.trace.TraceRegisterAllocationPhase;
+import com.oracle.graal.lir.alloc.trace.lsra.TraceLinearScanAllocationPhase.TraceLinearScanAllocationContext;
+import com.oracle.graal.lir.framemap.FrameMapBuilder;
+import com.oracle.graal.lir.gen.LIRGenerationResult;
+import com.oracle.graal.lir.gen.LIRGeneratorTool.MoveFactory;
+import com.oracle.graal.lir.phases.LIRPhase;
+
+/**
+ * An implementation of the linear scan register allocator algorithm described in <a
+ * href="http://doi.acm.org/10.1145/1064979.1064998"
+ * >"Optimized Interval Splitting in a Linear Scan Register Allocator"</a> by Christian Wimmer and
+ * Hanspeter Moessenboeck.
+ */
+public final class TraceLinearScan {
+
+    public static class Options {
+        // @formatter:off
+        @Option(help = "Enable spill position optimization", type = OptionType.Debug)
+        public static final OptionValue<Boolean> LIROptTraceRAEliminateSpillMoves = new NestedBooleanOptionValue(LIRPhase.Options.LIROptimization, true);
+        // @formatter:on
+    }
+
+    private static final TraceLinearScanRegisterAllocationPhase TRACE_LINEAR_SCAN_REGISTER_ALLOCATION_PHASE = new TraceLinearScanRegisterAllocationPhase();
+    private static final TraceLinearScanAssignLocationsPhase TRACE_LINEAR_SCAN_ASSIGN_LOCATIONS_PHASE = new TraceLinearScanAssignLocationsPhase();
+    private static final TraceLinearScanEliminateSpillMovePhase TRACE_LINEAR_SCAN_ELIMINATE_SPILL_MOVE_PHASE = new TraceLinearScanEliminateSpillMovePhase();
+    private static final TraceLinearScanResolveDataFlowPhase TRACE_LINEAR_SCAN_RESOLVE_DATA_FLOW_PHASE = new TraceLinearScanResolveDataFlowPhase();
+    private static final TraceLinearScanLifetimeAnalysisPhase TRACE_LINEAR_SCAN_LIFETIME_ANALYSIS_PHASE = new TraceLinearScanLifetimeAnalysisPhase();
+
+    public static class BlockData {
+
+        /**
+         * Bit map specifying which operands are live upon entry to this block. These are values
+         * used in this block or any of its successors where such values are not defined in this
+         * block. The bit index of an operand is its
+         * {@linkplain TraceLinearScan#operandNumber(Value) operand number}.
+         */
+        public BitSet liveIn;
+
+        /**
+         * Bit map specifying which operands are live upon exit from this block. These are values
+         * used in a successor block that are either defined in this block or were live upon entry
+         * to this block. The bit index of an operand is its
+         * {@linkplain TraceLinearScan#operandNumber(Value) operand number}.
+         */
+        public BitSet liveOut;
+
+        /**
+         * Bit map specifying which operands are used (before being defined) in this block. That is,
+         * these are the values that are live upon entry to the block. The bit index of an operand
+         * is its {@linkplain TraceLinearScan#operandNumber(Value) operand number}.
+         */
+        public BitSet liveGen;
+
+        /**
+         * Bit map specifying which operands are defined/overwritten in this block. The bit index of
+         * an operand is its {@linkplain TraceLinearScan#operandNumber(Value) operand number}.
+         */
+        public BitSet liveKill;
+    }
+
+    public static final int DOMINATOR_SPILL_MOVE_ID = -2;
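+    /** Extra capacity reserved for intervals created by splitting: a right shift by 1 reserves an additional 50%. */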
+    private static final int SPLIT_INTERVALS_CAPACITY_RIGHT_SHIFT = 1;
+
+    private final LIR ir;
+    private final FrameMapBuilder frameMapBuilder;
+    private final RegisterAttributes[] registerAttributes;
+    private final Register[] registers;
+    private final RegisterAllocationConfig regAllocConfig;
+    private final MoveFactory moveFactory;
+
+    private final BlockMap<BlockData> blockData;
+
+    /**
+     * List of blocks in linear-scan order. This is only correct as long as the CFG does not change.
+     */
+    private final List<? extends AbstractBlockBase<?>> sortedBlocks;
+
+    /** @see #fixedIntervals() */
+    private final FixedInterval[] fixedIntervals;
+
+    /** @see #intervals() */
+    private TraceInterval[] intervals;
+
+    /**
+     * The number of valid entries in {@link #intervals}.
+     */
+    private int intervalsSize;
+
+    /**
+     * The index of the first entry in {@link #intervals} for a
+     * {@linkplain #createDerivedInterval(TraceInterval) derived interval}.
+     */
+    private int firstDerivedIntervalIndex = -1;
+
+    /**
+     * Intervals sorted by {@link TraceInterval#from()}.
+     */
+    private TraceInterval[] sortedIntervals;
+
+    /**
+     * Fixed intervals sorted by {@link FixedInterval#from()}.
+     */
+    private FixedInterval[] sortedFixedIntervals;
+
+    /**
+     * Map from an instruction {@linkplain LIRInstruction#id id} to the instruction. Entries should
+     * be retrieved with {@link #instructionForId(int)} as the id is not simply an index into this
+     * array.
+     */
+    private LIRInstruction[] opIdToInstructionMap;
+
+    /**
+     * Map from an instruction {@linkplain LIRInstruction#id id} to the
+     * {@linkplain AbstractBlockBase block} containing the instruction. Entries should be retrieved
+     * with {@link #blockForId(int)} as the id is not simply an index into this array.
+     */
+    private AbstractBlockBase<?>[] opIdToBlockMap;
+
+    protected final TraceBuilderResult<?> traceBuilderResult;
+    private final boolean neverSpillConstants;
+
+    public TraceLinearScan(TargetDescription target, LIRGenerationResult res, MoveFactory spillMoveFactory, RegisterAllocationConfig regAllocConfig, List<? extends AbstractBlockBase<?>> sortedBlocks,
+                    TraceBuilderResult<?> traceBuilderResult, boolean neverSpillConstants) {
+        this.ir = res.getLIR();
+        this.moveFactory = spillMoveFactory;
+        this.frameMapBuilder = res.getFrameMapBuilder();
+        this.sortedBlocks = sortedBlocks;
+        this.registerAttributes = regAllocConfig.getRegisterConfig().getAttributesMap();
+        this.regAllocConfig = regAllocConfig;
+
+        this.registers = target.arch.getRegisters();
+        this.fixedIntervals = new FixedInterval[registers.length];
+        this.blockData = new BlockMap<>(ir.getControlFlowGraph());
+        this.traceBuilderResult = traceBuilderResult;
+        this.neverSpillConstants = neverSpillConstants;
+    }
+
+    public int getFirstLirInstructionId(AbstractBlockBase<?> block) {
+        int result = ir.getLIRforBlock(block).get(0).id();
+        assert result >= 0;
+        return result;
+    }
+
+    public int getLastLirInstructionId(AbstractBlockBase<?> block) {
+        List<LIRInstruction> instructions = ir.getLIRforBlock(block);
+        int result = instructions.get(instructions.size() - 1).id();
+        assert result >= 0;
+        return result;
+    }
+
+    public MoveFactory getSpillMoveFactory() {
+        return moveFactory;
+    }
+
+    protected TraceLocalMoveResolver createMoveResolver() {
+        TraceLocalMoveResolver moveResolver = new TraceLocalMoveResolver(this);
+        assert moveResolver.checkEmpty();
+        return moveResolver;
+    }
+
+    public static boolean isVariableOrRegister(Value value) {
+        return isVariable(value) || isRegister(value);
+    }
+
+    /**
+     * Converts an operand (variable or register) to an index in a flat address space covering all
+     * the {@linkplain Variable variables} and {@linkplain RegisterValue registers} being processed
+     * by this allocator.
+     */
+    @SuppressWarnings("static-method")
+    int operandNumber(Value operand) {
+        assert !isRegister(operand) : "Register do not have operand numbers: " + operand;
+        assert isVariable(operand) : "Unsupported Value " + operand;
+        return ((Variable) operand).index;
+    }
+
+    /**
+     * Gets the number of operands. This value increases by 1 for each new variable.
+     */
+    int operandSize() {
+        return ir.numVariables();
+    }
+
+    /**
+     * Gets the number of registers. This value will never change.
+     */
+    int numRegisters() {
+        return registers.length;
+    }
+
+    public BlockData getBlockData(AbstractBlockBase<?> block) {
+        return blockData.get(block);
+    }
+
+    void initBlockData(AbstractBlockBase<?> block) {
+        blockData.put(block, new BlockData());
+    }
+
+    static final IntervalPredicate IS_PRECOLORED_INTERVAL = new IntervalPredicate() {
+
+        @Override
+        public boolean apply(TraceInterval i) {
+            return isRegister(i.operand);
+        }
+    };
+
+    static final IntervalPredicate IS_VARIABLE_INTERVAL = new IntervalPredicate() {
+
+        @Override
+        public boolean apply(TraceInterval i) {
+            return isVariable(i.operand);
+        }
+    };
+
+    static final IntervalPredicate IS_STACK_INTERVAL = new IntervalPredicate() {
+
+        @Override
+        public boolean apply(TraceInterval i) {
+            return !isRegister(i.operand);
+        }
+    };
+
+    /**
+     * Gets an object describing the attributes of a given register according to this register
+     * configuration.
+     */
+    public RegisterAttributes attributes(Register reg) {
+        return registerAttributes[reg.number];
+    }
+
+    void assignSpillSlot(TraceInterval interval) {
+        /*
+         * Assign the canonical spill slot of the parent (if a part of the interval is already
+         * spilled) or allocate a new spill slot.
+         */
+        if (interval.canMaterialize()) {
+            interval.assignLocation(Value.ILLEGAL);
+        } else if (interval.spillSlot() != null) {
+            interval.assignLocation(interval.spillSlot());
+        } else {
+            VirtualStackSlot slot = frameMapBuilder.allocateSpillSlot(interval.kind());
+            interval.setSpillSlot(slot);
+            interval.assignLocation(slot);
+        }
+    }
+
+    /**
+     * Map from {@linkplain #operandNumber(Value) operand numbers} to intervals.
+     */
+    public TraceInterval[] intervals() {
+        return intervals;
+    }
+
+    /**
+     * Map from {@linkplain #operandNumber(Value) operand numbers} to intervals.
+     */
+    public FixedInterval[] fixedIntervals() {
+        return fixedIntervals;
+    }
+
+    void initIntervals() {
+        intervalsSize = operandSize();
+        intervals = new TraceInterval[intervalsSize + (intervalsSize >> SPLIT_INTERVALS_CAPACITY_RIGHT_SHIFT)];
+    }
+
+    /**
+     * Creates a new fixed interval.
+     *
+     * @param reg the operand for the interval
+     * @return the created interval
+     */
+    FixedInterval createFixedInterval(RegisterValue reg) {
+        FixedInterval interval = new FixedInterval(reg);
+        int operandNumber = reg.getRegister().number;
+        assert fixedIntervals[operandNumber] == null;
+        fixedIntervals[operandNumber] = interval;
+        return interval;
+    }
+
+    /**
+     * Creates a new interval.
+     *
+     * @param operand the operand for the interval
+     * @return the created interval
+     */
+    TraceInterval createInterval(AllocatableValue operand) {
+        assert isLegal(operand);
+        int operandNumber = operandNumber(operand);
+        TraceInterval interval = new TraceInterval(operand, operandNumber);
+        assert operandNumber < intervalsSize;
+        assert intervals[operandNumber] == null;
+        intervals[operandNumber] = interval;
+        return interval;
+    }
+
+    /**
+     * Creates an interval as a result of splitting or spilling another interval.
+     *
+     * @param source an interval being split or spilled
+     * @return a new interval derived from {@code source}
+     */
+    TraceInterval createDerivedInterval(TraceInterval source) {
+        if (firstDerivedIntervalIndex == -1) {
+            firstDerivedIntervalIndex = intervalsSize;
+        }
+        if (intervalsSize == intervals.length) {
+            intervals = Arrays.copyOf(intervals, intervals.length + (intervals.length >> SPLIT_INTERVALS_CAPACITY_RIGHT_SHIFT));
+        }
+        intervalsSize++;
+        Variable variable = new Variable(source.kind(), ir.nextVariable());
+
+        TraceInterval interval = createInterval(variable);
+        assert intervals[intervalsSize - 1] == interval;
+        return interval;
+    }
+
+    // access to block list (sorted in linear scan order)
+    public int blockCount() {
+        return sortedBlocks.size();
+    }
+
+    public AbstractBlockBase<?> blockAt(int index) {
+        return sortedBlocks.get(index);
+    }
+
+    /**
+     * Gets the size of the {@link BlockData#liveIn} and {@link BlockData#liveOut} sets for a basic
+     * block. These sets do not include any operands allocated as a result of creating
+     * {@linkplain #createDerivedInterval(TraceInterval) derived intervals}.
+     */
+    public int liveSetSize() {
+        return firstDerivedIntervalIndex == -1 ? operandSize() : firstDerivedIntervalIndex;
+    }
+
+    int numLoops() {
+        return ir.getControlFlowGraph().getLoops().size();
+    }
+
+    public FixedInterval fixedIntervalFor(RegisterValue reg) {
+        return fixedIntervals[reg.getRegister().number];
+    }
+
+    public FixedInterval getOrCreateFixedInterval(RegisterValue reg) {
+        FixedInterval ret = fixedIntervalFor(reg);
+        if (ret == null) {
+            return createFixedInterval(reg);
+        } else {
+            return ret;
+        }
+    }
+
+    TraceInterval intervalFor(int operandNumber) {
+        return intervals[operandNumber];
+    }
+
+    public TraceInterval intervalFor(Value operand) {
+        int operandNumber = operandNumber(operand);
+        assert operandNumber < intervalsSize;
+        return intervals[operandNumber];
+    }
+
+    public TraceInterval getOrCreateInterval(AllocatableValue operand) {
+        TraceInterval ret = intervalFor(operand);
+        if (ret == null) {
+            return createInterval(operand);
+        } else {
+            return ret;
+        }
+    }
+
+    void initOpIdMaps(int numInstructions) {
+        opIdToInstructionMap = new LIRInstruction[numInstructions];
+        opIdToBlockMap = new AbstractBlockBase<?>[numInstructions];
+    }
+
+    void putOpIdMaps(int index, LIRInstruction op, AbstractBlockBase<?> block) {
+        opIdToInstructionMap[index] = op;
+        opIdToBlockMap[index] = block;
+    }
+
+    /**
+     * Gets the highest instruction id allocated by this object.
+     */
+    int maxOpId() {
+        assert opIdToInstructionMap.length > 0 : "no operations";
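+        // instruction ids are even and consecutive, so the highest id is twice the last map index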
+        return (opIdToInstructionMap.length - 1) << 1;
+    }
+
+    /**
+     * Converts an {@linkplain LIRInstruction#id instruction id} to an instruction index. All LIR
+     * instructions in a method have an index one greater than their linear-scan order predecessor
+     * with the first instruction having an index of 0.
+     */
+    private static int opIdToIndex(int opId) {
+        return opId >> 1;
+    }
+
+    /**
+     * Retrieves the {@link LIRInstruction} based on its {@linkplain LIRInstruction#id id}.
+     *
+     * @param opId an instruction {@linkplain LIRInstruction#id id}
+     * @return the instruction whose {@linkplain LIRInstruction#id} {@code == id}
+     */
+    public LIRInstruction instructionForId(int opId) {
+        assert isEven(opId) : "opId not even";
+        LIRInstruction instr = opIdToInstructionMap[opIdToIndex(opId)];
+        assert instr.id() == opId;
+        return instr;
+    }
+
+    /**
+     * Gets the block containing a given instruction.
+     *
+     * @param opId an instruction {@linkplain LIRInstruction#id id}
+     * @return the block containing the instruction denoted by {@code opId}
+     */
+    public AbstractBlockBase<?> blockForId(int opId) {
+        assert opIdToBlockMap.length > 0 && opId >= 0 && opId <= maxOpId() + 1 : "opId out of range: " + opId;
+        return opIdToBlockMap[opIdToIndex(opId)];
+    }
+
+    boolean isBlockBegin(int opId) {
+        return opId == 0 || blockForId(opId) != blockForId(opId - 1);
+    }
+
+    boolean isBlockEnd(int opId) {
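+        // opId ends its block iff the next instruction (two ids later) begins a new block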
+        boolean isBlockBegin = isBlockBegin(opId + 2);
+        assert isBlockBegin == (instructionForId(opId & (~1)) instanceof BlockEndOp);
+        return isBlockBegin;
+    }
+
+    boolean coversBlockBegin(int opId1, int opId2) {
+        return blockForId(opId1) != blockForId(opId2);
+    }
+
+    /**
+     * Determines if an {@link LIRInstruction} destroys all caller saved registers.
+     *
+     * @param opId an instruction {@linkplain LIRInstruction#id id}
+     * @return {@code true} if the instruction denoted by {@code id} destroys all caller saved
+     *         registers.
+     */
+    boolean hasCall(int opId) {
+        assert isEven(opId) : "opId not even";
+        return instructionForId(opId).destroysCallerSavedRegisters();
+    }
+
+    abstract static class IntervalPredicate {
+
+        abstract boolean apply(TraceInterval i);
+    }
+
+    public boolean isProcessed(Value operand) {
+        return !isRegister(operand) || attributes(asRegister(operand)).isAllocatable();
+    }
+
+    // * Phase 5: actual register allocation
+
+    private static <T extends IntervalHint> boolean isSortedByFrom(T[] intervals) {
+        int from = -1;
+        for (T interval : intervals) {
+            assert interval != null;
+            assert from <= interval.from();
+            from = interval.from();
+        }
+        return true;
+    }
+
+    private static boolean isSortedBySpillPos(TraceInterval[] intervals) {
+        int from = -1;
+        for (TraceInterval interval : intervals) {
+            assert interval != null;
+            assert from <= interval.spillDefinitionPos();
+            from = interval.spillDefinitionPos();
+        }
+        return true;
+    }
+
+    private static TraceInterval addToList(TraceInterval first, TraceInterval prev, TraceInterval interval) {
+        TraceInterval newFirst = first;
+        if (prev != null) {
+            prev.next = interval;
+        } else {
+            newFirst = interval;
+        }
+        return newFirst;
+    }
+
+    TraceInterval createUnhandledListByFrom(IntervalPredicate isList1) {
+        assert isSortedByFrom(sortedIntervals) : "interval list is not sorted";
+        return createUnhandledList(isList1);
+    }
+
+    TraceInterval createUnhandledListBySpillPos(IntervalPredicate isList1) {
+        assert isSortedBySpillPos(sortedIntervals) : "interval list is not sorted";
+        return createUnhandledList(isList1);
+    }
+
+    private TraceInterval createUnhandledList(IntervalPredicate isList1) {
+
+        TraceInterval list1 = TraceInterval.EndMarker;
+
+        TraceInterval list1Prev = null;
+        TraceInterval v;
+
+        int n = sortedIntervals.length;
+        for (int i = 0; i < n; i++) {
+            v = sortedIntervals[i];
+            if (v == null) {
+                continue;
+            }
+
+            if (isList1.apply(v)) {
+                list1 = addToList(list1, list1Prev, v);
+                list1Prev = v;
+            }
+        }
+
+        if (list1Prev != null) {
+            list1Prev.next = TraceInterval.EndMarker;
+        }
+
+        assert list1Prev == null || list1Prev.next == TraceInterval.EndMarker : "linear list ends not with sentinel";
+
+        return list1;
+    }
+
+    private static FixedInterval addToList(FixedInterval first, FixedInterval prev, FixedInterval interval) {
+        FixedInterval newFirst = first;
+        if (prev != null) {
+            prev.next = interval;
+        } else {
+            newFirst = interval;
+        }
+        return newFirst;
+    }
+
+    FixedInterval createFixedUnhandledList() {
+        assert isSortedByFrom(sortedFixedIntervals) : "interval list is not sorted";
+
+        FixedInterval list1 = FixedInterval.EndMarker;
+
+        FixedInterval list1Prev = null;
+        FixedInterval v;
+
+        int n = sortedFixedIntervals.length;
+        for (int i = 0; i < n; i++) {
+            v = sortedFixedIntervals[i];
+            if (v == null) {
+                continue;
+            }
+
+            v.rewindRange();
+            list1 = addToList(list1, list1Prev, v);
+            list1Prev = v;
+        }
+
+        if (list1Prev != null) {
+            list1Prev.next = FixedInterval.EndMarker;
+        }
+
+        assert list1Prev == null || list1Prev.next == FixedInterval.EndMarker : "linear list ends not with sentinel";
+
+        return list1;
+    }
+
+    // SORTING
+
+    protected void sortIntervalsBeforeAllocation() {
+        int sortedLen = 0;
+        for (TraceInterval interval : intervals) {
+            if (interval != null) {
+                sortedLen++;
+            }
+        }
+        sortedIntervals = sortIntervalsBeforeAllocation(intervals, new TraceInterval[sortedLen]);
+    }
+
+    protected void sortFixedIntervalsBeforeAllocation() {
+        int sortedLen = 0;
+        for (FixedInterval interval : fixedIntervals) {
+            if (interval != null) {
+                sortedLen++;
+            }
+        }
+        sortedFixedIntervals = sortIntervalsBeforeAllocation(fixedIntervals, new FixedInterval[sortedLen]);
+    }
+
+    private static <T extends IntervalHint> T[] sortIntervalsBeforeAllocation(T[] intervals, T[] sortedList) {
+        int sortedIdx = 0;
+        int sortedFromMax = -1;
+
+        // special sorting algorithm: the original interval-list is almost sorted,
+        // only some intervals are swapped. So this is much faster than a complete QuickSort
+        for (T interval : intervals) {
+            if (interval != null) {
+                int from = interval.from();
+
+                if (sortedFromMax <= from) {
+                    sortedList[sortedIdx++] = interval;
+                    sortedFromMax = interval.from();
+                } else {
+                    // the assumption that the intervals are already sorted failed,
+                    // so this interval must be inserted into the sorted list manually
+                    int j;
+                    for (j = sortedIdx - 1; j >= 0 && from < sortedList[j].from(); j--) {
+                        sortedList[j + 1] = sortedList[j];
+                    }
+                    sortedList[j + 1] = interval;
+                    sortedIdx++;
+                }
+            }
+        }
+        return sortedList;
+    }
+
+    void sortIntervalsAfterAllocation() {
+        if (firstDerivedIntervalIndex == -1) {
+            // no intervals have been added during allocation, so sorted list is already up to date
+            return;
+        }
+
+        TraceInterval[] oldList = sortedIntervals;
+        TraceInterval[] newList = Arrays.copyOfRange(intervals, firstDerivedIntervalIndex, intervalsSize);
+        int oldLen = oldList.length;
+        int newLen = newList.length;
+
+        // conventional sort-algorithm for new intervals
+        Arrays.sort(newList, (TraceInterval a, TraceInterval b) -> a.from() - b.from());
+
+        // merge old and new list (both already sorted) into one combined list
+        TraceInterval[] combinedList = new TraceInterval[oldLen + newLen];
+        int oldIdx = 0;
+        int newIdx = 0;
+
+        while (oldIdx + newIdx < combinedList.length) {
+            if (newIdx >= newLen || (oldIdx < oldLen && oldList[oldIdx].from() <= newList[newIdx].from())) {
+                combinedList[oldIdx + newIdx] = oldList[oldIdx];
+                oldIdx++;
+            } else {
+                combinedList[oldIdx + newIdx] = newList[newIdx];
+                newIdx++;
+            }
+        }
+
+        sortedIntervals = combinedList;
+    }
+
+    void sortIntervalsBySpillPos() {
+        // TODO (JE): better algorithm?
+        // conventional sort-algorithm for new intervals
+        Arrays.sort(sortedIntervals, (TraceInterval a, TraceInterval b) -> a.spillDefinitionPos() - b.spillDefinitionPos());
+    }
+
+    // wrapper for TraceInterval.getSplitChildAtOpId that performs a bailout in product mode
+    // instead of returning null
+    public TraceInterval splitChildAtOpId(TraceInterval interval, int opId, LIRInstruction.OperandMode mode) {
+        TraceInterval result = interval.getSplitChildAtOpId(opId, mode, this);
+
+        if (result != null) {
+            if (Debug.isLogEnabled()) {
+                Debug.log("Split child at pos %d of interval %s is %s", opId, interval, result);
+            }
+            return result;
+        }
+
+        throw new BailoutException("LinearScan: interval is null");
+    }
+
+    static AllocatableValue canonicalSpillOpr(TraceInterval interval) {
+        assert interval.spillSlot() != null : "canonical spill slot not set";
+        return interval.spillSlot();
+    }
+
+    boolean isMaterialized(AllocatableValue operand, int opId, OperandMode mode) {
+        TraceInterval interval = intervalFor(operand);
+        assert interval != null : "interval must exist";
+
+        if (opId != -1) {
+            /*
+             * Operands are not changed when an interval is split during allocation, so search the
+             * right interval here.
+             */
+            interval = splitChildAtOpId(interval, opId, mode);
+        }
+
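+        // The operand is materialized if its interval never got a location assigned and its
+        // value can be rematerialized from the defining constant instead.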
+        return isIllegal(interval.location()) && interval.canMaterialize();
+    }
+
+    boolean isCallerSave(Value operand) {
+        return attributes(asRegister(operand)).isCallerSave();
+    }
+
+    @SuppressWarnings("try")
+    public <B extends AbstractBlockBase<B>> void allocate(TargetDescription target, LIRGenerationResult lirGenRes, List<B> codeEmittingOrder, List<B> linearScanOrder, MoveFactory spillMoveFactory,
+                    RegisterAllocationConfig registerAllocationConfig) {
+
+        /*
+         * This is the point to enable debug logging for the whole register allocation.
+         */
+        try (Indent indent = Debug.logAndIndent("LinearScan allocate")) {
+            TraceLinearScanAllocationContext context = new TraceLinearScanAllocationContext(spillMoveFactory, registerAllocationConfig, traceBuilderResult, this);
+
+            TRACE_LINEAR_SCAN_LIFETIME_ANALYSIS_PHASE.apply(target, lirGenRes, codeEmittingOrder, linearScanOrder, context, false);
+
+            try (Scope s = Debug.scope("AfterLifetimeAnalysis", (Object) intervals())) {
+                sortIntervalsBeforeAllocation();
+                sortFixedIntervalsBeforeAllocation();
+
+                TRACE_LINEAR_SCAN_REGISTER_ALLOCATION_PHASE.apply(target, lirGenRes, codeEmittingOrder, linearScanOrder, context, false);
+
+                // resolve intra-trace data-flow
+                TRACE_LINEAR_SCAN_RESOLVE_DATA_FLOW_PHASE.apply(target, lirGenRes, codeEmittingOrder, linearScanOrder, context, false);
+                Debug.dump(TraceRegisterAllocationPhase.TRACE_DUMP_LEVEL, sortedBlocks(), "%s", TRACE_LINEAR_SCAN_RESOLVE_DATA_FLOW_PHASE.getName());
+
+                // eliminate spill moves
+                if (Options.LIROptTraceRAEliminateSpillMoves.getValue()) {
+                    TRACE_LINEAR_SCAN_ELIMINATE_SPILL_MOVE_PHASE.apply(target, lirGenRes, codeEmittingOrder, linearScanOrder, context, false);
+                    Debug.dump(TraceRegisterAllocationPhase.TRACE_DUMP_LEVEL, sortedBlocks(), "%s", TRACE_LINEAR_SCAN_ELIMINATE_SPILL_MOVE_PHASE.getName());
+                }
+
+                TRACE_LINEAR_SCAN_ASSIGN_LOCATIONS_PHASE.apply(target, lirGenRes, codeEmittingOrder, linearScanOrder, context, false);
+
+                if (DetailedAsserts.getValue()) {
+                    verifyIntervals();
+                }
+            } catch (Throwable e) {
+                throw Debug.handle(e);
+            }
+        }
+    }
+
+    @SuppressWarnings("try")
+    public void printIntervals(String label) {
+        if (Debug.isDumpEnabled(TraceRegisterAllocationPhase.TRACE_DUMP_LEVEL)) {
+            if (Debug.isLogEnabled()) {
+                try (Indent indent = Debug.logAndIndent("intervals %s", label)) {
+                    for (FixedInterval interval : fixedIntervals) {
+                        if (interval != null) {
+                            Debug.log("%s", interval.logString(this));
+                        }
+                    }
+
+                    for (TraceInterval interval : intervals) {
+                        if (interval != null) {
+                            Debug.log("%s", interval.logString(this));
+                        }
+                    }
+
+                    try (Indent indent2 = Debug.logAndIndent("Basic Blocks")) {
+                        for (int i = 0; i < blockCount(); i++) {
+                            AbstractBlockBase<?> block = blockAt(i);
+                            Debug.log("B%d [%d, %d, %s] ", block.getId(), getFirstLirInstructionId(block), getLastLirInstructionId(block), block.getLoop());
+                        }
+                    }
+                }
+            }
+            Debug.dump(new TraceIntervalDumper(Arrays.copyOf(fixedIntervals, fixedIntervals.length), Arrays.copyOf(intervals, intervalsSize)), label);
+        }
+    }
+
+    public void printLir(String label, @SuppressWarnings("unused") boolean hirValid) {
+        if (Debug.isDumpEnabled(TraceRegisterAllocationPhase.TRACE_DUMP_LEVEL)) {
+            Debug.dump(TraceRegisterAllocationPhase.TRACE_DUMP_LEVEL, sortedBlocks(), label);
+        }
+    }
+
+    boolean verify() {
+        // (check that all intervals have a correct register and that no registers are overwritten)
+        verifyIntervals();
+
+        verifyRegisters();
+
+        Debug.log("no errors found");
+
+        return true;
+    }
+
+    @SuppressWarnings("try")
+    private void verifyRegisters() {
+        // Enable this logging to get output for the verification process.
+        try (Indent indent = Debug.logAndIndent("verifying register allocation")) {
+            RegisterVerifier verifier = new RegisterVerifier(this);
+            verifier.verify(blockAt(0));
+        }
+    }
+
+    @SuppressWarnings("try")
+    protected void verifyIntervals() {
+        try (Indent indent = Debug.logAndIndent("verifying intervals")) {
+            int len = intervalsSize;
+
+            for (int i = 0; i < len; i++) {
+                final TraceInterval i1 = intervals[i];
+                if (i1 == null) {
+                    continue;
+                }
+
+                i1.checkSplitChildren();
+
+                if (i1.operandNumber != i) {
+                    Debug.log("Interval %d is on position %d in list", i1.operandNumber, i);
+                    Debug.log(i1.logString(this));
+                    throw new JVMCIError("");
+                }
+
+                if (isVariable(i1.operand) && i1.kind().equals(LIRKind.Illegal)) {
+                    Debug.log("Interval %d has no type assigned", i1.operandNumber);
+                    Debug.log(i1.logString(this));
+                    throw new JVMCIError("");
+                }
+
+                if (i1.location() == null) {
+                    Debug.log("Interval %d has no register assigned", i1.operandNumber);
+                    Debug.log(i1.logString(this));
+                    throw new JVMCIError("");
+                }
+
+                if (i1.isEmpty()) {
+                    Debug.log("Interval %d has no Range", i1.operandNumber);
+                    Debug.log(i1.logString(this));
+                    throw new JVMCIError("");
+                }
+
+                if (i1.from() >= i1.to()) {
+                    Debug.log("Interval %d has zero length range", i1.operandNumber);
+                    Debug.log(i1.logString(this));
+                    throw new JVMCIError("");
+                }
+
+                // Special intervals that are created in the MoveResolver are ignored
+                // because their range information has no meaning there.
+                if (i1.from() == 1 && i1.to() == 2) {
+                    continue;
+                }
+                // check against all other trace intervals
+                for (int j = i + 1; j < len; j++) {
+                    final TraceInterval i2 = intervals[j];
+                    if (i2 == null) {
+                        continue;
+                    }
+
+                    // Special intervals that are created in the MoveResolver are ignored
+                    // because their range information has no meaning there.
+                    if (i2.from() == 1 && i2.to() == 2) {
+                        continue;
+                    }
+                    Value l1 = i1.location();
+                    Value l2 = i2.location();
+                    boolean intersects = i1.intersects(i2);
+                    if (intersects && !isIllegal(l1) && (l1.equals(l2))) {
+                        if (DetailedAsserts.getValue()) {
+                            Debug.log("Intervals %s and %s overlap and have the same register assigned", i1, i2);
+                            Debug.log(i1.logString(this));
+                            Debug.log(i2.logString(this));
+                        }
+                        throw new BailoutException("");
+                    }
+                }
+                // check fixed intervals
+                for (FixedInterval i2 : fixedIntervals) {
+                    if (i2 == null) {
+                        continue;
+                    }
+
+                    Value l1 = i1.location();
+                    Value l2 = i2.location();
+                    boolean intersects = i2.intersects(i1);
+                    if (intersects && !isIllegal(l1) && (l1.equals(l2))) {
+                        if (DetailedAsserts.getValue()) {
+                            Debug.log("Intervals %s and %s overlap and have the same register assigned", i1, i2);
+                            Debug.log(i1.logString(this));
+                            Debug.log(i2.logString(this));
+                        }
+                        throw new BailoutException("");
+                    }
+                }
+            }
+        }
+    }
+
+    class CheckConsumer implements ValueConsumer {
+
+        boolean ok;
+        FixedInterval curInterval;
+
+        @Override
+        public void visitValue(Value operand, OperandMode mode, EnumSet<OperandFlag> flags) {
+            if (isRegister(operand)) {
+                if (fixedIntervalFor(asRegisterValue(operand)) == curInterval) {
+                    ok = true;
+                }
+            }
+        }
+    }
+
+    @SuppressWarnings("try")
+    void verifyNoOopsInFixedIntervals() {
+        try (Indent indent = Debug.logAndIndent("verifying that no oops are in fixed intervals *")) {
+            CheckConsumer checkConsumer = new CheckConsumer();
+
+            TraceInterval otherIntervals;
+            FixedInterval fixedInts = createFixedUnhandledList();
+            // to ensure the walker runs until the last instruction id, add a dummy interval
+            // with a high operation id
+            otherIntervals = new TraceInterval(Value.ILLEGAL, -1);
+            otherIntervals.addRange(Integer.MAX_VALUE - 2, Integer.MAX_VALUE - 1);
+            TraceIntervalWalker iw = new TraceIntervalWalker(this, fixedInts, otherIntervals);
+
+            for (AbstractBlockBase<?> block : sortedBlocks) {
+                List<LIRInstruction> instructions = ir.getLIRforBlock(block);
+
+                for (int j = 0; j < instructions.size(); j++) {
+                    LIRInstruction op = instructions.get(j);
+
+                    if (op.hasState()) {
+                        iw.walkBefore(op.id());
+                        boolean checkLive = true;
+
+                        /*
+                         * Make sure none of the fixed registers is live across an oopmap since we
+                         * can't handle that correctly.
+                         */
+                        if (checkLive) {
+                            for (FixedInterval interval = iw.activeFixedList.getFixed(); interval != FixedInterval.EndMarker; interval = interval.next) {
+                                if (interval.to() > op.id() + 1) {
+                                    /*
+                                     * This interval is live out of this op so make sure that this
+                                     * interval represents some value that's referenced by this op
+                                     * either as an input or output.
+                                     */
+                                    checkConsumer.curInterval = interval;
+                                    checkConsumer.ok = false;
+
+                                    op.visitEachInput(checkConsumer);
+                                    op.visitEachAlive(checkConsumer);
+                                    op.visitEachTemp(checkConsumer);
+                                    op.visitEachOutput(checkConsumer);
+
+                                    assert checkConsumer.ok : "fixed intervals should never be live across an oopmap point";
+                                }
+                            }
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    public LIR getLIR() {
+        return ir;
+    }
+
+    public FrameMapBuilder getFrameMapBuilder() {
+        return frameMapBuilder;
+    }
+
+    public List<? extends AbstractBlockBase<?>> sortedBlocks() {
+        return sortedBlocks;
+    }
+
+    public Register[] getRegisters() {
+        return registers;
+    }
+
+    public RegisterAllocationConfig getRegisterAllocationConfig() {
+        return regAllocConfig;
+    }
+
+    public boolean callKillsRegisters() {
+        return regAllocConfig.getRegisterConfig().areAllAllocatableRegistersCallerSaved();
+    }
+
+    boolean neverSpillConstants() {
+        return neverSpillConstants;
+    }
+
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/lsra/TraceLinearScanAllocationPhase.java	Mon Nov 30 17:18:36 2015 +0100
@@ -0,0 +1,45 @@
+/*
+ * Copyright (c) 2015, 2015, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+package com.oracle.graal.lir.alloc.trace.lsra;
+
+import com.oracle.graal.compiler.common.alloc.RegisterAllocationConfig;
+import com.oracle.graal.compiler.common.alloc.TraceBuilder.TraceBuilderResult;
+import com.oracle.graal.lir.gen.LIRGeneratorTool.MoveFactory;
+import com.oracle.graal.lir.phases.LIRPhase;
+
+abstract class TraceLinearScanAllocationPhase extends LIRPhase<TraceLinearScanAllocationPhase.TraceLinearScanAllocationContext> {
+
+    static final class TraceLinearScanAllocationContext {
+        public final MoveFactory spillMoveFactory;
+        public final RegisterAllocationConfig registerAllocationConfig;
+        public final TraceBuilderResult<?> traceBuilderResult;
+        public final TraceLinearScan allocator;
+
+        TraceLinearScanAllocationContext(MoveFactory spillMoveFactory, RegisterAllocationConfig registerAllocationConfig, TraceBuilderResult<?> traceBuilderResult, TraceLinearScan allocator) {
+            this.spillMoveFactory = spillMoveFactory;
+            this.registerAllocationConfig = registerAllocationConfig;
+            this.traceBuilderResult = traceBuilderResult;
+            this.allocator = allocator;
+        }
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/lsra/TraceLinearScanAssignLocationsPhase.java	Mon Nov 30 17:18:36 2015 +0100
@@ -0,0 +1,287 @@
+/*
+ * Copyright (c) 2015, 2015, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+package com.oracle.graal.lir.alloc.trace.lsra;
+
+import static com.oracle.graal.compiler.common.GraalOptions.DetailedAsserts;
+import static com.oracle.graal.lir.LIRValueUtil.isConstantValue;
+import static com.oracle.graal.lir.LIRValueUtil.isStackSlotValue;
+import static com.oracle.graal.lir.LIRValueUtil.isVariable;
+import static com.oracle.graal.lir.LIRValueUtil.isVirtualStackSlot;
+import static com.oracle.graal.lir.alloc.trace.TraceRegisterAllocationPhase.Options.TraceRAshareSpillInformation;
+import static jdk.vm.ci.code.ValueUtil.isIllegal;
+import static jdk.vm.ci.code.ValueUtil.isRegister;
+
+import java.util.Collections;
+import java.util.EnumSet;
+import java.util.List;
+
+import jdk.vm.ci.code.RegisterValue;
+import jdk.vm.ci.code.StackSlot;
+import jdk.vm.ci.code.TargetDescription;
+import jdk.vm.ci.meta.AllocatableValue;
+import jdk.vm.ci.meta.Value;
+
+import com.oracle.graal.compiler.common.cfg.AbstractBlockBase;
+import com.oracle.graal.debug.Debug;
+import com.oracle.graal.debug.Indent;
+import com.oracle.graal.lir.ConstantValue;
+import com.oracle.graal.lir.InstructionValueProcedure;
+import com.oracle.graal.lir.LIRFrameState;
+import com.oracle.graal.lir.LIRInstruction;
+import com.oracle.graal.lir.LIRInstruction.OperandFlag;
+import com.oracle.graal.lir.LIRInstruction.OperandMode;
+import com.oracle.graal.lir.StandardOp;
+import com.oracle.graal.lir.StandardOp.BlockEndOp;
+import com.oracle.graal.lir.StandardOp.LabelOp;
+import com.oracle.graal.lir.StandardOp.MoveOp;
+import com.oracle.graal.lir.StandardOp.ValueMoveOp;
+import com.oracle.graal.lir.Variable;
+import com.oracle.graal.lir.alloc.trace.ShadowedRegisterValue;
+import com.oracle.graal.lir.gen.LIRGenerationResult;
+import com.oracle.graal.lir.gen.LIRGeneratorTool.MoveFactory;
+
+/**
+ * Specialization of {@link com.oracle.graal.lir.alloc.lsra.LinearScanAssignLocationsPhase} that
+ * inserts {@link ShadowedRegisterValue}s to describe {@link RegisterValue}s that are also available
+ * on the {@link StackSlot stack}.
+ */
+final class TraceLinearScanAssignLocationsPhase extends TraceLinearScanAllocationPhase {
+
+    @Override
+    protected <B extends AbstractBlockBase<B>> void run(TargetDescription target, LIRGenerationResult lirGenRes, List<B> codeEmittingOrder, List<B> linearScanOrder,
+                    TraceLinearScanAllocationContext context) {
+        TraceLinearScan allocator = context.allocator;
+        MoveFactory spillMoveFactory = context.spillMoveFactory;
+        new Assigner(allocator, spillMoveFactory).assignLocations();
+    }
+
+    private static final class Assigner {
+        private final TraceLinearScan allocator;
+        private final MoveFactory spillMoveFactory;
+
+        private Assigner(TraceLinearScan allocator, MoveFactory spillMoveFactory) {
+            this.allocator = allocator;
+            this.spillMoveFactory = spillMoveFactory;
+        }
+
+        /**
+         * Returns the location allocated for an LIR instruction operand.
+         *
+         * @param op current {@link LIRInstruction}
+         * @param operand an LIR instruction operand
+         * @param mode the usage mode for {@code operand} by the instruction
+         * @return the location assigned for the operand
+         */
+        private Value colorLirOperand(LIRInstruction op, Variable operand, OperandMode mode) {
+            int opId = op.id();
+            TraceInterval interval = allocator.intervalFor(operand);
+            assert interval != null : "interval must exist";
+
+            if (opId != -1) {
+                if (DetailedAsserts.getValue()) {
+                    AbstractBlockBase<?> block = allocator.blockForId(opId);
+                    if (block.getSuccessorCount() <= 1 && opId == allocator.getLastLirInstructionId(block)) {
+                        /*
+                         * Spill moves may have been appended at the end of this block, but before
+                         * the branch instruction, in which case the split child information for
+                         * this branch would be incorrect.
+                         */
+                        LIRInstruction instr = allocator.getLIR().getLIRforBlock(block).get(allocator.getLIR().getLIRforBlock(block).size() - 1);
+                        if (instr instanceof StandardOp.JumpOp) {
+                            if (allocator.getBlockData(block).liveOut.get(allocator.operandNumber(operand))) {
+                                assert false : String.format(
+                                                "can't get split child for the last branch of a block because the information would be incorrect (moves are inserted before the branch in resolveDataFlow) block=%s, instruction=%s, operand=%s",
+                                                block, instr, operand);
+                            }
+                        }
+                    }
+                }
+
+                /*
+                 * Operands are not changed when an interval is split during allocation, so search
+                 * the right interval here.
+                 */
+                interval = allocator.splitChildAtOpId(interval, opId, mode);
+            }
+
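+            // If the interval was not assigned a location but its value can be rematerialized,
+            // replace the operand with the constant itself.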
+            if (isIllegal(interval.location()) && interval.canMaterialize()) {
+                assert mode != OperandMode.DEF;
+                return new ConstantValue(interval.kind(), interval.getMaterializedValue());
+            }
+            return interval.location();
+        }
+
+        /**
+         * @param op
+         * @param operand
+         * @param valueMode
+         * @param flags
+         * @see InstructionValueProcedure#doValue(LIRInstruction, Value, OperandMode, EnumSet)
+         */
+        private Value debugInfoProcedure(LIRInstruction op, Value operand, OperandMode valueMode, EnumSet<OperandFlag> flags) {
+            if (isVirtualStackSlot(operand)) {
+                return operand;
+            }
+            int tempOpId = op.id();
+            OperandMode mode = OperandMode.USE;
+            AbstractBlockBase<?> block = allocator.blockForId(tempOpId);
+            if (block.getSuccessorCount() == 1 && tempOpId == allocator.getLastLirInstructionId(block)) {
+                /*
+                 * Generating debug information for the last instruction of a block. If this
+                 * instruction is a branch, spill moves are inserted before this branch and so the
+                 * wrong operand would be returned (spill moves at block boundaries are not
+                 * considered in the live ranges of intervals).
+                 *
+                 * Solution: use the first opId of the branch target block instead.
+                 */
+                final LIRInstruction instr = allocator.getLIR().getLIRforBlock(block).get(allocator.getLIR().getLIRforBlock(block).size() - 1);
+                if (instr instanceof StandardOp.JumpOp) {
+                    if (allocator.getBlockData(block).liveOut.get(allocator.operandNumber(operand))) {
+                        tempOpId = allocator.getFirstLirInstructionId(block.getSuccessors().iterator().next());
+                        mode = OperandMode.DEF;
+                    }
+                }
+            }
+
+            /*
+             * Get the current location of the operand. The operand must be live because debug
+             * information is considered when building the intervals. If the interval is not
+             * live, colorLirOperand will trigger an assertion failure.
+             */
+            Value result = colorLirOperand(op, (Variable) operand, mode);
+            assert !allocator.hasCall(tempOpId) || isStackSlotValue(result) || isConstantValue(result) || !allocator.isCallerSave(result) : "cannot have caller-save register operands at calls";
+            return result;
+        }
+
+        private void computeDebugInfo(final LIRInstruction op, LIRFrameState info) {
+            info.forEachState(op, this::debugInfoProcedure);
+        }
+
+        private void assignLocations(List<LIRInstruction> instructions) {
+            int numInst = instructions.size();
+            boolean hasDead = false;
+
+            for (int j = 0; j < numInst; j++) {
+                final LIRInstruction op = instructions.get(j);
+                if (op == null) {
+                    /*
+                     * this can happen when spill-moves are removed in eliminateSpillMoves
+                     */
+                    hasDead = true;
+                } else if (assignLocations(op, instructions, j)) {
+                    hasDead = true;
+                }
+            }
+
+            if (hasDead) {
+                // Remove null values from the list.
+                instructions.removeAll(Collections.singleton(null));
+            }
+        }
+
+        /**
+         * Assigns locations to the operands of an {@link LIRInstruction}.
+         *
+         * @param op The {@link LIRInstruction} that should be colored.
+         * @param instructions The instructions of the current block.
+         * @param j The index of {@code op} in the {@code instructions} list.
+         * @return {@code true} if the instruction was deleted.
+         */
+        private boolean assignLocations(LIRInstruction op, List<LIRInstruction> instructions, int j) {
+            assert op != null && instructions.get(j) == op;
+            if (TraceRAshareSpillInformation.getValue()) {
+                if (op instanceof BlockEndOp) {
+                    ((BlockEndOp) op).forEachOutgoingValue(colorOutgoingIncomingValues);
+                } else if (op instanceof LabelOp) {
+                    ((LabelOp) op).forEachIncomingValue(colorOutgoingIncomingValues);
+                }
+            }
+
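+            // Replace every variable operand with the location chosen by the allocator; all
+            // other operands are left unchanged.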
+            InstructionValueProcedure assignProc = (inst, operand, mode, flags) -> isVariable(operand) ? colorLirOperand(inst, (Variable) operand, mode) : operand;
+            // remove useless moves
+            if (op instanceof MoveOp) {
+                AllocatableValue result = ((MoveOp) op).getResult();
+                if (isVariable(result) && allocator.isMaterialized(result, op.id(), OperandMode.DEF)) {
+                    /*
+                     * This happens if a materializable interval is originally not spilled but then
+                     * kicked out in LinearScanWalker.splitForSpilling(). When kicking out such an
+                     * interval this move operation was already generated.
+                     */
+                    instructions.set(j, null);
+                    return true;
+                }
+            }
+
+            op.forEachInput(assignProc);
+            op.forEachAlive(assignProc);
+            op.forEachTemp(assignProc);
+            op.forEachOutput(assignProc);
+
+            // compute reference map and debug information
+            op.forEachState((inst, state) -> computeDebugInfo(inst, state));
+
+            // remove useless moves
+            if (op instanceof ValueMoveOp) {
+                ValueMoveOp move = (ValueMoveOp) op;
+                if (move.getInput().equals(move.getResult())) {
+                    instructions.set(j, null);
+                    return true;
+                }
+                if (isStackSlotValue(move.getInput()) && isStackSlotValue(move.getResult())) {
+                    // rewrite stack to stack moves
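+                    // (the move factory creates a dedicated stack-to-stack move, typically via a
+                    // scratch register, since most targets cannot move memory to memory directly)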
+                    instructions.set(j, spillMoveFactory.createStackMove(move.getResult(), move.getInput()));
+                    return true;
+                }
+            }
+            return false;
+        }
+
+        @SuppressWarnings("try")
+        private void assignLocations() {
+            try (Indent indent = Debug.logAndIndent("assign locations")) {
+                for (AbstractBlockBase<?> block : allocator.sortedBlocks()) {
+                    try (Indent indent2 = Debug.logAndIndent("assign locations in block B%d", block.getId())) {
+                        assignLocations(allocator.getLIR().getLIRforBlock(block));
+                    }
+                }
+            }
+        }
+
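+        // Rewrites values passed between traces (outgoing values of block ends, incoming values
+        // of labels): if a variable currently lives in a register but its spill slot also holds
+        // the value, it is replaced by a ShadowedRegisterValue so that adjacent traces can use
+        // either location.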
+        private InstructionValueProcedure colorOutgoingIncomingValues = new InstructionValueProcedure() {
+
+            public Value doValue(LIRInstruction instruction, Value value, OperandMode mode, EnumSet<OperandFlag> flags) {
+                if (isVariable(value)) {
+                    TraceInterval interval = allocator.intervalFor(value);
+                    assert interval != null : "interval must exist";
+                    interval = allocator.splitChildAtOpId(interval, instruction.id(), mode);
+
+                    if (interval.inMemoryAt(instruction.id()) && isRegister(interval.location())) {
+                        return new ShadowedRegisterValue((RegisterValue) interval.location(), interval.spillSlot());
+                    }
+                }
+                return value;
+            }
+        };
+    }
+
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/lsra/TraceLinearScanEliminateSpillMovePhase.java	Mon Nov 30 17:18:36 2015 +0100
@@ -0,0 +1,237 @@
+/*
+ * Copyright (c) 2015, 2015, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+package com.oracle.graal.lir.alloc.trace.lsra;
+
+import static com.oracle.graal.compiler.common.GraalOptions.DetailedAsserts;
+import static com.oracle.graal.lir.LIRValueUtil.isStackSlotValue;
+import static com.oracle.graal.lir.LIRValueUtil.isVariable;
+import static jdk.vm.ci.code.ValueUtil.isRegister;
+
+import java.util.List;
+
+import jdk.vm.ci.code.TargetDescription;
+import jdk.vm.ci.meta.AllocatableValue;
+
+import com.oracle.graal.compiler.common.alloc.TraceBuilder.TraceBuilderResult;
+import com.oracle.graal.compiler.common.cfg.AbstractBlockBase;
+import com.oracle.graal.debug.Debug;
+import com.oracle.graal.debug.Indent;
+import com.oracle.graal.lir.LIRInsertionBuffer;
+import com.oracle.graal.lir.LIRInstruction;
+import com.oracle.graal.lir.LIRInstruction.OperandMode;
+import com.oracle.graal.lir.StandardOp.LoadConstantOp;
+import com.oracle.graal.lir.StandardOp.MoveOp;
+import com.oracle.graal.lir.StandardOp.ValueMoveOp;
+import com.oracle.graal.lir.alloc.trace.lsra.TraceInterval.SpillState;
+import com.oracle.graal.lir.alloc.trace.lsra.TraceLinearScan.IntervalPredicate;
+import com.oracle.graal.lir.gen.LIRGenerationResult;
+
+final class TraceLinearScanEliminateSpillMovePhase extends TraceLinearScanAllocationPhase {
+
+    private static final IntervalPredicate spilledIntervals = new TraceLinearScan.IntervalPredicate() {
+
+        @Override
+        public boolean apply(TraceInterval i) {
+            return i.isSplitParent() && SpillState.IN_MEMORY.contains(i.spillState());
+        }
+    };
+
+    @Override
+    protected <B extends AbstractBlockBase<B>> void run(TargetDescription target, LIRGenerationResult lirGenRes, List<B> codeEmittingOrder, List<B> linearScanOrder,
+                    TraceLinearScanAllocationContext context) {
+        TraceBuilderResult<?> traceBuilderResult = context.traceBuilderResult;
+        TraceLinearScan allocator = context.allocator;
+        boolean shouldEliminateSpillMoves = shouldEliminateSpillMoves(traceBuilderResult, allocator);
+        eliminateSpillMoves(allocator, shouldEliminateSpillMoves, traceBuilderResult);
+    }
+
+    private static boolean shouldEliminateSpillMoves(TraceBuilderResult<?> traceBuilderResult, TraceLinearScan allocator) {
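+        // Spill moves may only be eliminated if no other trace enters this trace through a side
+        // edge.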
+        return !traceBuilderResult.incomingSideEdges(traceBuilderResult.getTraceForBlock(allocator.sortedBlocks().get(0)));
+    }
+
+    // called once before assignment of register numbers
+    @SuppressWarnings("try")
+    private static void eliminateSpillMoves(TraceLinearScan allocator, boolean shouldEliminateSpillMoves, TraceBuilderResult<?> traceBuilderResult) {
+        try (Indent indent = Debug.logAndIndent("Eliminating unnecessary spill moves: Trace%d", traceBuilderResult.getTraceForBlock(allocator.sortedBlocks().get(0)))) {
+            allocator.sortIntervalsBySpillPos();
+
+            /*
+             * collect all intervals that must be stored after their definition. The list is sorted
+             * by Interval.spillDefinitionPos.
+             */
+            TraceInterval interval = allocator.createUnhandledListBySpillPos(spilledIntervals);
+            if (DetailedAsserts.getValue()) {
+                checkIntervals(interval);
+            }
+            if (Debug.isLogEnabled()) {
+                try (Indent indent2 = Debug.logAndIndent("Sorted intervals")) {
+                    for (TraceInterval i = interval; i != null; i = i.next) {
+                        Debug.log("%5d: %s", i.spillDefinitionPos(), i);
+                    }
+                }
+            }
+
+            LIRInsertionBuffer insertionBuffer = new LIRInsertionBuffer();
+            for (AbstractBlockBase<?> block : allocator.sortedBlocks()) {
+                try (Indent indent1 = Debug.logAndIndent("Handle %s", block)) {
+                    List<LIRInstruction> instructions = allocator.getLIR().getLIRforBlock(block);
+                    int numInst = instructions.size();
+
+                    int lastOpId = -1;
+                    // iterate all instructions of the block.
+                    for (int j = 0; j < numInst; j++) {
+                        LIRInstruction op = instructions.get(j);
+                        int opId = op.id();
+                        try (Indent indent2 = Debug.logAndIndent("%5d %s", opId, op)) {
+
+                            if (opId == -1) {
+                                MoveOp move = (MoveOp) op;
+                                /*
+                                 * Remove move from register to stack if the stack slot is
+                                 * guaranteed to be correct. Only moves that have been inserted by
+                                 * LinearScan can be removed.
+                                 */
+                                if (shouldEliminateSpillMoves && canEliminateSpillMove(allocator, block, move, lastOpId)) {
+                                    /*
+                                     * Move target is a stack slot that is always correct, so
+                                     * eliminate instruction.
+                                     */
+                                    if (Debug.isLogEnabled()) {
+                                        if (move instanceof ValueMoveOp) {
+                                            ValueMoveOp vmove = (ValueMoveOp) move;
+                                            Debug.log("eliminating move from interval %d (%s) to %d (%s) in block %s", allocator.operandNumber(vmove.getInput()), vmove.getInput(),
+                                                            allocator.operandNumber(vmove.getResult()), vmove.getResult(), block);
+                                        } else {
+                                            LoadConstantOp load = (LoadConstantOp) move;
+                                            Debug.log("eliminating constant load from %s to %d (%s) in block %s", load.getConstant(), allocator.operandNumber(load.getResult()), load.getResult(),
+                                                            block);
+                                        }
+                                    }
+
+                                    // null-instructions are deleted by assignRegNum
+                                    instructions.set(j, null);
+                                }
+
+                            } else {
+                                lastOpId = opId;
+                                /*
+                                 * Insert move from register to stack just after the beginning of
+                                 * the interval.
+                                 */
+                                // assert interval == TraceInterval.EndMarker ||
+                                // interval.spillDefinitionPos() >= opId : "invalid order";
+                                assert interval == TraceInterval.EndMarker || (interval.isSplitParent() && SpillState.IN_MEMORY.contains(interval.spillState())) : "invalid interval";
+
+                                while (interval != TraceInterval.EndMarker && interval.spillDefinitionPos() == opId) {
+                                    Debug.log("handle %s", interval);
+                                    if (!interval.canMaterialize()) {
+                                        if (!insertionBuffer.initialized()) {
+                                            /*
+                                             * prepare insertion buffer (appended when all
+                                             * instructions in the block are processed)
+                                             */
+                                            insertionBuffer.init(instructions);
+                                        }
+
+                                        AllocatableValue fromLocation = interval.getSplitChildAtOpId(opId, OperandMode.DEF, allocator).location();
+                                        AllocatableValue toLocation = TraceLinearScan.canonicalSpillOpr(interval);
+                                        if (!fromLocation.equals(toLocation)) {
+
+                                            assert isRegister(fromLocation) : "from operand must be a register but is: " + fromLocation + " toLocation=" + toLocation + " spillState=" +
+                                                            interval.spillState();
+                                            assert isStackSlotValue(toLocation) : "to operand must be a stack slot";
+
+                                            LIRInstruction move = allocator.getSpillMoveFactory().createMove(toLocation, fromLocation);
+                                            insertionBuffer.append(j + 1, move);
+
+                                            if (Debug.isLogEnabled()) {
+                                                Debug.log("inserting move after definition of interval %d to stack slot %s at opId %d", interval.operandNumber, interval.spillSlot(), opId);
+                                            }
+                                        }
+                                    }
+                                    interval = interval.next;
+                                }
+                            }
+                        }
+                    } // end of instruction iteration
+
+                    if (insertionBuffer.initialized()) {
+                        insertionBuffer.finish();
+                    }
+                }
+            } // end of block iteration
+
+            assert interval == TraceInterval.EndMarker : "missed an interval";
+        }
+    }
+
+    /**
+     * @param allocator
+     * @param block The block {@code move} is located in.
+     * @param move Spill move.
+     * @param lastOpId The id of the last "normal" instruction before the spill move. (Spill moves
+     *            have no valid opId; their id is -1.)
+     */
+    private static boolean canEliminateSpillMove(TraceLinearScan allocator, AbstractBlockBase<?> block, MoveOp move, int lastOpId) {
+        assert isVariable(move.getResult()) : "LinearScan inserts only moves to variables: " + move;
+        assert lastOpId >= 0 : "Invalid lastOpId: " + lastOpId;
+
+        TraceInterval curInterval = allocator.intervalFor(move.getResult());
+
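+        // The spill move is redundant if the target interval is not kept in a register, is
+        // already in memory at this position, and the move merely resolves a phi (the interval
+        // is not a split parent).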
+        if (!isRegister(curInterval.location()) && curInterval.inMemoryAt(lastOpId) && isPhiResolutionMove(allocator, move)) {
+            assert isStackSlotValue(curInterval.location()) : "Not a stack slot: " + curInterval.location();
+            return true;
+        }
+        return false;
+    }
+
+    private static boolean isPhiResolutionMove(TraceLinearScan allocator, MoveOp move) {
+        TraceInterval curInterval = allocator.intervalFor(move.getResult());
+        return !curInterval.isSplitParent();
+    }
+
+    private static void checkIntervals(TraceInterval interval) {
+        TraceInterval prev = null;
+        TraceInterval temp = interval;
+        while (temp != TraceInterval.EndMarker) {
+            assert temp.spillDefinitionPos() >= 0 : "invalid spill definition pos";
+            if (prev != null) {
+                // assert temp.from() >= prev.from() : "intervals not sorted";
+                assert temp.spillDefinitionPos() >= prev.spillDefinitionPos() : "when intervals are sorted by from, they must also be sorted by spillDefinitionPos";
+            }
+
+            assert temp.spillSlot() != null || temp.canMaterialize() : "interval has no spill slot assigned";
+            assert temp.spillDefinitionPos() >= temp.from() : "invalid order";
+            // assert temp.spillDefinitionPos() <= temp.from() + 2 :
+            // "only intervals defined once at their start-pos can be optimized";
+
+            if (Debug.isLogEnabled()) {
+                Debug.log("interval %d (from %d to %d) must be stored at %d", temp.operandNumber, temp.from(), temp.to(), temp.spillDefinitionPos());
+            }
+
+            prev = temp;
+            temp = temp.next;
+        }
+    }
+
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/lsra/TraceLinearScanLifetimeAnalysisPhase.java	Mon Nov 30 17:18:36 2015 +0100
@@ -0,0 +1,678 @@
+/*
+ * Copyright (c) 2015, 2015, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+package com.oracle.graal.lir.alloc.trace.lsra;
+
+import static com.oracle.graal.lir.LIRValueUtil.asVariable;
+import static com.oracle.graal.lir.LIRValueUtil.isStackSlotValue;
+import static com.oracle.graal.lir.LIRValueUtil.isVariable;
+import static com.oracle.graal.lir.alloc.trace.TraceRegisterAllocationPhase.Options.TraceRAshareSpillInformation;
+import static com.oracle.graal.lir.alloc.trace.TraceRegisterAllocationPhase.Options.TraceRAuseInterTraceHints;
+import static com.oracle.graal.lir.alloc.trace.TraceUtil.asShadowedRegisterValue;
+import static com.oracle.graal.lir.alloc.trace.TraceUtil.isShadowedRegisterValue;
+import static com.oracle.graal.lir.alloc.trace.lsra.TraceLinearScan.isVariableOrRegister;
+import static jdk.vm.ci.code.ValueUtil.asRegisterValue;
+import static jdk.vm.ci.code.ValueUtil.asStackSlot;
+import static jdk.vm.ci.code.ValueUtil.isRegister;
+import static jdk.vm.ci.code.ValueUtil.isStackSlot;
+
+import java.util.BitSet;
+import java.util.EnumSet;
+import java.util.List;
+
+import jdk.vm.ci.code.BailoutException;
+import jdk.vm.ci.code.Register;
+import jdk.vm.ci.code.RegisterValue;
+import jdk.vm.ci.code.TargetDescription;
+import jdk.vm.ci.common.JVMCIError;
+import jdk.vm.ci.meta.AllocatableValue;
+import jdk.vm.ci.meta.JavaConstant;
+import jdk.vm.ci.meta.LIRKind;
+import jdk.vm.ci.meta.Value;
+
+import com.oracle.graal.compiler.common.alloc.ComputeBlockOrder;
+import com.oracle.graal.compiler.common.alloc.TraceBuilder.TraceBuilderResult;
+import com.oracle.graal.compiler.common.cfg.AbstractBlockBase;
+import com.oracle.graal.debug.Debug;
+import com.oracle.graal.debug.Indent;
+import com.oracle.graal.lir.InstructionValueConsumer;
+import com.oracle.graal.lir.LIR;
+import com.oracle.graal.lir.LIRInstruction;
+import com.oracle.graal.lir.LIRInstruction.OperandFlag;
+import com.oracle.graal.lir.LIRInstruction.OperandMode;
+import com.oracle.graal.lir.LIRValueUtil;
+import com.oracle.graal.lir.StandardOp.BlockEndOp;
+import com.oracle.graal.lir.StandardOp.LabelOp;
+import com.oracle.graal.lir.StandardOp.LoadConstantOp;
+import com.oracle.graal.lir.StandardOp.ValueMoveOp;
+import com.oracle.graal.lir.ValueConsumer;
+import com.oracle.graal.lir.Variable;
+import com.oracle.graal.lir.alloc.trace.ShadowedRegisterValue;
+import com.oracle.graal.lir.alloc.trace.lsra.TraceInterval.RegisterPriority;
+import com.oracle.graal.lir.alloc.trace.lsra.TraceInterval.SpillState;
+import com.oracle.graal.lir.gen.LIRGenerationResult;
+import com.oracle.graal.lir.ssi.SSIUtil;
+
+final class TraceLinearScanLifetimeAnalysisPhase extends TraceLinearScanAllocationPhase {
+
+    @Override
+    protected <B extends AbstractBlockBase<B>> void run(TargetDescription target, LIRGenerationResult lirGenRes, List<B> codeEmittingOrder, List<B> linearScanOrder,
+                    TraceLinearScanAllocationContext context) {
+        TraceBuilderResult<?> traceBuilderResult = context.traceBuilderResult;
+        TraceLinearScan allocator = context.allocator;
+        new Analyser(allocator, traceBuilderResult).analyze();
+    }
+
+    private static final class Analyser {
+        private static final int DUMP_DURING_ANALYSIS_LEVEL = 4;
+        private final TraceLinearScan allocator;
+        private final TraceBuilderResult<?> traceBuilderResult;
+
+        /**
+         * @param linearScan the trace linear scan allocator whose intervals are built
+         * @param traceBuilderResult the result of the trace building phase
+         */
+        private Analyser(TraceLinearScan linearScan, TraceBuilderResult<?> traceBuilderResult) {
+            allocator = linearScan;
+            this.traceBuilderResult = traceBuilderResult;
+        }
+
+        private void analyze() {
+            numberInstructions();
+            allocator.printLir("Before register allocation", true);
+            buildIntervals();
+        }
+
+        private boolean sameTrace(AbstractBlockBase<?> a, AbstractBlockBase<?> b) {
+            return traceBuilderResult.getTraceForBlock(b) == traceBuilderResult.getTraceForBlock(a);
+        }
+
+        private boolean isAllocatedOrCurrent(AbstractBlockBase<?> currentBlock, AbstractBlockBase<?> other) {
+            return traceBuilderResult.getTraceForBlock(other) <= traceBuilderResult.getTraceForBlock(currentBlock);
+        }
+
+        private static void setHint(final LIRInstruction op, TraceInterval to, IntervalHint from) {
+            IntervalHint currentHint = to.locationHint(false);
+            if (currentHint == null) {
+                /*
+                 * Update hint if there was none or if the hint interval starts after the hinted
+                 * interval.
+                 */
+                to.setLocationHint(from);
+                if (Debug.isLogEnabled()) {
+                    Debug.log("operation at opId %d: added hint from interval %s to %s", op.id(), from, to);
+                }
+            }
+        }
+
+        /**
+         * Numbers all instructions in all blocks. The numbering follows the
+         * {@linkplain ComputeBlockOrder linear scan order}.
+         */
+        private void numberInstructions() {
+
+            allocator.initIntervals();
+
+            ValueConsumer setVariableConsumer = (value, mode, flags) -> {
+                if (isVariable(value)) {
+                    allocator.getOrCreateInterval(asVariable(value));
+                }
+            };
+
+            // Assign IDs to LIR nodes and build a mapping, lirOps, from ID to LIRInstruction node.
+            int numInstructions = 0;
+            for (AbstractBlockBase<?> block : allocator.sortedBlocks()) {
+                numInstructions += allocator.getLIR().getLIRforBlock(block).size();
+            }
+
+            // initialize with correct length
+            allocator.initOpIdMaps(numInstructions);
+
+            int opId = 0;
+            int index = 0;
+            for (AbstractBlockBase<?> block : allocator.sortedBlocks()) {
+                allocator.initBlockData(block);
+
+                List<LIRInstruction> instructions = allocator.getLIR().getLIRforBlock(block);
+
+                int numInst = instructions.size();
+                for (int j = 0; j < numInst; j++) {
+                    LIRInstruction op = instructions.get(j);
+                    op.setId(opId);
+
+                    allocator.putOpIdMaps(index, op, block);
+                    assert allocator.instructionForId(opId) == op : "must match";
+
+                    op.visitEachTemp(setVariableConsumer);
+                    op.visitEachOutput(setVariableConsumer);
+
+                    index++;
+                    opId += 2; // numbering of lirOps by two
+                }
+            }
+            assert index == numInstructions : "must match";
+            assert (index << 1) == opId : "must match: " + (index << 1);
+        }
+
+        private void addUse(AllocatableValue operand, int from, int to, RegisterPriority registerPriority, LIRKind kind) {
+            if (!allocator.isProcessed(operand)) {
+                return;
+            }
+            if (isRegister(operand)) {
+                addFixedUse(asRegisterValue(operand), from, to);
+            } else {
+                assert isVariable(operand) : operand;
+                addVariableUse(asVariable(operand), from, to, registerPriority, kind);
+            }
+        }
+
+        private void addFixedUse(RegisterValue reg, int from, int to) {
+            FixedInterval interval = allocator.getOrCreateFixedInterval(reg);
+            interval.addRange(from, to);
+            if (Debug.isLogEnabled()) {
+                Debug.log("add fixed use: %s, at %d", interval, to);
+            }
+        }
+
+        private void addVariableUse(Variable operand, int from, int to, RegisterPriority registerPriority, LIRKind kind) {
+            TraceInterval interval = allocator.getOrCreateInterval(operand);
+
+            if (!kind.equals(LIRKind.Illegal)) {
+                interval.setKind(kind);
+            }
+
+            interval.addRange(from, to);
+
+            // Register use position at even instruction id.
+            interval.addUsePos(to & ~1, registerPriority);
+
+            if (Debug.isLogEnabled()) {
+                Debug.log("add use: %s, at %d (%s)", interval, to, registerPriority.name());
+            }
+        }
+
+        private void addTemp(AllocatableValue operand, int tempPos, RegisterPriority registerPriority, LIRKind kind) {
+            if (!allocator.isProcessed(operand)) {
+                return;
+            }
+            if (isRegister(operand)) {
+                addFixedTemp(asRegisterValue(operand), tempPos);
+            } else {
+                assert isVariable(operand) : operand;
+                addVariableTemp(asVariable(operand), tempPos, registerPriority, kind);
+            }
+        }
+
+        private void addFixedTemp(RegisterValue reg, int tempPos) {
+            FixedInterval interval = allocator.getOrCreateFixedInterval(reg);
+            interval.addRange(tempPos, tempPos + 1);
+            if (Debug.isLogEnabled()) {
+                Debug.log("add fixed temp: %s, at %d", interval, tempPos);
+            }
+        }
+
+        private void addVariableTemp(Variable operand, int tempPos, RegisterPriority registerPriority, LIRKind kind) {
+            TraceInterval interval = allocator.getOrCreateInterval(operand);
+
+            if (!kind.equals(LIRKind.Illegal)) {
+                interval.setKind(kind);
+            }
+
+            if (interval.isEmpty()) {
+                interval.addRange(tempPos, tempPos + 1);
+            } else if (interval.from() > tempPos) {
+                interval.setFrom(tempPos);
+            }
+
+            interval.addUsePos(tempPos, registerPriority);
+            interval.addMaterializationValue(null);
+
+            if (Debug.isLogEnabled()) {
+                Debug.log("add temp: %s tempPos %d (%s)", interval, tempPos, RegisterPriority.MustHaveRegister.name());
+            }
+        }
+
+        private void addDef(AllocatableValue operand, LIRInstruction op, RegisterPriority registerPriority, LIRKind kind) {
+            if (!allocator.isProcessed(operand)) {
+                return;
+            }
+            if (isRegister(operand)) {
+                addFixedDef(asRegisterValue(operand), op);
+            } else {
+                assert isVariable(operand) : operand;
+                addVariableDef(asVariable(operand), op, registerPriority, kind);
+            }
+        }
+
+        private void addFixedDef(RegisterValue reg, LIRInstruction op) {
+            FixedInterval interval = allocator.getOrCreateFixedInterval(reg);
+            int defPos = op.id();
+            if (interval.from() <= defPos) {
+                /*
+                 * Update the starting point (when a range is first created for a use, its start is
+                 * the beginning of the current block until a def is encountered).
+                 */
+                interval.setFrom(defPos);
+
+            } else {
+                /*
+                 * Dead value - make a vacuous interval; also add register priority for dead intervals.
+                 */
+                interval.addRange(defPos, defPos + 1);
+                if (Debug.isLogEnabled()) {
+                    Debug.log("Warning: def of operand %s at %d occurs without use", reg, defPos);
+                }
+            }
+            if (Debug.isLogEnabled()) {
+                Debug.log("add fixed def: %s, at %d", interval, defPos);
+            }
+        }
+
+        private void addVariableDef(Variable operand, LIRInstruction op, RegisterPriority registerPriority, LIRKind kind) {
+            int defPos = op.id();
+
+            TraceInterval interval = allocator.getOrCreateInterval(operand);
+
+            if (!kind.equals(LIRKind.Illegal)) {
+                interval.setKind(kind);
+            }
+
+            if (interval.isEmpty()) {
+                /*
+                 * Dead value - make a vacuous interval; also add register priority for dead intervals.
+                 */
+                interval.addRange(defPos, defPos + 1);
+                interval.addUsePos(defPos, registerPriority);
+                if (Debug.isLogEnabled()) {
+                    Debug.log("Warning: def of operand %s at %d occurs without use", operand, defPos);
+                }
+            } else {
+                /*
+                 * Update the starting point (when a range is first created for a use, its start is
+                 * the beginning of the current block until a def is encountered).
+                 */
+                interval.setFrom(defPos);
+                interval.addUsePos(defPos, registerPriority);
+            }
+
+            changeSpillDefinitionPos(op, operand, interval, defPos);
+            if (registerPriority == RegisterPriority.None && interval.spillState().ordinal() <= SpillState.StartInMemory.ordinal() && isStackSlot(operand)) {
+                // detection of method-parameters and roundfp-results
+                interval.setSpillState(SpillState.StartInMemory);
+            }
+            interval.addMaterializationValue(getMaterializedValue(op, operand, interval));
+
+            if (Debug.isLogEnabled()) {
+                Debug.log("add def: %s defPos %d (%s)", interval, defPos, registerPriority.name());
+            }
+        }
+
+        private void addRegisterHint(final LIRInstruction op, final Value targetValue, OperandMode mode, EnumSet<OperandFlag> flags, final boolean hintAtDef) {
+            if (flags.contains(OperandFlag.HINT) && TraceLinearScan.isVariableOrRegister(targetValue)) {
+
+                op.forEachRegisterHint(targetValue, mode, (registerHint, valueMode, valueFlags) -> {
+                    if (TraceLinearScan.isVariableOrRegister(registerHint)) {
+                        /*
+                         * TODO (je): clean up
+                         */
+                        final AllocatableValue fromValue;
+                        final AllocatableValue toValue;
+                        /* hints always point from def to use */
+                        if (hintAtDef) {
+                            fromValue = (AllocatableValue) registerHint;
+                            toValue = (AllocatableValue) targetValue;
+                        } else {
+                            fromValue = (AllocatableValue) targetValue;
+                            toValue = (AllocatableValue) registerHint;
+                        }
+                        Debug.log("addRegisterHint %s to %s", fromValue, toValue);
+                        final TraceInterval to;
+                        final IntervalHint from;
+                        if (isRegister(toValue)) {
+                            if (isRegister(fromValue)) {
+                                // fixed to fixed move
+                                return null;
+                            }
+                            from = getIntervalHint(toValue);
+                            to = allocator.getOrCreateInterval(fromValue);
+                        } else {
+                            to = allocator.getOrCreateInterval(toValue);
+                            from = getIntervalHint(fromValue);
+                        }
+
+                        to.setLocationHint(from);
+                        if (Debug.isLogEnabled()) {
+                            Debug.log("operation at opId %d: added hint from interval %s to %s", op.id(), from, to);
+                        }
+
+                        return registerHint;
+                    }
+                    return null;
+                });
+            }
+        }
+
+        private IntervalHint getIntervalHint(AllocatableValue from) {
+            if (isRegister(from)) {
+                return allocator.getOrCreateFixedInterval(asRegisterValue(from));
+            }
+            return allocator.getOrCreateInterval(from);
+        }
+
+        /**
+         * Eliminates moves from register to stack if the stack slot is known to be correct.
+         *
+         * @param op
+         * @param operand
+         */
+        private void changeSpillDefinitionPos(LIRInstruction op, AllocatableValue operand, TraceInterval interval, int defPos) {
+            assert interval.isSplitParent() : "can only be called for split parents";
+
+            switch (interval.spillState()) {
+                case NoDefinitionFound:
+                    // assert interval.spillDefinitionPos() == -1 : "must not be set before";
+                    interval.setSpillDefinitionPos(defPos);
+                    if (!(op instanceof LabelOp)) {
+                        // Do not update state for labels. This will be done afterwards.
+                        interval.setSpillState(SpillState.NoSpillStore);
+                    }
+                    break;
+
+                case NoSpillStore:
+                    assert defPos <= interval.spillDefinitionPos() : "positions are processed in reverse order when intervals are created";
+                    if (defPos < interval.spillDefinitionPos() - 2) {
+                        /*
+                         * Second definition found, so no spill optimization possible for this
+                         * interval.
+                         */
+                        interval.setSpillState(SpillState.NoOptimization);
+                    } else {
+                        // two consecutive definitions (because of two-operand LIR form)
+                        assert allocator.blockForId(defPos) == allocator.blockForId(interval.spillDefinitionPos()) : "block must be equal";
+                    }
+                    break;
+
+                case NoOptimization:
+                    // nothing to do
+                    break;
+
+                default:
+                    throw new BailoutException("other states not allowed at this time");
+            }
+        }
+
+        private static boolean optimizeMethodArgument(Value value) {
+            /*
+             * Object method arguments that are passed on the stack are currently not optimized
+             * because this requires that the runtime visits method arguments during stack walking.
+             */
+            return isStackSlot(value) && asStackSlot(value).isInCallerFrame() && value.getLIRKind().isValue();
+        }
+
+        /**
+         * Determines the register priority for an instruction's output/result operand.
+         */
+        private static RegisterPriority registerPriorityOfOutputOperand(LIRInstruction op) {
+            if (op instanceof LabelOp) {
+                // skip method header
+                return RegisterPriority.None;
+            }
+            if (op instanceof ValueMoveOp) {
+                ValueMoveOp move = (ValueMoveOp) op;
+                if (optimizeMethodArgument(move.getInput())) {
+                    return RegisterPriority.None;
+                }
+            }
+
+            // all other operands require a register
+            return RegisterPriority.MustHaveRegister;
+        }
+
+        /**
+         * Determines the priority with which an instruction's input operand will be allocated a
+         * register.
+         */
+        private static RegisterPriority registerPriorityOfInputOperand(EnumSet<OperandFlag> flags) {
+            if (flags.contains(OperandFlag.OUTGOING)) {
+                return RegisterPriority.None;
+            }
+            if (flags.contains(OperandFlag.STACK)) {
+                return RegisterPriority.ShouldHaveRegister;
+            }
+            // all other operands require a register
+            return RegisterPriority.MustHaveRegister;
+        }
+
+        @SuppressWarnings("try")
+        private void buildIntervals() {
+
+            try (Indent indent = Debug.logAndIndent("build intervals")) {
+                InstructionValueConsumer outputConsumer = (op, operand, mode, flags) -> {
+                    if (TraceLinearScan.isVariableOrRegister(operand)) {
+                        addDef((AllocatableValue) operand, op, registerPriorityOfOutputOperand(op), operand.getLIRKind());
+                        addRegisterHint(op, operand, mode, flags, true);
+                    }
+                };
+
+                InstructionValueConsumer tempConsumer = (op, operand, mode, flags) -> {
+                    if (TraceLinearScan.isVariableOrRegister(operand)) {
+                        addTemp((AllocatableValue) operand, op.id(), RegisterPriority.MustHaveRegister, operand.getLIRKind());
+                        addRegisterHint(op, operand, mode, flags, false);
+                    }
+                };
+
+                InstructionValueConsumer aliveConsumer = (op, operand, mode, flags) -> {
+                    if (TraceLinearScan.isVariableOrRegister(operand)) {
+                        RegisterPriority p = registerPriorityOfInputOperand(flags);
+                        int opId = op.id();
+                        int blockFrom = allocator.getFirstLirInstructionId((allocator.blockForId(opId)));
+                        addUse((AllocatableValue) operand, blockFrom, opId + 1, p, operand.getLIRKind());
+                        addRegisterHint(op, operand, mode, flags, false);
+                    }
+                };
+
+                InstructionValueConsumer inputConsumer = (op, operand, mode, flags) -> {
+                    if (TraceLinearScan.isVariableOrRegister(operand)) {
+                        int opId = op.id();
+                        RegisterPriority p = registerPriorityOfInputOperand(flags);
+                        int blockFrom = allocator.getFirstLirInstructionId((allocator.blockForId(opId)));
+                        addUse((AllocatableValue) operand, blockFrom, opId, p, operand.getLIRKind());
+                        addRegisterHint(op, operand, mode, flags, false);
+                    }
+                };
+
+                InstructionValueConsumer stateProc = (op, operand, mode, flags) -> {
+                    if (TraceLinearScan.isVariableOrRegister(operand)) {
+                        int opId = op.id();
+                        int blockFrom = allocator.getFirstLirInstructionId((allocator.blockForId(opId)));
+                        addUse((AllocatableValue) operand, blockFrom, opId + 1, RegisterPriority.None, operand.getLIRKind());
+                    }
+                };
+
+                // create a list with all caller-save registers (cpu, fpu, xmm)
+                Register[] callerSaveRegs = allocator.getRegisterAllocationConfig().getRegisterConfig().getCallerSaveRegisters();
+
+                // iterate all blocks in reverse order
+                for (int i = allocator.blockCount() - 1; i >= 0; i--) {
+
+                    AbstractBlockBase<?> block = allocator.blockAt(i);
+                    // TODO (je) make empty bitset - remove
+                    allocator.getBlockData(block).liveIn = new BitSet();
+                    allocator.getBlockData(block).liveOut = new BitSet();
+                    try (Indent indent2 = Debug.logAndIndent("handle block %d", block.getId())) {
+
+                        List<LIRInstruction> instructions = allocator.getLIR().getLIRforBlock(block);
+
+                        /*
+                         * Iterate all instructions of the block in reverse order so that
+                         * definitions of intervals are processed before uses.
+                         */
+                        for (int j = instructions.size() - 1; j >= 0; j--) {
+                            final LIRInstruction op = instructions.get(j);
+                            final int opId = op.id();
+
+                            try (Indent indent3 = Debug.logAndIndent("handle inst %d: %s", opId, op)) {
+
+                                // add a temp range for each register if operation destroys
+                                // caller-save registers
+                                if (op.destroysCallerSavedRegisters()) {
+                                    for (Register r : callerSaveRegs) {
+                                        if (allocator.attributes(r).isAllocatable()) {
+                                            addTemp(r.asValue(), opId, RegisterPriority.None, LIRKind.Illegal);
+                                        }
+                                    }
+                                    if (Debug.isLogEnabled()) {
+                                        Debug.log("operation destroys all caller-save registers");
+                                    }
+                                }
+
+                                op.visitEachOutput(outputConsumer);
+                                op.visitEachTemp(tempConsumer);
+                                op.visitEachAlive(aliveConsumer);
+                                op.visitEachInput(inputConsumer);
+
+                                /*
+                                 * Add uses of live locals from interpreter's point of view for
+                                 * proper debug information generation. Treat these operands as temp
+                                 * values (if the live range is extended to a call site, the value
+                                 * would be in a register at the call otherwise).
+                                 */
+                                op.visitEachState(stateProc);
+                            }
+
+                        } // end of instruction iteration
+                    }
+                    if (Debug.isDumpEnabled(DUMP_DURING_ANALYSIS_LEVEL)) {
+                        allocator.printIntervals("After Block " + block);
+                    }
+                } // end of block iteration
+
+                // fix spill state for phi/sigma intervals
+                for (TraceInterval interval : allocator.intervals()) {
+                    if (interval != null && interval.spillState().equals(SpillState.NoDefinitionFound) && interval.spillDefinitionPos() != -1) {
+                        // there was a definition in a phi/sigma
+                        interval.setSpillState(SpillState.NoSpillStore);
+                    }
+                }
+                if (TraceRAuseInterTraceHints.getValue()) {
+                    addInterTraceHints();
+                }
+                /*
+                 * Add the range [-1, 0] to all fixed intervals so that the register allocator
+                 * does not need to handle unhandled fixed intervals.
+                 */
+                for (FixedInterval interval : allocator.fixedIntervals()) {
+                    if (interval != null) {
+                        /* We use [-1, 0] to avoid intersection with incoming values. */
+                        interval.addRange(-1, 0);
+                    }
+                }
+            }
+        }
+
+        private void addInterTraceHints() {
+            // set hints for phi/sigma intervals
+            LIR lir = allocator.getLIR();
+            for (AbstractBlockBase<?> block : allocator.sortedBlocks()) {
+                LabelOp label = SSIUtil.incoming(lir, block);
+                for (AbstractBlockBase<?> pred : block.getPredecessors()) {
+                    if (isAllocatedOrCurrent(block, pred)) {
+                        BlockEndOp outgoing = SSIUtil.outgoing(lir, pred);
+                        for (int i = 0; i < outgoing.getOutgoingSize(); i++) {
+                            Value toValue = label.getIncomingValue(i);
+                            assert !isShadowedRegisterValue(toValue) : "Shadowed Registers are not allowed here: " + toValue;
+                            if (isVariable(toValue)) {
+                                Value fromValue = outgoing.getOutgoingValue(i);
+                                assert sameTrace(block, pred) || !isVariable(fromValue) : "Unallocated variable: " + fromValue;
+                                if (!LIRValueUtil.isConstantValue(fromValue)) {
+                                    addInterTraceHint(label, (AllocatableValue) toValue, fromValue);
+                                }
+                            }
+                        }
+                    }
+                }
+            }
+        }
+
+        private void addInterTraceHint(LabelOp label, AllocatableValue toValue, Value fromValue) {
+            assert isVariable(toValue) : "Wrong toValue: " + toValue;
+            assert isRegister(fromValue) || isVariable(fromValue) || isStackSlotValue(fromValue) || isShadowedRegisterValue(fromValue) : "Wrong fromValue: " + fromValue;
+            if (isVariableOrRegister(fromValue)) {
+                TraceInterval to = allocator.getOrCreateInterval(toValue);
+                IntervalHint from = getIntervalHint((AllocatableValue) fromValue);
+                setHint(label, to, from);
+            } else if (isStackSlotValue(fromValue)) {
+                TraceInterval to = allocator.getOrCreateInterval(toValue);
+                to.setSpillSlot((AllocatableValue) fromValue);
+                to.setSpillState(SpillState.StartInMemory);
+            } else if (TraceRAshareSpillInformation.getValue() && isShadowedRegisterValue(fromValue)) {
+                ShadowedRegisterValue shadowedRegisterValue = asShadowedRegisterValue(fromValue);
+                IntervalHint from = getIntervalHint(shadowedRegisterValue.getRegister());
+                TraceInterval to = allocator.getOrCreateInterval(toValue);
+                setHint(label, to, from);
+                to.setSpillSlot(shadowedRegisterValue.getStackSlot());
+                to.setSpillState(SpillState.StartInMemory);
+            } else {
+                throw JVMCIError.shouldNotReachHere();
+            }
+        }
+
+        /**
+         * Returns a value for an interval definition, which can be used for re-materialization.
+         *
+         * @param op An instruction which defines a value
+         * @param operand The destination operand of the instruction
+         * @param interval The interval for this defined value.
+         * @return the value that is moved by the instruction and that can be reused at all
+         *         reload locations in case the interval of this instruction is spilled.
+         *         Currently this can only be a {@link JavaConstant}.
+         */
+        private JavaConstant getMaterializedValue(LIRInstruction op, Value operand, TraceInterval interval) {
+            if (op instanceof LoadConstantOp) {
+                LoadConstantOp move = (LoadConstantOp) op;
+                if (move.getConstant() instanceof JavaConstant) {
+                    if (!allocator.neverSpillConstants()) {
+                        if (!allocator.getSpillMoveFactory().allowConstantToStackMove(move.getConstant())) {
+                            return null;
+                        }
+                        /*
+                         * Check if the interval has any uses which would accept a stack location
+                         * (priority == ShouldHaveRegister). Rematerialization of such intervals can
+                         * result in a degradation, because rematerialization always inserts a
+                         * constant load, even if the value is not needed in a register.
+                         */
+                        UsePosList usePosList = interval.usePosList();
+                        int numUsePos = usePosList.size();
+                        for (int useIdx = 0; useIdx < numUsePos; useIdx++) {
+                            TraceInterval.RegisterPriority priority = usePosList.registerPriority(useIdx);
+                            if (priority == TraceInterval.RegisterPriority.ShouldHaveRegister) {
+                                return null;
+                            }
+                        }
+                    }
+                    return (JavaConstant) move.getConstant();
+                }
+            }
+            return null;
+        }
+    }
+}
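The lifetime analysis above walks each trace block backwards: a use extends an interval pessimistically back to the block start, and a later-visited def tightens the start to the definition position (or creates a one-position "dead" range when no use follows, as the warning in addVariableDef notes). The following standalone sketch illustrates only that reverse-order rule; it is not the Graal API, and names such as ToyIntervalBuilder and ToyInterval are illustrative.

    import java.util.HashMap;
    import java.util.Map;

    final class ToyIntervalBuilder {
        static final class ToyInterval {
            int from = Integer.MAX_VALUE; // start of the live range (tightened by defs)
            int to = Integer.MIN_VALUE;   // end of the live range (extended by uses)

            boolean isEmpty() {
                return from > to;
            }

            @Override
            public String toString() {
                return "[" + from + ", " + to + ")";
            }
        }

        private final Map<String, ToyInterval> intervals = new HashMap<>();

        private ToyInterval get(String var) {
            return intervals.computeIfAbsent(var, v -> new ToyInterval());
        }

        // A use at opId (seen before its def, because instructions are visited in reverse)
        // keeps the value live from the block start until the use.
        void addUse(String var, int blockFrom, int opId) {
            ToyInterval i = get(var);
            i.from = Math.min(i.from, blockFrom);
            i.to = Math.max(i.to, opId);
        }

        // A def at opId tightens the start of the range; a def without any following use
        // yields a one-position "dead" range, mirroring the warning logged above.
        void addDef(String var, int opId) {
            ToyInterval i = get(var);
            if (i.isEmpty()) {
                i.from = opId;
                i.to = opId + 1;
            } else {
                i.from = opId;
            }
        }

        public static void main(String[] args) {
            ToyIntervalBuilder b = new ToyIntervalBuilder();
            // Block [0, 10), processed in reverse: first the use of v at 8, then its def at 4.
            b.addUse("v", 0, 8);
            b.addDef("v", 4);
            System.out.println(b.intervals.get("v")); // prints [4, 8)
        }
    }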
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/lsra/TraceLinearScanRegisterAllocationPhase.java	Mon Nov 30 17:18:36 2015 +0100
@@ -0,0 +1,58 @@
+/*
+ * Copyright (c) 2015, 2015, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+package com.oracle.graal.lir.alloc.trace.lsra;
+
+import java.util.List;
+
+import jdk.vm.ci.code.TargetDescription;
+
+import com.oracle.graal.compiler.common.cfg.AbstractBlockBase;
+import com.oracle.graal.debug.Debug;
+import com.oracle.graal.debug.Indent;
+import com.oracle.graal.lir.gen.LIRGenerationResult;
+
+final class TraceLinearScanRegisterAllocationPhase extends TraceLinearScanAllocationPhase {
+
+    @Override
+    protected <B extends AbstractBlockBase<B>> void run(TargetDescription target, LIRGenerationResult lirGenRes, List<B> codeEmittingOrder, List<B> linearScanOrder,
+                    TraceLinearScanAllocationContext context) {
+        TraceLinearScan allocator = context.allocator;
+        allocator.printIntervals("Before register allocation");
+        allocateRegisters(allocator);
+        allocator.printIntervals("After register allocation");
+    }
+
+    @SuppressWarnings("try")
+    private static void allocateRegisters(TraceLinearScan allocator) {
+        try (Indent indent = Debug.logAndIndent("allocate registers")) {
+            FixedInterval precoloredIntervals = allocator.createFixedUnhandledList();
+            TraceInterval notPrecoloredIntervals = allocator.createUnhandledListByFrom(TraceLinearScan.IS_VARIABLE_INTERVAL);
+
+            // allocate cpu registers
+            TraceLinearScanWalker lsw = new TraceLinearScanWalker(allocator, precoloredIntervals, notPrecoloredIntervals);
+            lsw.walk();
+            lsw.finishAllocation();
+        }
+    }
+
+}
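The phase above hands the sorted fixed and variable interval lists to TraceLinearScanWalker, which visits intervals in order of their start positions, retires the ones that have ended, and assigns a register to the current one or spills it. The sketch below shows that basic walk under simplified assumptions (one contiguous range per interval, no splitting, no fixed intervals); the class and method names are illustrative and not part of the Graal code.

    import java.util.ArrayDeque;
    import java.util.ArrayList;
    import java.util.Comparator;
    import java.util.Iterator;
    import java.util.List;

    final class ToyLinearScan {
        static final class SimpleInterval {
            final String name;
            final int from;
            final int to;
            Integer reg;

            SimpleInterval(String name, int from, int to) {
                this.name = name;
                this.from = from;
                this.to = to;
            }
        }

        static void allocate(List<SimpleInterval> intervals, int numRegs) {
            intervals.sort(Comparator.comparingInt(i -> i.from));
            ArrayDeque<Integer> freeRegs = new ArrayDeque<>();
            for (int r = 0; r < numRegs; r++) {
                freeRegs.add(r);
            }
            List<SimpleInterval> active = new ArrayList<>();

            for (SimpleInterval current : intervals) {
                // Expire intervals that end before the current start, releasing their registers.
                for (Iterator<SimpleInterval> it = active.iterator(); it.hasNext();) {
                    SimpleInterval a = it.next();
                    if (a.to <= current.from) {
                        freeRegs.add(a.reg);
                        it.remove();
                    }
                }
                if (!freeRegs.isEmpty()) {
                    current.reg = freeRegs.poll();
                    active.add(current);
                } else {
                    current.reg = null; // would be spilled; the real walker splits/spills here
                }
            }
        }

        public static void main(String[] args) {
            List<SimpleInterval> ivs = new ArrayList<>(List.of(
                new SimpleInterval("a", 0, 10), new SimpleInterval("b", 2, 6), new SimpleInterval("c", 7, 12)));
            allocate(ivs, 2);
            ivs.forEach(i -> System.out.println(i.name + " -> " + (i.reg == null ? "spill" : "r" + i.reg)));
        }
    }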
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/lsra/TraceLinearScanResolveDataFlowPhase.java	Mon Nov 30 17:18:36 2015 +0100
@@ -0,0 +1,240 @@
+/*
+ * Copyright (c) 2015, 2015, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+package com.oracle.graal.lir.alloc.trace.lsra;
+
+import static com.oracle.graal.compiler.common.GraalOptions.DetailedAsserts;
+import static com.oracle.graal.lir.LIRValueUtil.asConstant;
+import static com.oracle.graal.lir.LIRValueUtil.isConstantValue;
+import static com.oracle.graal.lir.LIRValueUtil.isStackSlotValue;
+import static com.oracle.graal.lir.LIRValueUtil.isVirtualStackSlot;
+import static jdk.vm.ci.code.ValueUtil.isRegister;
+
+import java.util.BitSet;
+import java.util.List;
+import java.util.ListIterator;
+
+import jdk.vm.ci.code.TargetDescription;
+import jdk.vm.ci.meta.Value;
+
+import com.oracle.graal.compiler.common.alloc.TraceBuilder.TraceBuilderResult;
+import com.oracle.graal.compiler.common.cfg.AbstractBlockBase;
+import com.oracle.graal.debug.Debug;
+import com.oracle.graal.debug.DebugMetric;
+import com.oracle.graal.debug.Indent;
+import com.oracle.graal.lir.LIRInstruction;
+import com.oracle.graal.lir.StandardOp;
+import com.oracle.graal.lir.gen.LIRGenerationResult;
+import com.oracle.graal.lir.ssa.SSAUtil.PhiValueVisitor;
+import com.oracle.graal.lir.ssi.SSIUtil;
+
+/**
+ * Phase 6: resolve data flow
+ *
+ * Insert moves at edges between blocks if intervals have been split.
+ */
+final class TraceLinearScanResolveDataFlowPhase extends TraceLinearScanAllocationPhase {
+
+    @Override
+    protected <B extends AbstractBlockBase<B>> void run(TargetDescription target, LIRGenerationResult lirGenRes, List<B> codeEmittingOrder, List<B> linearScanOrder,
+                    TraceLinearScanAllocationContext context) {
+        TraceBuilderResult<?> traceBuilderResult = context.traceBuilderResult;
+        TraceLinearScan allocator = context.allocator;
+        new Resolver(allocator, traceBuilderResult).resolveDataFlow(allocator.sortedBlocks());
+    }
+
+    private static final class Resolver {
+        private final TraceLinearScan allocator;
+        private final TraceBuilderResult<?> traceBuilderResult;
+
+        private Resolver(TraceLinearScan allocator, TraceBuilderResult<?> traceBuilderResult) {
+            this.allocator = allocator;
+            this.traceBuilderResult = traceBuilderResult;
+        }
+
+        private void resolveFindInsertPos(AbstractBlockBase<?> fromBlock, AbstractBlockBase<?> toBlock, TraceLocalMoveResolver moveResolver) {
+            if (fromBlock.getSuccessorCount() <= 1) {
+                if (Debug.isLogEnabled()) {
+                    Debug.log("inserting moves at end of fromBlock B%d", fromBlock.getId());
+                }
+
+                List<LIRInstruction> instructions = allocator.getLIR().getLIRforBlock(fromBlock);
+                LIRInstruction instr = instructions.get(instructions.size() - 1);
+                if (instr instanceof StandardOp.JumpOp) {
+                    // insert moves before branch
+                    moveResolver.setInsertPosition(instructions, instructions.size() - 1);
+                } else {
+                    moveResolver.setInsertPosition(instructions, instructions.size());
+                }
+
+            } else {
+                if (Debug.isLogEnabled()) {
+                    Debug.log("inserting moves at beginning of toBlock B%d", toBlock.getId());
+                }
+
+                if (DetailedAsserts.getValue()) {
+                    assert allocator.getLIR().getLIRforBlock(fromBlock).get(0) instanceof StandardOp.LabelOp : "block does not start with a label";
+
+                    /*
+                     * Because the number of predecessor edges matches the number of successor
+                     * edges, blocks which are reached by switch statements may have more than
+                     * one predecessor, but it is guaranteed that all predecessors are the same.
+                     */
+                    for (AbstractBlockBase<?> predecessor : toBlock.getPredecessors()) {
+                        assert fromBlock == predecessor : "all critical edges must be broken";
+                    }
+                }
+
+                moveResolver.setInsertPosition(allocator.getLIR().getLIRforBlock(toBlock), 1);
+            }
+        }
+
+        /**
+         * Inserts necessary moves (spilling or reloading) at edges between blocks for intervals
+         * that have been split.
+         */
+        @SuppressWarnings("try")
+        private void resolveDataFlow(List<? extends AbstractBlockBase<?>> blocks) {
+            if (blocks.size() < 2) {
+                // no resolution necessary
+                return;
+            }
+            try (Indent indent = Debug.logAndIndent("resolve data flow")) {
+
+                TraceLocalMoveResolver moveResolver = allocator.createMoveResolver();
+                ListIterator<? extends AbstractBlockBase<?>> it = blocks.listIterator();
+                AbstractBlockBase<?> toBlock = null;
+                for (AbstractBlockBase<?> fromBlock = it.next(); it.hasNext(); fromBlock = toBlock) {
+                    toBlock = it.next();
+                    assert containedInTrace(fromBlock) : "Not in Trace: " + fromBlock;
+                    assert containedInTrace(toBlock) : "Not in Trace: " + toBlock;
+                    resolveCollectMappings(fromBlock, toBlock, moveResolver);
+                }
+                assert blocks.get(blocks.size() - 1).equals(toBlock);
+                if (toBlock.isLoopEnd()) {
+                    assert toBlock.getSuccessorCount() == 1;
+                    AbstractBlockBase<?> loopHeader = toBlock.getSuccessors().get(0);
+                    if (containedInTrace(loopHeader)) {
+                        resolveCollectMappings(toBlock, loopHeader, moveResolver);
+                    }
+                }
+
+            }
+        }
+
+        @SuppressWarnings("try")
+        private void resolveCollectMappings(AbstractBlockBase<?> fromBlock, AbstractBlockBase<?> toBlock, TraceLocalMoveResolver moveResolver) {
+            try (Indent indent0 = Debug.logAndIndent("Edge %s -> %s", fromBlock, toBlock)) {
+                collectLSRAMappings(fromBlock, toBlock, moveResolver);
+                collectSSIMappings(fromBlock, toBlock, moveResolver);
+            }
+        }
+
+        protected void collectLSRAMappings(AbstractBlockBase<?> fromBlock, AbstractBlockBase<?> toBlock, TraceLocalMoveResolver moveResolver) {
+            assert moveResolver.checkEmpty();
+
+            int toBlockFirstInstructionId = allocator.getFirstLirInstructionId(toBlock);
+            int fromBlockLastInstructionId = allocator.getLastLirInstructionId(fromBlock) + 1;
+            int numOperands = allocator.operandSize();
+            BitSet liveAtEdge = allocator.getBlockData(toBlock).liveIn;
+
+            // visit all variables for which the liveAtEdge bit is set
+            for (int operandNum = liveAtEdge.nextSetBit(0); operandNum >= 0; operandNum = liveAtEdge.nextSetBit(operandNum + 1)) {
+                assert operandNum < numOperands : "live information set for non-existing interval";
+                assert allocator.getBlockData(fromBlock).liveOut.get(operandNum) && allocator.getBlockData(toBlock).liveIn.get(operandNum) : "interval not live at this edge";
+
+                TraceInterval fromInterval = allocator.splitChildAtOpId(allocator.intervalFor(operandNum), fromBlockLastInstructionId, LIRInstruction.OperandMode.DEF);
+                TraceInterval toInterval = allocator.splitChildAtOpId(allocator.intervalFor(operandNum), toBlockFirstInstructionId, LIRInstruction.OperandMode.DEF);
+
+                if (fromInterval != toInterval && !fromInterval.location().equals(toInterval.location())) {
+                    // need to insert move instruction
+                    moveResolver.addMapping(fromInterval, toInterval);
+                }
+            }
+        }
+
+        protected void collectSSIMappings(AbstractBlockBase<?> fromBlock, AbstractBlockBase<?> toBlock, TraceLocalMoveResolver moveResolver) {
+            // collect all intervals that have been split between
+            // fromBlock and toBlock
+            SSIUtil.forEachValuePair(allocator.getLIR(), toBlock, fromBlock, new MyPhiValueVisitor(moveResolver, toBlock, fromBlock));
+            if (moveResolver.hasMappings()) {
+                resolveFindInsertPos(fromBlock, toBlock, moveResolver);
+                moveResolver.resolveAndAppendMoves();
+            }
+        }
+
+        private boolean containedInTrace(AbstractBlockBase<?> block) {
+            return currentTrace() == traceBuilderResult.getTraceForBlock(block);
+        }
+
+        private int currentTrace() {
+            return traceBuilderResult.getTraceForBlock(allocator.sortedBlocks().get(0));
+        }
+
+        private static final DebugMetric numSSIResolutionMoves = Debug.metric("SSI LSRA[numSSIResolutionMoves]");
+        private static final DebugMetric numStackToStackMoves = Debug.metric("SSI LSRA[numStackToStackMoves]");
+
+        private class MyPhiValueVisitor implements PhiValueVisitor {
+            final TraceLocalMoveResolver moveResolver;
+            final int toId;
+            final int fromId;
+
+            public MyPhiValueVisitor(TraceLocalMoveResolver moveResolver, AbstractBlockBase<?> toBlock, AbstractBlockBase<?> fromBlock) {
+                this.moveResolver = moveResolver;
+                toId = allocator.getFirstLirInstructionId(toBlock);
+                fromId = allocator.getLastLirInstructionId(fromBlock);
+                assert fromId >= 0;
+            }
+
+            public void visit(Value phiIn, Value phiOut) {
+                assert !isRegister(phiOut) : "Out is a register: " + phiOut;
+                assert !isRegister(phiIn) : "In is a register: " + phiIn;
+                if (Value.ILLEGAL.equals(phiIn)) {
+                    // The value is not needed in this branch.
+                    return;
+                }
+                if (isVirtualStackSlot(phiIn) && isVirtualStackSlot(phiOut) && phiIn.equals(phiOut)) {
+                    // no need to handle virtual stack slots
+                    return;
+                }
+                TraceInterval toInterval = allocator.splitChildAtOpId(allocator.intervalFor(phiIn), toId, LIRInstruction.OperandMode.DEF);
+                if (isConstantValue(phiOut)) {
+                    numSSIResolutionMoves.increment();
+                    moveResolver.addMapping(asConstant(phiOut), toInterval);
+                } else {
+                    TraceInterval fromInterval = allocator.splitChildAtOpId(allocator.intervalFor(phiOut), fromId, LIRInstruction.OperandMode.DEF);
+                    if (fromInterval != toInterval) {
+                        numSSIResolutionMoves.increment();
+                        if (!(isStackSlotValue(toInterval.location()) && isStackSlotValue(fromInterval.location()))) {
+                            moveResolver.addMapping(fromInterval, toInterval);
+                        } else {
+                            numStackToStackMoves.increment();
+                            moveResolver.addMapping(fromInterval, toInterval);
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+}
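Resolution works edge by edge: for every value live across a block boundary, the location assigned to the interval's split child at the end of the predecessor is compared with the location at the start of the successor, and a move is recorded whenever they differ. Below is a minimal standalone sketch of that comparison step; the Map-based "locations" and the Move record are illustrative only and do not reflect the Graal data structures.

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;

    final class ToyEdgeResolver {
        record Move(String value, String from, String to) { }

        // locationAtFrom/locationAtTo map a live value to its assigned location ("rax", "stack[0]", ...).
        static List<Move> resolveEdge(Map<String, String> locationAtFrom, Map<String, String> locationAtTo) {
            List<Move> moves = new ArrayList<>();
            for (Map.Entry<String, String> e : locationAtTo.entrySet()) {
                String value = e.getKey();
                String to = e.getValue();
                String from = locationAtFrom.get(value);
                if (from != null && !from.equals(to)) {
                    moves.add(new Move(value, from, to)); // would be inserted at the edge by the move resolver
                }
            }
            return moves;
        }

        public static void main(String[] args) {
            List<Move> moves = resolveEdge(
                Map.of("v1", "rax", "v2", "stack[0]"),
                Map.of("v1", "rbx", "v2", "stack[0]"));
            System.out.println(moves); // only v1 needs a move: rax -> rbx
        }
    }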
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/lsra/TraceLinearScanWalker.java	Mon Nov 30 17:18:36 2015 +0100
@@ -0,0 +1,1126 @@
+/*
+ * Copyright (c) 2009, 2015, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+package com.oracle.graal.lir.alloc.trace.lsra;
+
+import static com.oracle.graal.lir.LIRValueUtil.isStackSlotValue;
+import static com.oracle.graal.lir.LIRValueUtil.isVariable;
+import static jdk.vm.ci.code.CodeUtil.isOdd;
+import static jdk.vm.ci.code.ValueUtil.asRegister;
+import static jdk.vm.ci.code.ValueUtil.isRegister;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.BitSet;
+import java.util.List;
+
+import jdk.vm.ci.code.BailoutException;
+import jdk.vm.ci.code.Register;
+import jdk.vm.ci.common.JVMCIError;
+import jdk.vm.ci.meta.Value;
+
+import com.oracle.graal.compiler.common.alloc.RegisterAllocationConfig.AllocatableRegisters;
+import com.oracle.graal.compiler.common.cfg.AbstractBlockBase;
+import com.oracle.graal.compiler.common.util.Util;
+import com.oracle.graal.debug.Debug;
+import com.oracle.graal.debug.Indent;
+import com.oracle.graal.lir.LIRInstruction;
+import com.oracle.graal.lir.StandardOp.BlockEndOp;
+import com.oracle.graal.lir.StandardOp.LabelOp;
+import com.oracle.graal.lir.StandardOp.ValueMoveOp;
+import com.oracle.graal.lir.alloc.lsra.OutOfRegistersException;
+import com.oracle.graal.lir.alloc.trace.lsra.TraceInterval.RegisterPriority;
+import com.oracle.graal.lir.alloc.trace.lsra.TraceInterval.SpillState;
+import com.oracle.graal.lir.alloc.trace.lsra.TraceInterval.State;
+
+/**
+ */
+final class TraceLinearScanWalker extends TraceIntervalWalker {
+
+    private Register[] availableRegs;
+
+    private final int[] usePos;
+    private final int[] blockPos;
+    private final BitSet isInMemory;
+
+    private List<TraceInterval>[] spillIntervals;
+
+    private TraceLocalMoveResolver moveResolver; // for ordering spill moves
+
+    private int minReg;
+
+    private int maxReg;
+
+    /**
+     * Only 10% of the lists in {@link #spillIntervals} are actually used. But when they are used,
+     * they can grow quite long. The maximum length observed was 45 (all numbers taken from a
+     * bootstrap run of Graal). Therefore, we initialize {@link #spillIntervals} with this marker
+     * value, and allocate a "real" list only on demand in {@link #setUsePos}.
+     */
+    private static final List<TraceInterval> EMPTY_LIST = new ArrayList<>(0);
+
+    // accessors mapped to same functions in class LinearScan
+    private int blockCount() {
+        return allocator.blockCount();
+    }
+
+    private AbstractBlockBase<?> blockAt(int idx) {
+        return allocator.blockAt(idx);
+    }
+
+    @SuppressWarnings("unused")
+    private AbstractBlockBase<?> blockOfOpWithId(int opId) {
+        return allocator.blockForId(opId);
+    }
+
+    TraceLinearScanWalker(TraceLinearScan allocator, FixedInterval unhandledFixedFirst, TraceInterval unhandledAnyFirst) {
+        super(allocator, unhandledFixedFirst, unhandledAnyFirst);
+
+        moveResolver = allocator.createMoveResolver();
+        int numRegs = allocator.getRegisters().length;
+        spillIntervals = Util.uncheckedCast(new List<?>[numRegs]);
+        for (int i = 0; i < numRegs; i++) {
+            spillIntervals[i] = EMPTY_LIST;
+        }
+        usePos = new int[numRegs];
+        blockPos = new int[numRegs];
+        isInMemory = new BitSet(numRegs);
+    }
+
+    private void initUseLists(boolean onlyProcessUsePos) {
+        for (Register register : availableRegs) {
+            int i = register.number;
+            usePos[i] = Integer.MAX_VALUE;
+
+            if (!onlyProcessUsePos) {
+                blockPos[i] = Integer.MAX_VALUE;
+                spillIntervals[i].clear();
+                isInMemory.clear(i);
+            }
+        }
+    }
+
+    private int maxRegisterNumber() {
+        return maxReg;
+    }
+
+    private int minRegisterNumber() {
+        return minReg;
+    }
+
+    private boolean isRegisterInRange(int reg) {
+        return reg >= minRegisterNumber() && reg <= maxRegisterNumber();
+    }
+
+    private void excludeFromUse(IntervalHint i) {
+        Value location = i.location();
+        int i1 = asRegister(location).number;
+        if (isRegisterInRange(i1)) {
+            usePos[i1] = 0;
+        }
+    }
+
+    private void setUsePos(TraceInterval interval, int usePos, boolean onlyProcessUsePos) {
+        if (usePos != -1) {
+            assert usePos != 0 : "must use excludeFromUse to set usePos to 0";
+            int i = asRegister(interval.location()).number;
+            if (isRegisterInRange(i)) {
+                if (this.usePos[i] > usePos) {
+                    this.usePos[i] = usePos;
+                }
+                if (!onlyProcessUsePos) {
+                    List<TraceInterval> list = spillIntervals[i];
+                    if (list == EMPTY_LIST) {
+                        list = new ArrayList<>(2);
+                        spillIntervals[i] = list;
+                    }
+                    list.add(interval);
+                    // set is in memory flag
+                    if (interval.inMemoryAt(currentPosition)) {
+                        isInMemory.set(i);
+                    }
+                }
+            }
+        }
+    }
+
+    private void setUsePos(FixedInterval interval, int usePos, boolean onlyProcessUsePos) {
+        assert onlyProcessUsePos;
+        if (usePos != -1) {
+            assert usePos != 0 : "must use excludeFromUse to set usePos to 0";
+            int i = asRegister(interval.location()).number;
+            if (isRegisterInRange(i)) {
+                if (this.usePos[i] > usePos) {
+                    this.usePos[i] = usePos;
+                }
+            }
+        }
+    }
+
+    private void setBlockPos(IntervalHint i, int blockPos) {
+        if (blockPos != -1) {
+            int reg = asRegister(i.location()).number;
+            if (isRegisterInRange(reg)) {
+                if (this.blockPos[reg] > blockPos) {
+                    this.blockPos[reg] = blockPos;
+                }
+                if (usePos[reg] > blockPos) {
+                    usePos[reg] = blockPos;
+                }
+            }
+        }
+    }
+
+    private void freeExcludeActiveFixed() {
+        FixedInterval interval = activeFixedList.getFixed();
+        while (interval != FixedInterval.EndMarker) {
+            assert isRegister(interval.location()) : "active interval must have a register assigned";
+            excludeFromUse(interval);
+            interval = interval.next;
+        }
+    }
+
+    private void freeExcludeActiveAny() {
+        TraceInterval interval = activeAnyList.getAny();
+        while (interval != TraceInterval.EndMarker) {
+            assert isRegister(interval.location()) : "active interval must have a register assigned";
+            excludeFromUse(interval);
+            interval = interval.next;
+        }
+    }
+
+    private void freeCollectInactiveFixed(TraceInterval current) {
+        FixedInterval interval = inactiveFixedList.getFixed();
+        while (interval != FixedInterval.EndMarker) {
+            if (current.to() <= interval.from()) {
+                assert interval.intersectsAt(current) == -1 : "must not intersect";
+                setUsePos(interval, interval.from(), true);
+            } else {
+                setUsePos(interval, interval.currentIntersectsAt(current), true);
+            }
+            interval = interval.next;
+        }
+    }
+
+    private void spillExcludeActiveFixed() {
+        FixedInterval interval = activeFixedList.getFixed();
+        while (interval != FixedInterval.EndMarker) {
+            excludeFromUse(interval);
+            interval = interval.next;
+        }
+    }
+
+    private void spillBlockInactiveFixed(TraceInterval current) {
+        FixedInterval interval = inactiveFixedList.getFixed();
+        while (interval != FixedInterval.EndMarker) {
+            if (current.to() > interval.currentFrom()) {
+                setBlockPos(interval, interval.currentIntersectsAt(current));
+            } else {
+                assert interval.currentIntersectsAt(current) == -1 : "invalid optimization: intervals intersect";
+            }
+
+            interval = interval.next;
+        }
+    }
+
+    private void spillCollectActiveAny(RegisterPriority registerPriority) {
+        TraceInterval interval = activeAnyList.getAny();
+        while (interval != TraceInterval.EndMarker) {
+            setUsePos(interval, Math.min(interval.nextUsage(registerPriority, currentPosition), interval.to()), false);
+            interval = interval.next;
+        }
+    }
+
+    @SuppressWarnings("unused")
+    private int insertIdAtBasicBlockBoundary(int opId) {
+        assert allocator.isBlockBegin(opId) : "Not a block begin: " + opId;
+        assert allocator.instructionForId(opId) instanceof LabelOp;
+        assert allocator.instructionForId(opId - 2) instanceof BlockEndOp;
+
+        AbstractBlockBase<?> toBlock = allocator.blockForId(opId);
+        AbstractBlockBase<?> fromBlock = allocator.blockForId(opId - 2);
+
+        if (fromBlock.getSuccessorCount() == 1) {
+            // insert move in predecessor
+            return opId - 2;
+        }
+        assert toBlock.getPredecessorCount() == 1 : String.format("Critical Edge? %s->%s", fromBlock, toBlock);
+        // insert move in successor
+        return opId + 2;
+    }
+
+    private void insertMove(int operandId, TraceInterval srcIt, TraceInterval dstIt) {
+        // Output all moves here. When source and target are equal, the move is
+        // optimized away later in assignRegNums.
+
+        int opId = (operandId + 1) & ~1;
+        AbstractBlockBase<?> opBlock = allocator.blockForId(opId);
+        assert opId > 0 && allocator.blockForId(opId - 2) == opBlock : "cannot insert move at block boundary";
+
+        // Calculate the index of the instruction inside the instruction list of the current
+        // block. The minimal index (for a block with no spill moves) can be calculated because
+        // the numbering of instructions is known. When the block already contains spill moves,
+        // the index must be increased until the correct index is reached.
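+        // For example (illustrative ids only): if the block's first instruction has id 20 and
+        // opId is 28, the minimal index is (28 - 20) >> 1 = 4, because instruction ids are
+        // assigned in steps of 2.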
+        List<LIRInstruction> instructions = allocator.getLIR().getLIRforBlock(opBlock);
+        int index = (opId - instructions.get(0).id()) >> 1;
+        assert instructions.get(index).id() <= opId : "error in calculation";
+
+        while (instructions.get(index).id() != opId) {
+            index++;
+            assert 0 <= index && index < instructions.size() : "index out of bounds";
+        }
+        assert 1 <= index && index < instructions.size() : "index out of bounds";
+        assert instructions.get(index).id() == opId : "error in calculation";
+
+        // insert new instruction before instruction at position index
+        moveResolver.moveInsertPosition(instructions, index);
+        moveResolver.addMapping(srcIt, dstIt);
+    }
+
+    private int findOptimalSplitPos(AbstractBlockBase<?> minBlock, AbstractBlockBase<?> maxBlock, int maxSplitPos) {
+        int fromBlockNr = minBlock.getLinearScanNumber();
+        int toBlockNr = maxBlock.getLinearScanNumber();
+
+        assert 0 <= fromBlockNr && fromBlockNr < blockCount() : "out of range";
+        assert 0 <= toBlockNr && toBlockNr < blockCount() : "out of range";
+        assert fromBlockNr < toBlockNr : "must cross block boundary";
+
+        // Try to split at the end of maxBlock. If this would be after
+        // maxSplitPos, then use the beginning of maxBlock instead.
+        int optimalSplitPos = allocator.getLastLirInstructionId(maxBlock) + 2;
+        if (optimalSplitPos > maxSplitPos) {
+            optimalSplitPos = allocator.getFirstLirInstructionId(maxBlock);
+        }
+
+        // minimal block probability
+        double minProbability = maxBlock.probability();
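+        // For example (illustrative numbers only): with blocks B1 (p = 0.5), B2 (p = 0.1) and
+        // B3 (p = 0.3) lying between minBlock and maxBlock, the loop below settles on the end
+        // of B2, so spill and reload moves end up on the least frequently executed path.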
+        for (int i = toBlockNr - 1; i >= fromBlockNr; i--) {
+            AbstractBlockBase<?> cur = blockAt(i);
+
+            if (cur.probability() < minProbability) {
+                // Block with lower probability found. Split at the end of this block.
+                minProbability = cur.probability();
+                optimalSplitPos = allocator.getLastLirInstructionId(cur) + 2;
+            }
+        }
+        assert optimalSplitPos > allocator.maxOpId() || allocator.isBlockBegin(optimalSplitPos) : "algorithm must move split pos to block boundary";
+
+        return optimalSplitPos;
+    }
+
+    @SuppressWarnings({"unused"})
+    private int findOptimalSplitPos(TraceInterval interval, int minSplitPos, int maxSplitPos, boolean doLoopOptimization) {
+        int optimalSplitPos = findOptimalSplitPos0(minSplitPos, maxSplitPos);
+        if (Debug.isLogEnabled()) {
+            Debug.log("optimal split position: %d", optimalSplitPos);
+        }
+        return optimalSplitPos;
+    }
+
+    private int findOptimalSplitPos0(int minSplitPos, int maxSplitPos) {
+        if (minSplitPos == maxSplitPos) {
+            // trivial case, no optimization of split position possible
+            if (Debug.isLogEnabled()) {
+                Debug.log("min-pos and max-pos are equal, no optimization possible");
+            }
+            return minSplitPos;
+
+        }
+        assert minSplitPos < maxSplitPos : "must be true then";
+        assert minSplitPos > 0 : "cannot access minSplitPos - 1 otherwise";
+
+        // reason for using minSplitPos - 1: when the minimal split pos is exactly at the
+        // beginning of a block, then minSplitPos is also a possible split position.
+        // Use the block before as minBlock, because then minBlock.lastLirInstructionId() + 2 ==
+        // minSplitPos
+        AbstractBlockBase<?> minBlock = allocator.blockForId(minSplitPos - 1);
+
+        // reason for using maxSplitPos - 1: otherwise there would be an assert on failure
+        // when an interval ends at the end of the last block of the method
+        // (in this case, maxSplitPos == allocator().maxLirOpId() + 2, and there is no
+        // block at this opId)
+        AbstractBlockBase<?> maxBlock = allocator.blockForId(maxSplitPos - 1);
+
+        assert minBlock.getLinearScanNumber() <= maxBlock.getLinearScanNumber() : "invalid order";
+        if (minBlock == maxBlock) {
+            // split position cannot be moved to a block boundary, so split as late as possible
+            if (Debug.isLogEnabled()) {
+                Debug.log("cannot move split pos to block boundary because minPos and maxPos are in same block");
+            }
+            return maxSplitPos;
+
+        }
+        // search for the optimal block boundary between minSplitPos and maxSplitPos
+        if (Debug.isLogEnabled()) {
+            Debug.log("moving split pos to optimal block boundary between block B%d and B%d", minBlock.getId(), maxBlock.getId());
+        }
+
+        return findOptimalSplitPos(minBlock, maxBlock, maxSplitPos);
+    }
+
+    // split an interval at the optimal position between minSplitPos and
+    // maxSplitPos in two parts:
+    // 1) the left part already has a location assigned
+    // 2) the right part is sorted into the unhandled list
+    @SuppressWarnings("try")
+    private void splitBeforeUsage(TraceInterval interval, int minSplitPos, int maxSplitPos) {
+
+        try (Indent indent = Debug.logAndIndent("splitting interval %s between %d and %d", interval, minSplitPos, maxSplitPos)) {
+
+            assert interval.from() < minSplitPos : "cannot split at start of interval";
+            assert currentPosition < minSplitPos : "cannot split before current position";
+            assert minSplitPos <= maxSplitPos : "invalid order";
+            assert maxSplitPos <= interval.to() : "cannot split after end of interval";
+
+            final int optimalSplitPos = findOptimalSplitPos(interval, minSplitPos, maxSplitPos, true);
+
+            if (optimalSplitPos == interval.to() && interval.nextUsage(RegisterPriority.MustHaveRegister, minSplitPos) == Integer.MAX_VALUE) {
+                // the split position would be just before the end of the interval,
+                // so no split is necessary at all
+                if (Debug.isLogEnabled()) {
+                    Debug.log("no split necessary because optimal split position is at end of interval");
+                }
+                return;
+            }
+            // This must be calculated before the actual split is performed and before the split
+            // position is moved to an odd opId.
+            final int optimalSplitPosFinal;
+            boolean blockBegin = allocator.isBlockBegin(optimalSplitPos);
+            if (blockBegin) {
+                assert (optimalSplitPos & 1) == 0 : "Block begins must be even: " + optimalSplitPos;
+                // move position after the label (odd optId)
+                optimalSplitPosFinal = optimalSplitPos + 1;
+            } else {
+                // move position before actual instruction (odd opId)
+                optimalSplitPosFinal = (optimalSplitPos - 1) | 1;
+            }
+
+            // TODO( je) better define what min split pos max split pos mean.
+            assert minSplitPos <= optimalSplitPosFinal && optimalSplitPosFinal <= maxSplitPos || minSplitPos == maxSplitPos && optimalSplitPosFinal == minSplitPos - 1 : "out of range";
+            assert optimalSplitPosFinal <= interval.to() : "cannot split after end of interval";
+            assert optimalSplitPosFinal > interval.from() : "cannot split at start of interval";
+
+            if (Debug.isLogEnabled()) {
+                Debug.log("splitting at position %d", optimalSplitPosFinal);
+            }
+            assert optimalSplitPosFinal > currentPosition : "Can not split interval " + interval + " at current position: " + currentPosition;
+
+            // was:
+            // assert isBlockBegin || ((optimalSplitPos1 & 1) == 1) :
+            // "split pos must be odd when not on block boundary";
+            // assert !isBlockBegin || ((optimalSplitPos1 & 1) == 0) :
+            // "split pos must be even on block boundary";
+            assert (optimalSplitPosFinal & 1) == 1 : "split pos must be odd";
+
+            // TODO (je) duplicate code. try to fold
+            if (optimalSplitPosFinal == interval.to() && interval.nextUsage(RegisterPriority.MustHaveRegister, minSplitPos) == Integer.MAX_VALUE) {
+                // the split position would be just before the end of the interval,
+                // so no split is necessary at all
+                if (Debug.isLogEnabled()) {
+                    Debug.log("no split necessary because optimal split position is at end of interval");
+                }
+                return;
+            }
+            TraceInterval splitPart = interval.split(optimalSplitPosFinal, allocator);
+
+            boolean moveNecessary = true;
+            splitPart.setInsertMoveWhenActivated(moveNecessary);
+
+            assert splitPart.from() >= currentPosition : "cannot append new interval before current walk position";
+            unhandledAnyList.addToListSortedByStartAndUsePositions(splitPart);
+
+            if (Debug.isLogEnabled()) {
+                Debug.log("left interval  %s: %s", moveNecessary ? "      " : "", interval.logString(allocator));
+                Debug.log("right interval %s: %s", moveNecessary ? "(move)" : "", splitPart.logString(allocator));
+            }
+        }
+    }
+
+    // split an interval at the optimal position between minSplitPos and
+    // maxSplitPos in two parts:
+    // 1) the left part already has a location assigned
+    // 2) the right part is always on the stack and therefore ignored in further processing
+    @SuppressWarnings("try")
+    private void splitForSpilling(TraceInterval interval) {
+        // calculate allowed range of splitting position
+        int maxSplitPos = currentPosition;
+        int previousUsage = interval.previousUsage(RegisterPriority.ShouldHaveRegister, maxSplitPos);
+        if (previousUsage == currentPosition) {
+            /*
+             * If there is a usage with ShouldHaveRegister priority at the current position fall
+             * back to MustHaveRegister priority. This only happens if register priority was
+             * downgraded to MustHaveRegister in #allocLockedRegister.
+             */
+            previousUsage = interval.previousUsage(RegisterPriority.MustHaveRegister, maxSplitPos);
+        }
+        int minSplitPos = Math.max(previousUsage + 1, interval.from());
+
+        try (Indent indent = Debug.logAndIndent("splitting and spilling interval %s between %d and %d", interval, minSplitPos, maxSplitPos)) {
+
+            assert interval.state == State.Active : "why spill interval that is not active?";
+            assert interval.from() <= minSplitPos : "cannot split before start of interval";
+            assert minSplitPos <= maxSplitPos : "invalid order";
+            assert maxSplitPos < interval.to() : "cannot split at end of interval";
+            assert currentPosition < interval.to() : "interval must not end before current position";
+
+            if (minSplitPos == interval.from()) {
+                // the whole interval is never used, so spill it entirely to memory
+
+                try (Indent indent2 = Debug.logAndIndent("spilling entire interval because split pos is at beginning of interval (use positions: %d)", interval.usePosList().size())) {
+
+                    assert interval.firstUsage(RegisterPriority.MustHaveRegister) > currentPosition : String.format("interval %s must not have use position before currentPosition %d", interval,
+                                    currentPosition);
+
+                    allocator.assignSpillSlot(interval);
+                    handleSpillSlot(interval);
+                    changeSpillState(interval, minSplitPos);
+
+                    // Also kick parent intervals out of their register and into memory when they
+                    // have no use position. This avoids a short interval in a register surrounded
+                    // by intervals in memory, and thus useless moves from memory to register and back.
+                    TraceInterval parent = interval;
+                    while (parent != null && parent.isSplitChild()) {
+                        parent = parent.getSplitChildBeforeOpId(parent.from());
+
+                        if (isRegister(parent.location())) {
+                            if (parent.firstUsage(RegisterPriority.ShouldHaveRegister) == Integer.MAX_VALUE) {
+                                // parent is never used, so kick it out of its assigned register
+                                if (Debug.isLogEnabled()) {
+                                    Debug.log("kicking out interval %d out of its register because it is never used", parent.operandNumber);
+                                }
+                                allocator.assignSpillSlot(parent);
+                                handleSpillSlot(parent);
+                            } else {
+                                // do not go further back because the register is actually used by
+                                // the interval
+                                parent = null;
+                            }
+                        }
+                    }
+                }
+
+            } else {
+                // search optimal split pos, split interval and spill only the right hand part
+                int optimalSplitPos = findOptimalSplitPos(interval, minSplitPos, maxSplitPos, false);
+
+                assert minSplitPos <= optimalSplitPos && optimalSplitPos <= maxSplitPos : "out of range";
+                assert optimalSplitPos < interval.to() : "cannot split at end of interval";
+                assert optimalSplitPos >= interval.from() : "cannot split before start of interval";
+
+                if (!allocator.isBlockBegin(optimalSplitPos)) {
+                    // move position before actual instruction (odd opId)
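+                    // For example, an even opId 10 becomes (10 - 1) | 1 == 9, while an odd opId 11
+                    // stays 11, so the split always lands on an odd (before-instruction) position.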
+                    optimalSplitPos = (optimalSplitPos - 1) | 1;
+                }
+
+                try (Indent indent2 = Debug.logAndIndent("splitting at position %d", optimalSplitPos)) {
+                    assert allocator.isBlockBegin(optimalSplitPos) || ((optimalSplitPos & 1) == 1) : "split pos must be odd when not on block boundary";
+                    assert !allocator.isBlockBegin(optimalSplitPos) || ((optimalSplitPos & 1) == 0) : "split pos must be even on block boundary";
+
+                    TraceInterval spilledPart = interval.split(optimalSplitPos, allocator);
+                    allocator.assignSpillSlot(spilledPart);
+                    handleSpillSlot(spilledPart);
+                    changeSpillState(spilledPart, optimalSplitPos);
+
+                    if (!allocator.isBlockBegin(optimalSplitPos)) {
+                        if (Debug.isLogEnabled()) {
+                            Debug.log("inserting move from interval %s to %s", interval, spilledPart);
+                        }
+                        insertMove(optimalSplitPos, interval, spilledPart);
+                    } else {
+                        if (Debug.isLogEnabled()) {
+                            Debug.log("no need to insert move. done by data-flow resolution");
+                        }
+                    }
+
+                    // the currentSplitChild is needed later when moves are inserted for reloading
+                    assert spilledPart.currentSplitChild() == interval : "overwriting wrong currentSplitChild";
+                    spilledPart.makeCurrentSplitChild();
+
+                    if (Debug.isLogEnabled()) {
+                        Debug.log("left interval: %s", interval.logString(allocator));
+                        Debug.log("spilled interval   : %s", spilledPart.logString(allocator));
+                    }
+                }
+            }
+        }
+    }
+
+    /**
+     * Change spill state of an interval.
+     *
+     * Note: called during register allocation.
+     *
+     * @param spillPos position of the spill
+     */
+    private void changeSpillState(TraceInterval interval, int spillPos) {
+        if (TraceLinearScan.Options.LIROptTraceRAEliminateSpillMoves.getValue()) {
+            switch (interval.spillState()) {
+                case NoSpillStore:
+                    final int minSpillPos = interval.spillDefinitionPos();
+                    final int maxSpillPos = spillPos;
+
+                    final int optimalSpillPos = findOptimalSpillPos(minSpillPos, maxSpillPos);
+
+                    // assert !allocator.isBlockBegin(optimalSpillPos);
+                    assert !allocator.isBlockEnd(optimalSpillPos);
+                    assert (optimalSpillPos & 1) == 0 : "Spill pos must be even";
+
+                    interval.setSpillDefinitionPos(optimalSpillPos);
+                    interval.setSpillState(SpillState.SpillStore);
+                    break;
+                case SpillStore:
+                case StartInMemory:
+                case NoOptimization:
+                case NoDefinitionFound:
+                    // nothing to do
+                    break;
+
+                default:
+                    throw new BailoutException("other states not allowed at this time");
+            }
+        } else {
+            interval.setSpillState(SpillState.NoOptimization);
+        }
+    }
+
+    /**
+     * @param minSpillPos minimal spill position
+     * @param maxSpillPos maximal spill position
+     */
+    private int findOptimalSpillPos(int minSpillPos, int maxSpillPos) {
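+        // The & (~1) below clears the lowest bit so that the returned spill position is even,
+        // matching the "Spill pos must be even" assertion in changeSpillState.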
+        int optimalSpillPos = findOptimalSpillPos0(minSpillPos, maxSpillPos) & (~1);
+        if (Debug.isLogEnabled()) {
+            Debug.log("optimal spill position: %d", optimalSpillPos);
+        }
+        return optimalSpillPos;
+    }
+
+    private int findOptimalSpillPos0(int minSpillPos, int maxSpillPos) {
+        if (minSpillPos == maxSpillPos) {
+            // trivial case, no optimization of split position possible
+            if (Debug.isLogEnabled()) {
+                Debug.log("min-pos and max-pos are equal, no optimization possible");
+            }
+            return minSpillPos;
+
+        }
+        assert minSpillPos < maxSpillPos : "must be true then";
+        assert minSpillPos >= 0 : "cannot access minSplitPos - 1 otherwise";
+
+        AbstractBlockBase<?> minBlock = allocator.blockForId(minSpillPos);
+        AbstractBlockBase<?> maxBlock = allocator.blockForId(maxSpillPos);
+
+        assert minBlock.getLinearScanNumber() <= maxBlock.getLinearScanNumber() : "invalid order";
+        if (minBlock == maxBlock) {
+            // split position cannot be moved to a block boundary, so split as late as possible
+            if (Debug.isLogEnabled()) {
+                Debug.log("cannot move split pos to block boundary because minPos and maxPos are in same block");
+            }
+            return maxSpillPos;
+
+        }
+        // search optimal block boundary between minSplitPos and maxSplitPos
+        if (Debug.isLogEnabled()) {
+            Debug.log("moving split pos to optimal block boundary between block B%d and B%d", minBlock.getId(), maxBlock.getId());
+        }
+
+        // currently using the same heuristic as for splitting
+        return findOptimalSpillPos(minBlock, maxBlock, maxSpillPos);
+    }
+
+    private int findOptimalSpillPos(AbstractBlockBase<?> minBlock, AbstractBlockBase<?> maxBlock, int maxSplitPos) {
+        int fromBlockNr = minBlock.getLinearScanNumber();
+        int toBlockNr = maxBlock.getLinearScanNumber();
+
+        assert 0 <= fromBlockNr && fromBlockNr < blockCount() : "out of range";
+        assert 0 <= toBlockNr && toBlockNr < blockCount() : "out of range";
+        assert fromBlockNr < toBlockNr : "must cross block boundary";
+
+        /*
+         * Try to split at end of maxBlock. If this would be after maxSplitPos, then use the begin
+         * of maxBlock. We use last instruction -2 because we want to insert the move before the
+         * block end op.
+         */
+        int optimalSplitPos = allocator.getLastLirInstructionId(maxBlock) - 2;
+        if (optimalSplitPos > maxSplitPos) {
+            optimalSplitPos = allocator.getFirstLirInstructionId(maxBlock);
+        }
+
+        // minimal block probability
+        double minProbability = maxBlock.probability();
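+        // Search the range for the block with the lowest execution probability and place the
+        // spill move at the end of that block, e.g. if B2 has probability 0.1 while its
+        // neighbours have 0.9, the move is emitted at the end of B2.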
+        for (int i = toBlockNr - 1; i >= fromBlockNr; i--) {
+            AbstractBlockBase<?> cur = blockAt(i);
+
+            if (cur.probability() < minProbability) {
+                // Block with lower probability found. Split at the end of this block.
+                minProbability = cur.probability();
+                optimalSplitPos = allocator.getLastLirInstructionId(cur) - 2;
+            }
+        }
+        assert optimalSplitPos > allocator.maxOpId() || allocator.isBlockBegin(optimalSplitPos) || allocator.isBlockEnd(optimalSplitPos + 2) : "algorithm must move split pos to block boundary";
+
+        return optimalSplitPos;
+    }
+
+    /**
+     * This is called for every interval that is assigned to a stack slot.
+     */
+    private static void handleSpillSlot(TraceInterval interval) {
+        assert interval.location() != null && (interval.canMaterialize() || isStackSlotValue(interval.location())) : "interval not assigned to a stack slot " + interval;
+        // Do nothing. Stack slots are not processed in this implementation.
+    }
+
+    private void splitStackInterval(TraceInterval interval) {
+        int minSplitPos = currentPosition + 1;
+        int maxSplitPos = Math.min(interval.firstUsage(RegisterPriority.ShouldHaveRegister), interval.to());
+
+        splitBeforeUsage(interval, minSplitPos, maxSplitPos);
+    }
+
+    private void splitWhenPartialRegisterAvailable(TraceInterval interval, int registerAvailableUntil) {
+        int minSplitPos = Math.max(interval.previousUsage(RegisterPriority.ShouldHaveRegister, registerAvailableUntil), interval.from() + 1);
+        splitBeforeUsage(interval, minSplitPos, registerAvailableUntil);
+    }
+
+    private void splitAndSpillInterval(TraceInterval interval) {
+        assert interval.state == State.Active || interval.state == State.Inactive : "other states not allowed";
+
+        int currentPos = currentPosition;
+        if (interval.state == State.Inactive) {
+            // the interval is currently inactive, so no spill slot is needed for now.
+            // when the split part is activated, the interval has a new chance to get a register,
+            // so in the best case no stack slot is necessary
+            throw JVMCIError.shouldNotReachHere("TraceIntervals can not be inactive!");
+
+        } else {
+            // search the position where the interval must have a register and split
+            // at the optimal position before.
+            // The new created part is added to the unhandled list and will get a register
+            // when it is activated
+            int minSplitPos = currentPos + 1;
+            int maxSplitPos = interval.nextUsage(RegisterPriority.MustHaveRegister, minSplitPos);
+
+            if (maxSplitPos <= interval.to()) {
+                splitBeforeUsage(interval, minSplitPos, maxSplitPos);
+            } else {
+                Debug.log("No more usage, no need to split: %s", interval);
+            }
+
+            assert interval.nextUsage(RegisterPriority.MustHaveRegister, currentPos) == Integer.MAX_VALUE : "the remaining part is spilled to stack and therefore has no register";
+            splitForSpilling(interval);
+        }
+    }
+
+    @SuppressWarnings("try")
+    private boolean allocFreeRegister(TraceInterval interval) {
+        try (Indent indent = Debug.logAndIndent("trying to find free register for %s", interval)) {
+
+            initUseLists(true);
+            freeExcludeActiveFixed();
+            freeCollectInactiveFixed(interval);
+            freeExcludeActiveAny();
+            // freeCollectUnhandled(fixedKind, cur);
+
+            // usePos contains the start of the next interval that has this register assigned
+            // (either as a fixed register or a normal allocated register in the past)
+            // only intervals overlapping with cur are processed, non-overlapping intervals can be
+            // ignored safely
+            if (Debug.isLogEnabled()) {
+                // Enable this logging to see all register states
+                try (Indent indent2 = Debug.logAndIndent("state of registers:")) {
+                    for (Register register : availableRegs) {
+                        int i = register.number;
+                        Debug.log("reg %d (%s): usePos: %d", register.number, register, usePos[i]);
+                    }
+                }
+            }
+
+            Register hint = null;
+            IntervalHint locationHint = interval.locationHint(true);
+            if (locationHint != null && locationHint.location() != null && isRegister(locationHint.location())) {
+                hint = asRegister(locationHint.location());
+                if (Debug.isLogEnabled()) {
+                    Debug.log("hint register %3d (%4s) from interval %s", hint.number, hint, locationHint);
+                }
+            }
+            assert interval.location() == null : "register already assigned to interval";
+
+            // the register must be free at least until this position
+            int regNeededUntil = interval.from() + 1;
+            int intervalTo = interval.to();
+
+            boolean needSplit = false;
+            int splitPos = -1;
+
+            Register reg = null;
+            Register minFullReg = null;
+            Register maxPartialReg = null;
+
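+            // minFullReg: among registers free for the whole interval, the one whose next use
+            // comes earliest (the hint register is preferred); maxPartialReg: among registers
+            // free only for a prefix of the interval, the one that stays free the longest
+            // (again preferring the hint register).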
+            for (Register availableReg : availableRegs) {
+                int number = availableReg.number;
+                if (usePos[number] >= intervalTo) {
+                    // this register is free for the full interval
+                    if (minFullReg == null || availableReg.equals(hint) || (usePos[number] < usePos[minFullReg.number] && !minFullReg.equals(hint))) {
+                        minFullReg = availableReg;
+                    }
+                } else if (usePos[number] > regNeededUntil) {
+                    // this register is at least free until regNeededUntil
+                    if (maxPartialReg == null || availableReg.equals(hint) || (usePos[number] > usePos[maxPartialReg.number] && !maxPartialReg.equals(hint))) {
+                        maxPartialReg = availableReg;
+                    }
+                }
+            }
+
+            if (minFullReg != null) {
+                reg = minFullReg;
+            } else if (maxPartialReg != null) {
+                needSplit = true;
+                reg = maxPartialReg;
+            } else {
+                return false;
+            }
+
+            splitPos = usePos[reg.number];
+            interval.assignLocation(reg.asValue(interval.kind()));
+            if (Debug.isLogEnabled()) {
+                Debug.log("selected register %d (%s)", reg.number, reg);
+            }
+
+            assert splitPos > 0 : "invalid splitPos";
+            if (needSplit) {
+                // register not available for full interval, so split it
+                splitWhenPartialRegisterAvailable(interval, splitPos);
+            }
+            // only return true if interval is completely assigned
+            return true;
+        }
+    }
+
+    private void splitAndSpillIntersectingIntervals(Register reg) {
+        assert reg != null : "no register assigned";
+
+        for (int i = 0; i < spillIntervals[reg.number].size(); i++) {
+            TraceInterval interval = spillIntervals[reg.number].get(i);
+            removeFromList(interval);
+            splitAndSpillInterval(interval);
+        }
+    }
+
+    // Split an Interval and spill it to memory so that cur can be placed in a register
+    @SuppressWarnings("try")
+    private void allocLockedRegister(TraceInterval interval) {
+        try (Indent indent = Debug.logAndIndent("alloc locked register: need to split and spill to get register for %s", interval)) {
+
+            // the register must be free at least until this position
+            int firstUsage = interval.firstUsage(RegisterPriority.MustHaveRegister);
+            int firstShouldHaveUsage = interval.firstUsage(RegisterPriority.ShouldHaveRegister);
+            int regNeededUntil = Math.min(firstUsage, interval.from() + 1);
+            int intervalTo = interval.to();
+            assert regNeededUntil >= 0 && regNeededUntil < Integer.MAX_VALUE : "interval has no use";
+
+            Register reg;
+            Register ignore;
+            /*
+             * In the common case we don't spill registers that have _any_ use position that is
+             * closer than the next use of the current interval, but if we can't spill the current
+             * interval we weaken this strategy and also allow spilling of intervals that have only
+             * non-mandatory requirements (no MustHaveRegister use position).
+             */
+            for (RegisterPriority registerPriority = RegisterPriority.LiveAtLoopEnd; true; registerPriority = RegisterPriority.MustHaveRegister) {
+                // collect current usage of registers
+                initUseLists(false);
+                spillExcludeActiveFixed();
+                // spillBlockUnhandledFixed(cur);
+                spillBlockInactiveFixed(interval);
+                spillCollectActiveAny(registerPriority);
+                if (Debug.isLogEnabled()) {
+                    printRegisterState();
+                }
+
+                reg = null;
+                ignore = interval.location() != null && isRegister(interval.location()) ? asRegister(interval.location()) : null;
+
+                for (Register availableReg : availableRegs) {
+                    int number = availableReg.number;
+                    if (availableReg.equals(ignore)) {
+                        // this register must be ignored
+                    } else if (usePos[number] > regNeededUntil) {
+                        /*
+                         * If the use position is the same, prefer registers (active intervals)
+                         * where the value is already on the stack.
+                         */
+                        if (reg == null || (usePos[number] > usePos[reg.number]) || (usePos[number] == usePos[reg.number] && (!isInMemory.get(reg.number) && isInMemory.get(number)))) {
+                            reg = availableReg;
+                        }
+                    }
+                }
+
+                if (Debug.isLogEnabled()) {
+                    Debug.log("Register Selected: %s", reg);
+                }
+
+                int regUsePos = (reg == null ? 0 : usePos[reg.number]);
+                if (regUsePos <= firstShouldHaveUsage) {
+                    /* Check if there is another interval that is already in memory. */
+                    if (reg == null || interval.inMemoryAt(currentPosition) || !isInMemory.get(reg.number)) {
+                        if (Debug.isLogEnabled()) {
+                            Debug.log("able to spill current interval. firstUsage(register): %d, usePos: %d", firstUsage, regUsePos);
+                        }
+
+                        if (firstUsage <= interval.from() + 1) {
+                            if (registerPriority.equals(RegisterPriority.LiveAtLoopEnd)) {
+                                /*
+                                 * Tool of last resort: we cannot spill the current interval, so we
+                                 * try to spill an active interval that has a usage but does not
+                                 * require a register.
+                                 */
+                                Debug.log("retry with register priority must have register");
+                                continue;
+                            }
+                            String description = "cannot spill interval (" + interval + ") that is used in first instruction (possible reason: no register found) firstUsage=" + firstUsage +
+                                            ", interval.from()=" + interval.from() + "; already used candidates: " + Arrays.toString(availableRegs);
+                            /*
+                             * assign a reasonable register and do a bailout in product mode to
+                             * avoid errors
+                             */
+                            allocator.assignSpillSlot(interval);
+                            Debug.dump(allocator.getLIR(), description);
+                            allocator.printIntervals(description);
+                            throw new OutOfRegistersException("LinearScan: no register found", description);
+                        }
+
+                        splitAndSpillInterval(interval);
+                        return;
+                    }
+                }
+                // common case: break out of the loop
+                break;
+            }
+
+            boolean needSplit = blockPos[reg.number] <= intervalTo;
+
+            int splitPos = blockPos[reg.number];
+
+            if (Debug.isLogEnabled()) {
+                Debug.log("decided to use register %d", reg.number);
+            }
+            assert splitPos > 0 : "invalid splitPos";
+            assert needSplit || splitPos > interval.from() : "splitting interval at from";
+
+            interval.assignLocation(reg.asValue(interval.kind()));
+            if (needSplit) {
+                // register not available for full interval, so split it
+                splitWhenPartialRegisterAvailable(interval, splitPos);
+            }
+
+            // perform splitting and spilling for all affected intervals
+            splitAndSpillIntersectingIntervals(reg);
+            return;
+        }
+    }
+
+    @SuppressWarnings("try")
+    private void printRegisterState() {
+        try (Indent indent2 = Debug.logAndIndent("state of registers:")) {
+            for (Register reg : availableRegs) {
+                int i = reg.number;
+                try (Indent indent3 = Debug.logAndIndent("reg %d: usePos: %d, blockPos: %d, inMemory: %b, intervals: ", i, usePos[i], blockPos[i], isInMemory.get(i))) {
+                    for (int j = 0; j < spillIntervals[i].size(); j++) {
+                        Debug.log("%s", spillIntervals[i].get(j));
+                    }
+                }
+            }
+        }
+    }
+
+    private boolean noAllocationPossible(TraceInterval interval) {
+        if (allocator.callKillsRegisters()) {
+            // fast calculation of intervals that can never get a register because the
+            // next instruction is a call that blocks all registers
+            // Note: this only works if a call kills all registers
+
+            // check if this interval is the result of a split operation
+            // (an interval got a register until this position)
+            int pos = interval.from();
+            if (isOdd(pos)) {
+                // the current instruction is a call that blocks all registers
+                if (pos < allocator.maxOpId() && allocator.hasCall(pos + 1) && interval.to() > pos + 1) {
+                    if (Debug.isLogEnabled()) {
+                        Debug.log("free register cannot be available because all registers blocked by following call");
+                    }
+
+                    // safety check that there is really no register available
+                    assert !allocFreeRegister(interval) : "found a register for this interval";
+                    return true;
+                }
+            }
+        }
+        return false;
+    }
+
+    private void initVarsForAlloc(TraceInterval interval) {
+        AllocatableRegisters allocatableRegisters = allocator.getRegisterAllocationConfig().getAllocatableRegisters(interval.kind().getPlatformKind());
+        availableRegs = allocatableRegisters.allocatableRegisters;
+        minReg = allocatableRegisters.minRegisterNumber;
+        maxReg = allocatableRegisters.maxRegisterNumber;
+    }
+
+    private static boolean isMove(LIRInstruction op, TraceInterval from, TraceInterval to) {
+        if (op instanceof ValueMoveOp) {
+            ValueMoveOp move = (ValueMoveOp) op;
+            if (isVariable(move.getInput()) && isVariable(move.getResult())) {
+                return move.getInput() != null && move.getInput().equals(from.operand) && move.getResult() != null && move.getResult().equals(to.operand);
+            }
+        }
+        return false;
+    }
+
+    // optimization (especially for phi functions of nested loops):
+    // assign same spill slot to non-intersecting intervals
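+    // Sketch of the pattern matched below: cur is defined by a move from registerHint at its
+    // start (beginPos) and copied back to registerHint at its end (endPos); if registerHint is
+    // already spilled at beginPos, cur can reuse registerHint's spill slot, and the use positions
+    // at the two moves are deleted so that neither part is pulled into a register.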
+    private void combineSpilledIntervals(TraceInterval interval) {
+        if (interval.isSplitChild()) {
+            // optimization is only suitable for split parents
+            return;
+        }
+
+        IntervalHint locationHint = interval.locationHint(false);
+        if (locationHint == null || !(locationHint instanceof TraceInterval)) {
+            return;
+        }
+        TraceInterval registerHint = (TraceInterval) locationHint;
+        assert registerHint.isSplitParent() : "register hint must be split parent";
+
+        if (interval.spillState() != SpillState.NoOptimization || registerHint.spillState() != SpillState.NoOptimization) {
+            // combining the stack slots for intervals where spill move optimization is applied
+            // is not beneficial and would cause problems
+            return;
+        }
+
+        int beginPos = interval.from();
+        int endPos = interval.to();
+        if (endPos > allocator.maxOpId() || isOdd(beginPos) || isOdd(endPos)) {
+            // safety check that lirOpWithId is allowed
+            return;
+        }
+
+        if (!isMove(allocator.instructionForId(beginPos), registerHint, interval) || !isMove(allocator.instructionForId(endPos), interval, registerHint)) {
+            // cur and registerHint are not connected with two moves
+            return;
+        }
+
+        TraceInterval beginHint = registerHint.getSplitChildAtOpId(beginPos, LIRInstruction.OperandMode.USE, allocator);
+        TraceInterval endHint = registerHint.getSplitChildAtOpId(endPos, LIRInstruction.OperandMode.DEF, allocator);
+        if (beginHint == endHint || beginHint.to() != beginPos || endHint.from() != endPos) {
+            // registerHint must be split, otherwise the re-writing of use positions does not work
+            return;
+        }
+
+        assert beginHint.location() != null : "must have register assigned";
+        assert endHint.location() == null : "must not have register assigned";
+        assert interval.firstUsage(RegisterPriority.MustHaveRegister) == beginPos : "must have use position at begin of interval because of move";
+        assert endHint.firstUsage(RegisterPriority.MustHaveRegister) == endPos : "must have use position at begin of interval because of move";
+
+        if (isRegister(beginHint.location())) {
+            // registerHint is not spilled at beginPos, so it would not be beneficial to
+            // immediately spill cur
+            return;
+        }
+        assert registerHint.spillSlot() != null : "must be set when part of interval was spilled";
+
+        // modify intervals such that cur gets the same stack slot as registerHint
+        // delete use positions to prevent the intervals from getting a register at the beginning
+        interval.setSpillSlot(registerHint.spillSlot());
+        interval.removeFirstUsePos();
+        endHint.removeFirstUsePos();
+    }
+
+    // allocate a physical register or memory location to an interval
+    @Override
+    @SuppressWarnings("try")
+    protected boolean activateCurrent(TraceInterval interval) {
+        if (Debug.isLogEnabled()) {
+            logCurrentStatus();
+        }
+        boolean result = true;
+
+        try (Indent indent = Debug.logAndIndent("activating interval %s,  splitParent: %d", interval, interval.splitParent().operandNumber)) {
+
+            final Value operand = interval.operand;
+            if (interval.location() != null && isStackSlotValue(interval.location())) {
+                // activating an interval that has a stack slot assigned -> split it at the first
+                // use position (used for method parameters)
+                if (Debug.isLogEnabled()) {
+                    Debug.log("interval has spill slot assigned (method parameter) -> split it before first use");
+                }
+                splitStackInterval(interval);
+                result = false;
+
+            } else {
+                if (interval.location() == null) {
+                    // interval has no register assigned -> normal allocation
+                    // (this is the normal case for most intervals)
+                    if (Debug.isLogEnabled()) {
+                        Debug.log("normal allocation of register");
+                    }
+
+                    // assign same spill slot to non-intersecting intervals
+                    combineSpilledIntervals(interval);
+
+                    initVarsForAlloc(interval);
+                    if (noAllocationPossible(interval) || !allocFreeRegister(interval)) {
+                        // no empty register available.
+                        // split and spill another interval so that this interval gets a register
+                        allocLockedRegister(interval);
+                    }
+
+                    // spilled intervals need not be moved to the active list
+                    if (!isRegister(interval.location())) {
+                        result = false;
+                    }
+                }
+            }
+
+            // load spilled values that become active from stack slot to register
+            if (interval.insertMoveWhenActivated()) {
+                assert interval.isSplitChild();
+                assert interval.currentSplitChild() != null;
+                assert !interval.currentSplitChild().operand.equals(operand) : "cannot insert move between same interval";
+                if (Debug.isLogEnabled()) {
+                    Debug.log("Inserting move from interval %d to %d because insertMoveWhenActivated is set", interval.currentSplitChild().operandNumber, interval.operandNumber);
+                }
+
+                insertMove(interval.from(), interval.currentSplitChild(), interval);
+            }
+            interval.makeCurrentSplitChild();
+
+        }
+
+        return result; // true = interval is moved to active list
+    }
+
+    void finishAllocation() {
+        // must be called when all intervals are allocated
+        moveResolver.resolveAndAppendMoves();
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/lsra/TraceLocalMoveResolver.java	Mon Nov 30 17:18:36 2015 +0100
@@ -0,0 +1,553 @@
+/*
+ * Copyright (c) 2009, 2015, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+package com.oracle.graal.lir.alloc.trace.lsra;
+
+import static com.oracle.graal.lir.LIRValueUtil.asVirtualStackSlot;
+import static com.oracle.graal.lir.LIRValueUtil.isStackSlotValue;
+import static com.oracle.graal.lir.LIRValueUtil.isVirtualStackSlot;
+import static jdk.vm.ci.code.ValueUtil.asRegister;
+import static jdk.vm.ci.code.ValueUtil.asStackSlot;
+import static jdk.vm.ci.code.ValueUtil.isIllegal;
+import static jdk.vm.ci.code.ValueUtil.isRegister;
+import static jdk.vm.ci.code.ValueUtil.isStackSlot;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+
+import jdk.vm.ci.code.StackSlot;
+import jdk.vm.ci.common.JVMCIError;
+import jdk.vm.ci.meta.AllocatableValue;
+import jdk.vm.ci.meta.Constant;
+import jdk.vm.ci.meta.JavaConstant;
+import jdk.vm.ci.meta.LIRKind;
+import jdk.vm.ci.meta.Value;
+
+import com.oracle.graal.debug.Debug;
+import com.oracle.graal.debug.Indent;
+import com.oracle.graal.lir.LIRInsertionBuffer;
+import com.oracle.graal.lir.LIRInstruction;
+import com.oracle.graal.lir.VirtualStackSlot;
+import com.oracle.graal.lir.framemap.FrameMap;
+import com.oracle.graal.lir.framemap.FrameMapBuilderTool;
+
+/**
+ * Resolves pending mappings between intervals by inserting the required move instructions,
+ * spilling one of the source intervals to a stack slot when the mappings form a cycle.
+ */
+final class TraceLocalMoveResolver {
+
+    private static final int STACK_SLOT_IN_CALLER_FRAME_IDX = -1;
+    private final TraceLinearScan allocator;
+
+    private int insertIdx;
+    private LIRInsertionBuffer insertionBuffer; // buffer where moves are inserted
+
+    private final List<TraceInterval> mappingFrom;
+    private final List<Constant> mappingFromOpr;
+    private final List<TraceInterval> mappingTo;
+    private final int[] registerBlocked;
+
+    private int[] stackBlocked;
+    private final int firstVirtualStackIndex;
+
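+    /*
+     * Index scheme shared by stackBlocked and getStackArrayIndex: slots in the caller frame map
+     * to STACK_SLOT_IN_CALLER_FRAME_IDX and are ignored, slots in the current frame map to their
+     * negated raw offset (a value in [0, firstVirtualStackIndex)), and virtual stack slots map to
+     * firstVirtualStackIndex + id, so all slot kinds are tracked in a single array.
+     */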
+    private int getStackArrayIndex(Value stackSlotValue) {
+        if (isStackSlot(stackSlotValue)) {
+            return getStackArrayIndex(asStackSlot(stackSlotValue));
+        }
+        if (isVirtualStackSlot(stackSlotValue)) {
+            return getStackArrayIndex(asVirtualStackSlot(stackSlotValue));
+        }
+        throw JVMCIError.shouldNotReachHere("value is not a stack slot: " + stackSlotValue);
+    }
+
+    private int getStackArrayIndex(StackSlot stackSlot) {
+        int stackIdx;
+        if (stackSlot.isInCallerFrame()) {
+            // incoming stack arguments can be ignored
+            stackIdx = STACK_SLOT_IN_CALLER_FRAME_IDX;
+        } else {
+            assert stackSlot.getRawAddFrameSize() : "Unexpected stack slot: " + stackSlot;
+            int offset = -stackSlot.getRawOffset();
+            assert 0 <= offset && offset < firstVirtualStackIndex : String.format("Wrong stack slot offset: %d (first virtual stack slot index: %d)", offset, firstVirtualStackIndex);
+            stackIdx = offset;
+        }
+        return stackIdx;
+    }
+
+    private int getStackArrayIndex(VirtualStackSlot virtualStackSlot) {
+        return firstVirtualStackIndex + virtualStackSlot.getId();
+    }
+
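+    // direction is +1 to block a location and -1 to unblock it, so the arrays hold a count of
+    // how many pending moves still read from each location.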
+    protected void setValueBlocked(Value location, int direction) {
+        assert direction == 1 || direction == -1 : "out of bounds";
+        if (isStackSlotValue(location)) {
+            int stackIdx = getStackArrayIndex(location);
+            if (stackIdx == STACK_SLOT_IN_CALLER_FRAME_IDX) {
+                // incoming stack arguments can be ignored
+                return;
+            }
+            if (stackIdx >= stackBlocked.length) {
+                stackBlocked = Arrays.copyOf(stackBlocked, stackIdx + 1);
+            }
+            stackBlocked[stackIdx] += direction;
+        } else {
+            assert direction == 1 || direction == -1 : "out of bounds";
+            if (isRegister(location)) {
+                registerBlocked[asRegister(location).number] += direction;
+            } else {
+                throw JVMCIError.shouldNotReachHere("unhandled value " + location);
+            }
+        }
+    }
+
+    protected TraceInterval getMappingFrom(int i) {
+        return mappingFrom.get(i);
+    }
+
+    protected int mappingFromSize() {
+        return mappingFrom.size();
+    }
+
+    protected int valueBlocked(Value location) {
+        if (isStackSlotValue(location)) {
+            int stackIdx = getStackArrayIndex(location);
+            if (stackIdx == STACK_SLOT_IN_CALLER_FRAME_IDX) {
+                // incoming stack arguments are always blocked (i.e. they cannot be written)
+                return 1;
+            }
+            if (stackIdx >= stackBlocked.length) {
+                return 0;
+            }
+            return stackBlocked[stackIdx];
+        }
+        if (isRegister(location)) {
+            return registerBlocked[asRegister(location).number];
+        }
+        throw JVMCIError.shouldNotReachHere("unhandled value " + location);
+    }
+
+    /*
+     * TODO (je) remove?
+     */
+    protected static boolean areMultipleReadsAllowed() {
+        return true;
+    }
+
+    boolean hasMappings() {
+        return mappingFrom.size() > 0;
+    }
+
+    protected TraceLinearScan getAllocator() {
+        return allocator;
+    }
+
+    protected TraceLocalMoveResolver(TraceLinearScan allocator) {
+
+        this.allocator = allocator;
+        this.mappingFrom = new ArrayList<>(8);
+        this.mappingFromOpr = new ArrayList<>(8);
+        this.mappingTo = new ArrayList<>(8);
+        this.insertIdx = -1;
+        this.insertionBuffer = new LIRInsertionBuffer();
+        this.registerBlocked = new int[allocator.getRegisters().length];
+        FrameMapBuilderTool frameMapBuilderTool = (FrameMapBuilderTool) allocator.getFrameMapBuilder();
+        FrameMap frameMap = frameMapBuilderTool.getFrameMap();
+        this.stackBlocked = new int[frameMapBuilderTool.getNumberOfStackSlots()];
+        this.firstVirtualStackIndex = !frameMap.frameNeedsAllocating() ? 0 : frameMap.currentFrameSize() + 1;
+    }
+
+    protected boolean checkEmpty() {
+        assert mappingFrom.size() == 0 && mappingFromOpr.size() == 0 && mappingTo.size() == 0 : "list must be empty before and after processing";
+        for (int i = 0; i < stackBlocked.length; i++) {
+            assert stackBlocked[i] == 0 : "stack map must be empty before and after processing";
+        }
+        for (int i = 0; i < getAllocator().getRegisters().length; i++) {
+            assert registerBlocked[i] == 0 : "register map must be empty before and after processing";
+        }
+        checkMultipleReads();
+        return true;
+    }
+
+    protected void checkMultipleReads() {
+        // multiple reads are allowed in SSA LSRA
+    }
+
+    private boolean verifyBeforeResolve() {
+        assert mappingFrom.size() == mappingFromOpr.size() : "length must be equal";
+        assert mappingFrom.size() == mappingTo.size() : "length must be equal";
+        assert insertIdx != -1 : "insert position not set";
+
+        int i;
+        int j;
+        if (!areMultipleReadsAllowed()) {
+            for (i = 0; i < mappingFrom.size(); i++) {
+                for (j = i + 1; j < mappingFrom.size(); j++) {
+                    assert mappingFrom.get(i) == null || mappingFrom.get(i) != mappingFrom.get(j) : "cannot read from same interval twice";
+                }
+            }
+        }
+
+        for (i = 0; i < mappingTo.size(); i++) {
+            for (j = i + 1; j < mappingTo.size(); j++) {
+                assert mappingTo.get(i) != mappingTo.get(j) : "cannot write to same interval twice";
+            }
+        }
+
+        HashSet<Value> usedRegs = new HashSet<>();
+        if (!areMultipleReadsAllowed()) {
+            for (i = 0; i < mappingFrom.size(); i++) {
+                TraceInterval interval = mappingFrom.get(i);
+                if (interval != null && !isIllegal(interval.location())) {
+                    boolean unique = usedRegs.add(interval.location());
+                    assert unique : "cannot read from same register twice";
+                }
+            }
+        }
+
+        usedRegs.clear();
+        for (i = 0; i < mappingTo.size(); i++) {
+            TraceInterval interval = mappingTo.get(i);
+            if (isIllegal(interval.location())) {
+                // After insertion the location may become illegal, so don't check it since multiple
+                // intervals might be illegal.
+                continue;
+            }
+            boolean unique = usedRegs.add(interval.location());
+            assert unique : "cannot write to same register twice";
+        }
+
+        verifyStackSlotMapping();
+
+        return true;
+    }
+
+    protected void verifyStackSlotMapping() {
+        // relax disjoint stack maps invariant
+    }
+
+    // mark assignedReg and assignedRegHi of the interval as blocked
+    private void blockRegisters(TraceInterval interval) {
+        Value location = interval.location();
+        if (mightBeBlocked(location)) {
+            assert areMultipleReadsAllowed() || valueBlocked(location) == 0 : "location already marked as used: " + location;
+            int direction = 1;
+            setValueBlocked(location, direction);
+            Debug.log("block %s", location);
+        }
+    }
+
+    // mark assignedReg and assignedRegHi of the interval as unblocked
+    private void unblockRegisters(TraceInterval interval) {
+        Value location = interval.location();
+        if (mightBeBlocked(location)) {
+            assert valueBlocked(location) > 0 : "location already marked as unused: " + location;
+            setValueBlocked(location, -1);
+            Debug.log("unblock %s", location);
+        }
+    }
+
+    /**
+     * Checks if the {@linkplain TraceInterval#location() location} of {@code to} is not blocked or
+     * is only blocked by {@code from}.
+     */
+    private boolean safeToProcessMove(TraceInterval from, TraceInterval to) {
+        Value fromReg = from != null ? from.location() : null;
+
+        Value location = to.location();
+        if (mightBeBlocked(location)) {
+            if ((valueBlocked(location) > 1 || (valueBlocked(location) == 1 && !isMoveToSelf(fromReg, location)))) {
+                return false;
+            }
+        }
+
+        return true;
+    }
+
+    protected static boolean isMoveToSelf(Value from, Value to) {
+        assert to != null;
+        if (to.equals(from)) {
+            return true;
+        }
+        if (from != null && isRegister(from) && isRegister(to) && asRegister(from).equals(asRegister(to))) {
+            assert LIRKind.verifyMoveKinds(to.getLIRKind(), from.getLIRKind()) : String.format("Same register but Kind mismatch %s <- %s", to, from);
+            return true;
+        }
+        return false;
+    }
+
+    protected static boolean mightBeBlocked(Value location) {
+        if (isRegister(location)) {
+            return true;
+        }
+        if (isStackSlotValue(location)) {
+            return true;
+        }
+        return false;
+    }
+
+    private void createInsertionBuffer(List<LIRInstruction> list) {
+        assert !insertionBuffer.initialized() : "overwriting existing buffer";
+        insertionBuffer.init(list);
+    }
+
+    private void appendInsertionBuffer() {
+        if (insertionBuffer.initialized()) {
+            insertionBuffer.finish();
+        }
+        assert !insertionBuffer.initialized() : "must be uninitialized now";
+
+        insertIdx = -1;
+    }
+
+    private void insertMove(TraceInterval fromInterval, TraceInterval toInterval) {
+        assert !fromInterval.operand.equals(toInterval.operand) : "from and to interval equal: " + fromInterval;
+        assert LIRKind.verifyMoveKinds(toInterval.kind(), fromInterval.kind()) : "move between different types";
+        assert insertIdx != -1 : "must setup insert position first";
+
+        insertionBuffer.append(insertIdx, createMove(fromInterval.operand, toInterval.operand, fromInterval.location(), toInterval.location()));
+
+        if (Debug.isLogEnabled()) {
+            Debug.log("insert move from %s to %s at %d", fromInterval, toInterval, insertIdx);
+        }
+    }
+
+    /**
+     * @param fromOpr {@link TraceInterval#operand operand} of the {@code from} interval
+     * @param toOpr {@link TraceInterval#operand operand} of the {@code to} interval
+     * @param fromLocation {@link TraceInterval#location() location} of the {@code from} interval
+     * @param toLocation {@link TraceInterval#location() location} of the {@code to} interval
+     */
+    protected LIRInstruction createMove(AllocatableValue fromOpr, AllocatableValue toOpr, AllocatableValue fromLocation, AllocatableValue toLocation) {
+        if (isStackSlotValue(toLocation) && isStackSlotValue(fromLocation)) {
+            return getAllocator().getSpillMoveFactory().createStackMove(toOpr, fromOpr);
+        }
+        return getAllocator().getSpillMoveFactory().createMove(toOpr, fromOpr);
+    }
+
+    private void insertMove(Constant fromOpr, TraceInterval toInterval) {
+        assert insertIdx != -1 : "must setup insert position first";
+
+        AllocatableValue toOpr = toInterval.operand;
+        LIRInstruction move = getAllocator().getSpillMoveFactory().createLoad(toOpr, fromOpr);
+        insertionBuffer.append(insertIdx, move);
+
+        if (Debug.isLogEnabled()) {
+            Debug.log("insert move from value %s to %s at %d", fromOpr, toInterval, insertIdx);
+        }
+    }
+
+    @SuppressWarnings("try")
+    private void resolveMappings() {
+        try (Indent indent = Debug.logAndIndent("resolveMapping")) {
+            assert verifyBeforeResolve();
+            if (Debug.isLogEnabled()) {
+                printMapping();
+            }
+
+            // Block all registers that are used as input operands of a move.
+            // When a register is blocked, no move to this register is emitted.
+            // This is necessary for detecting cycles in moves.
+            int i;
+            for (i = mappingFrom.size() - 1; i >= 0; i--) {
+                TraceInterval fromInterval = mappingFrom.get(i);
+                if (fromInterval != null) {
+                    blockRegisters(fromInterval);
+                }
+            }
+
+            int spillCandidate = -1;
+            while (mappingFrom.size() > 0) {
+                boolean processedInterval = false;
+
+                for (i = mappingFrom.size() - 1; i >= 0; i--) {
+                    TraceInterval fromInterval = mappingFrom.get(i);
+                    TraceInterval toInterval = mappingTo.get(i);
+
+                    if (safeToProcessMove(fromInterval, toInterval)) {
+                        // this interval can be processed because target is free
+                        if (fromInterval != null) {
+                            insertMove(fromInterval, toInterval);
+                            unblockRegisters(fromInterval);
+                        } else {
+                            insertMove(mappingFromOpr.get(i), toInterval);
+                        }
+                        mappingFrom.remove(i);
+                        mappingFromOpr.remove(i);
+                        mappingTo.remove(i);
+
+                        processedInterval = true;
+                    } else if (fromInterval != null && isRegister(fromInterval.location())) {
+                        // this interval cannot be processed now because target is not free
+                        // it starts in a register, so it is a possible candidate for spilling
+                        spillCandidate = i;
+                    }
+                }
+
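+                // If no mapping could be processed in this pass, the remaining moves form a cycle
+                // (e.g. r1 -> r2 and r2 -> r1 block each other), so breakCycle spills one source
+                // interval to a stack slot to free its register.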
+                if (!processedInterval) {
+                    breakCycle(spillCandidate);
+                }
+            }
+        }
+
+        // check that all intervals have been processed
+        assert checkEmpty();
+    }
+
+    protected void breakCycle(int spillCandidate) {
+        if (spillCandidate != -1) {
+            // no move could be processed because there is a cycle in the move list
+            // (e.g. r1 -> r2, r2 -> r1), so one interval must be spilled to memory
+            assert spillCandidate != -1 : "no interval in register for spilling found";
+
+            // create a new spill interval and assign a stack slot to it
+            TraceInterval fromInterval1 = mappingFrom.get(spillCandidate);
+            // Do not allocate a new spill slot for the temporary interval; instead, use the spill
+            // slot assigned to fromInterval. Otherwise moves from one stack slot to another can
+            // happen (not allowed by the LIRAssembler).
+            AllocatableValue spillSlot1 = fromInterval1.spillSlot();
+            if (spillSlot1 == null) {
+                spillSlot1 = getAllocator().getFrameMapBuilder().allocateSpillSlot(fromInterval1.kind());
+                fromInterval1.setSpillSlot(spillSlot1);
+            }
+            spillInterval(spillCandidate, fromInterval1, spillSlot1);
+            return;
+        }
+        assert mappingFromSize() > 1;
+        // Arbitrarily select the first entry for spilling.
+        int stackSpillCandidate = 0;
+        TraceInterval fromInterval = getMappingFrom(stackSpillCandidate);
+        assert isStackSlotValue(fromInterval.location());
+        // allocate new stack slot
+        VirtualStackSlot spillSlot = getAllocator().getFrameMapBuilder().allocateSpillSlot(fromInterval.kind());
+        spillInterval(stackSpillCandidate, fromInterval, spillSlot);
+    }
+
+    protected void spillInterval(int spillCandidate, TraceInterval fromInterval, AllocatableValue spillSlot) {
+        assert mappingFrom.get(spillCandidate).equals(fromInterval);
+        TraceInterval spillInterval = getAllocator().createDerivedInterval(fromInterval);
+        spillInterval.setKind(fromInterval.kind());
+
+        // add a dummy range because real position is difficult to calculate
+        // Note: this range is a special case when the integrity of the allocation is
+        // checked
+        spillInterval.addRange(1, 2);
+
+        spillInterval.assignLocation(spillSlot);
+
+        if (Debug.isLogEnabled()) {
+            Debug.log("created new Interval for spilling: %s", spillInterval);
+        }
+        blockRegisters(spillInterval);
+
+        // insert a move from register to stack and update the mapping
+        insertMove(fromInterval, spillInterval);
+        mappingFrom.set(spillCandidate, spillInterval);
+        unblockRegisters(fromInterval);
+    }
+
+    @SuppressWarnings("try")
+    private void printMapping() {
+        try (Indent indent = Debug.logAndIndent("Mapping")) {
+            for (int i = mappingFrom.size() - 1; i >= 0; i--) {
+                TraceInterval fromInterval = mappingFrom.get(i);
+                TraceInterval toInterval = mappingTo.get(i);
+                String from;
+                Value to = toInterval.location();
+                if (fromInterval == null) {
+                    from = mappingFromOpr.get(i).toString();
+                } else {
+                    from = fromInterval.location().toString();
+                }
+                Debug.log("move %s <- %s", from, to);
+            }
+        }
+    }
+
+    void setInsertPosition(List<LIRInstruction> insertList, int insertIdx) {
+        assert this.insertIdx == -1 : "use moveInsertPosition instead of setInsertPosition when data already set";
+
+        createInsertionBuffer(insertList);
+        this.insertIdx = insertIdx;
+    }
+
+    void moveInsertPosition(List<LIRInstruction> newInsertList, int newInsertIdx) {
+        if (insertionBuffer.lirList() != null && (insertionBuffer.lirList() != newInsertList || this.insertIdx != newInsertIdx)) {
+            // insert position changed -> resolve current mappings
+            resolveMappings();
+        }
+
+        assert insertionBuffer.lirList() != newInsertList || newInsertIdx >= insertIdx : String.format("Decreasing insert index: old=%d new=%d", insertIdx, newInsertIdx);
+
+        if (insertionBuffer.lirList() != newInsertList) {
+            // block changed -> append insertionBuffer because it is bound to a specific block,
+            // and create a new insertionBuffer
+            appendInsertionBuffer();
+            createInsertionBuffer(newInsertList);
+        }
+
+        this.insertIdx = newInsertIdx;
+    }
+
+    public void addMapping(TraceInterval fromInterval, TraceInterval toInterval) {
+
+        if (isIllegal(toInterval.location()) && toInterval.canMaterialize()) {
+            if (Debug.isLogEnabled()) {
+                Debug.log("no store to rematerializable interval %s needed", toInterval);
+            }
+            return;
+        }
+        if (isIllegal(fromInterval.location()) && fromInterval.canMaterialize()) {
+            // Instead of a reload, re-materialize the value
+            JavaConstant rematValue = fromInterval.getMaterializedValue();
+            addMapping(rematValue, toInterval);
+            return;
+        }
+        if (Debug.isLogEnabled()) {
+            Debug.log("add move mapping from %s to %s", fromInterval, toInterval);
+        }
+
+        assert !fromInterval.operand.equals(toInterval.operand) : "from and to interval equal: " + fromInterval;
+        assert LIRKind.verifyMoveKinds(toInterval.kind(), fromInterval.kind()) : String.format("Kind mismatch: %s vs. %s, from=%s, to=%s", fromInterval.kind(), toInterval.kind(), fromInterval,
+                        toInterval);
+        mappingFrom.add(fromInterval);
+        mappingFromOpr.add(null);
+        mappingTo.add(toInterval);
+    }
+
+    public void addMapping(Constant fromOpr, TraceInterval toInterval) {
+        if (Debug.isLogEnabled()) {
+            Debug.log("add move mapping from %s to %s", fromOpr, toInterval);
+        }
+
+        mappingFrom.add(null);
+        mappingFromOpr.add(fromOpr);
+        mappingTo.add(toInterval);
+    }
+
+    void resolveAndAppendMoves() {
+        if (hasMappings()) {
+            resolveMappings();
+        }
+        appendInsertionBuffer();
+    }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/graal/com.oracle.graal.lir/src/com/oracle/graal/lir/alloc/trace/lsra/UsePosList.java	Mon Nov 30 17:18:36 2015 +0100
@@ -0,0 +1,125 @@
+/*
+ * Copyright (c) 2015, 2015, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+package com.oracle.graal.lir.alloc.trace.lsra;
+
+import com.oracle.graal.compiler.common.util.IntList;
+import com.oracle.graal.lir.alloc.trace.lsra.TraceInterval.RegisterPriority;
+
+/**
+ * List of use positions. Each entry in the list records the use position and register priority
+ * associated with the use position. The entries in the list are in descending order of use
+ * position.
+ *
+ */
+public final class UsePosList {
+
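+    // Storage layout (see usePos and registerPriority below): entry i occupies two slots,
+    // list[2 * i] holds the use position and list[2 * i + 1] the RegisterPriority ordinal, e.g.
+    // use positions 20 and 4 are stored as [20, <priority of 20>, 4, <priority of 4>].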
+    private IntList list;
+
+    /**
+     * Creates a use list.
+     *
+     * @param initialCapacity the initial capacity of the list in terms of entries
+     */
+    public UsePosList(int initialCapacity) {
+        list = new IntList(initialCapacity * 2);
+    }
+
+    private UsePosList(IntList list) {
+        this.list = list;
+    }
+
+    /**
+     * Splits this list around a given position. All entries in this list with a use position
+     * greater or equal than {@code splitPos} are removed from this list and added to the returned
+     * list.
+     *
+     * @param splitPos the position for the split
+     * @return a use position list containing all entries removed from this list that have a use
+     *         position greater or equal than {@code splitPos}
+     */
+    public UsePosList splitAt(int splitPos) {
+        int i = size() - 1;
+        int len = 0;
+        while (i >= 0 && usePos(i) < splitPos) {
+            --i;
+            len += 2;
+        }
+        int listSplitIndex = (i + 1) * 2;
+        IntList childList = list;
+        list = IntList.copy(this.list, listSplitIndex, len);
+        childList.setSize(listSplitIndex);
+        UsePosList child = new UsePosList(childList);
+        return child;
+    }
+
+    /**
+     * Gets the use position at a specified index in this list.
+     *
+     * @param index the index of the entry for which the use position is returned
+     * @return the use position of entry {@code index} in this list
+     */
+    public int usePos(int index) {
+        return list.get(index << 1);
+    }
+
+    /**
+     * Gets the register priority for the use position at a specified index in this list.
+     *
+     * @param index the index of the entry for which the register priority is returned
+     * @return the register priority of entry {@code index} in this list
+     */
+    public RegisterPriority registerPriority(int index) {
+        return RegisterPriority.VALUES[list.get((index << 1) + 1)];
+    }
+
+    public void add(int usePos, RegisterPriority registerPriority) {
+        assert list.size() == 0 || usePos(size() - 1) > usePos;
+        list.add(usePos);
+        list.add(registerPriority.ordinal());
+    }
+
+    public int size() {
+        return list.size() >> 1;
+    }
+
+    public void removeLowestUsePos() {
+        list.setSize(list.size() - 2);
+    }
+
+    public void setRegisterPriority(int index, RegisterPriority registerPriority) {
+        list.set((index << 1) + 1, registerPriority.ordinal());
+    }
+
+    @Override
+    public String toString() {
+        StringBuilder buf = new StringBuilder("[");
+        for (int i = size() - 1; i >= 0; --i) {
+            if (buf.length() != 1) {
+                buf.append(", ");
+            }
+            RegisterPriority prio = registerPriority(i);
+            buf.append(usePos(i)).append(" -> ").append(prio.ordinal()).append(':').append(prio);
+        }
+        return buf.append("]").toString();
+    }
+}