changeset 23349:1e8342f17731

[AArch64]: A bunch of fixes and improvements.
author twisti
date Fri, 22 Jan 2016 13:50:04 -1000
parents 9306a9611097
children 38215ade8ba6
files graal/com.oracle.graal.asm.aarch64/src/com/oracle/graal/asm/aarch64/AArch64Address.java graal/com.oracle.graal.asm.aarch64/src/com/oracle/graal/asm/aarch64/AArch64MacroAssembler.java graal/com.oracle.graal.compiler.aarch64/src/com/oracle/graal/compiler/aarch64/AArch64AddressLowering.java graal/com.oracle.graal.compiler.aarch64/src/com/oracle/graal/compiler/aarch64/AArch64AddressNode.java graal/com.oracle.graal.compiler.aarch64/src/com/oracle/graal/compiler/aarch64/AArch64ArithmeticLIRGenerator.java graal/com.oracle.graal.compiler.aarch64/src/com/oracle/graal/compiler/aarch64/AArch64LIRGenerator.java graal/com.oracle.graal.compiler.aarch64/src/com/oracle/graal/compiler/aarch64/AArch64MoveFactory.java graal/com.oracle.graal.hotspot.aarch64/src/com/oracle/graal/hotspot/aarch64/AArch64HotSpotBackend.java graal/com.oracle.graal.hotspot.aarch64/src/com/oracle/graal/hotspot/aarch64/AArch64HotSpotDeoptimizeOp.java graal/com.oracle.graal.hotspot.aarch64/src/com/oracle/graal/hotspot/aarch64/AArch64HotSpotEpilogueOp.java graal/com.oracle.graal.hotspot.aarch64/src/com/oracle/graal/hotspot/aarch64/AArch64HotSpotLIRGenerator.java graal/com.oracle.graal.hotspot.aarch64/src/com/oracle/graal/hotspot/aarch64/AArch64HotSpotMove.java graal/com.oracle.graal.hotspot.aarch64/src/com/oracle/graal/hotspot/aarch64/AArch64HotSpotMoveFactory.java graal/com.oracle.graal.hotspot.aarch64/src/com/oracle/graal/hotspot/aarch64/AArch64HotSpotNodeLIRBuilder.java graal/com.oracle.graal.hotspot.aarch64/src/com/oracle/graal/hotspot/aarch64/AArch64HotSpotPatchReturnAddressOp.java graal/com.oracle.graal.lir.aarch64/src/com/oracle/graal/lir/aarch64/AArch64AddressValue.java graal/com.oracle.graal.lir.aarch64/src/com/oracle/graal/lir/aarch64/AArch64ArithmeticOp.java graal/com.oracle.graal.lir.aarch64/src/com/oracle/graal/lir/aarch64/AArch64BlockEndOp.java graal/com.oracle.graal.lir.aarch64/src/com/oracle/graal/lir/aarch64/AArch64LIRInstruction.java 
graal/com.oracle.graal.lir.aarch64/src/com/oracle/graal/lir/aarch64/AArch64Move.java
diffstat 20 files changed, 454 insertions(+), 311 deletions(-) [+]
line wrap: on
line diff
--- a/graal/com.oracle.graal.asm.aarch64/src/com/oracle/graal/asm/aarch64/AArch64Address.java	Thu Jan 21 14:24:24 2016 -0800
+++ b/graal/com.oracle.graal.asm.aarch64/src/com/oracle/graal/asm/aarch64/AArch64Address.java	Fri Jan 22 13:50:04 2016 -1000
@@ -203,24 +203,33 @@
 
         switch (addressingMode) {
             case IMMEDIATE_SCALED:
-                return !base.equals(zr) && offset.equals(zr) && extendType == null && NumUtil.isUnsignedNbit(12, immediate);
+                assert !base.equals(zr) && offset.equals(zr) && extendType == null && NumUtil.isUnsignedNbit(12, immediate);
+                break;
             case IMMEDIATE_UNSCALED:
-                return !base.equals(zr) && offset.equals(zr) && extendType == null && NumUtil.isSignedNbit(9, immediate);
+                assert !base.equals(zr) && offset.equals(zr) && extendType == null && NumUtil.isSignedNbit(9, immediate);
+                break;
             case BASE_REGISTER_ONLY:
-                return !base.equals(zr) && offset.equals(zr) && extendType == null && immediate == 0;
+                assert !base.equals(zr) && offset.equals(zr) && extendType == null && immediate == 0;
+                break;
             case REGISTER_OFFSET:
-                return !base.equals(zr) && offset.getRegisterCategory().equals(AArch64.CPU) && extendType == null && immediate == 0;
+                assert !base.equals(zr) && offset.getRegisterCategory().equals(AArch64.CPU) && extendType == null && immediate == 0;
+                break;
             case EXTENDED_REGISTER_OFFSET:
-                return !base.equals(zr) && offset.getRegisterCategory().equals(AArch64.CPU) && (extendType == AArch64Assembler.ExtendType.SXTW || extendType == AArch64Assembler.ExtendType.UXTW) &&
+                assert !base.equals(zr) && offset.getRegisterCategory().equals(AArch64.CPU) && (extendType == AArch64Assembler.ExtendType.SXTW || extendType == AArch64Assembler.ExtendType.UXTW) &&
                                 immediate == 0;
+                break;
             case PC_LITERAL:
-                return base.equals(zr) && offset.equals(zr) && extendType == null && NumUtil.isSignedNbit(21, immediate) && ((immediate & 0x3) == 0);
+                assert base.equals(zr) && offset.equals(zr) && extendType == null && NumUtil.isSignedNbit(21, immediate) && ((immediate & 0x3) == 0);
+                break;
             case IMMEDIATE_POST_INDEXED:
             case IMMEDIATE_PRE_INDEXED:
-                return !base.equals(zr) && offset.equals(zr) && extendType == null && NumUtil.isSignedNbit(9, immediate);
+                assert !base.equals(zr) && offset.equals(zr) && extendType == null && NumUtil.isSignedNbit(9, immediate);
+                break;
             default:
                 throw JVMCIError.shouldNotReachHere();
         }
+
+        return true;
     }
 
     public Register getBase() {
--- a/graal/com.oracle.graal.asm.aarch64/src/com/oracle/graal/asm/aarch64/AArch64MacroAssembler.java	Thu Jan 21 14:24:24 2016 -0800
+++ b/graal/com.oracle.graal.asm.aarch64/src/com/oracle/graal/asm/aarch64/AArch64MacroAssembler.java	Fri Jan 22 13:50:04 2016 -1000
@@ -333,13 +333,16 @@
      * Generates a move 64-bit immediate code sequence. The immediate may later be updated by
      * HotSpot.
      *
+     * In AArch64 mode the virtual address space is 48 bits in size, so we only need three
+     * instructions to create a patchable instruction sequence that can reach anywhere.
+     *
      * @param dst general purpose register. May not be null, stackpointer or zero-register.
      * @param imm
      */
     public void forceMov(Register dst, long imm, boolean optimize) {
         // We have to move all non zero parts of the immediate in 16-bit chunks
         boolean firstMove = true;
-        for (int offset = 0; offset < 64; offset += 16) {
+        for (int offset = 0; offset < 48; offset += 16) {
             int chunk = (int) (imm >> offset) & NumUtil.getNbitNumberInt(16);
             if (optimize && chunk == 0) {
                 continue;
@@ -359,16 +362,6 @@
     }
 
     /**
-     * Generates a move 64-bit immediate code sequence. The immediate may later be updated by
-     * HotSpot.
-     *
-     * @param dst general purpose register. May not be null, stackpointer or zero-register.
-     */
-    public void forceMov(Register dst, int imm) {
-        forceMov(dst, imm & 0xFFFF_FFFFL);
-    }
-
-    /**
      * Loads immediate into register.
      *
      * @param dst general purpose register. May not be null, zero-register or stackpointer.
--- a/graal/com.oracle.graal.compiler.aarch64/src/com/oracle/graal/compiler/aarch64/AArch64AddressLowering.java	Thu Jan 21 14:24:24 2016 -0800
+++ b/graal/com.oracle.graal.compiler.aarch64/src/com/oracle/graal/compiler/aarch64/AArch64AddressLowering.java	Fri Jan 22 13:50:04 2016 -1000
@@ -27,9 +27,6 @@
 import com.oracle.graal.nodes.memory.address.AddressNode;
 import com.oracle.graal.phases.common.AddressLoweringPhase.AddressLowering;
 
-import jdk.vm.ci.common.JVMCIError;
-import jdk.vm.ci.meta.JavaConstant;
-
 public class AArch64AddressLowering extends AddressLowering {
 
     @Override
@@ -39,16 +36,9 @@
 
     @Override
     public AddressNode lower(ValueNode base, ValueNode offset) {
-        asImmediate(base);
-        throw JVMCIError.unimplemented();
+        AArch64AddressNode ret = new AArch64AddressNode(base, offset);
+        // TODO improve
+        return base.graph().unique(ret);
     }
 
-    private static JavaConstant asImmediate(ValueNode value) {
-        JavaConstant c = value.asJavaConstant();
-        if (c != null && c.getJavaKind().isNumericInteger()) {
-            return c;
-        } else {
-            return null;
-        }
-    }
 }
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/graal/com.oracle.graal.compiler.aarch64/src/com/oracle/graal/compiler/aarch64/AArch64AddressNode.java	Fri Jan 22 13:50:04 2016 -1000
@@ -0,0 +1,120 @@
+/*
+ * Copyright (c) 2016, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+package com.oracle.graal.compiler.aarch64;
+
+import com.oracle.graal.asm.aarch64.AArch64Address;
+import com.oracle.graal.asm.aarch64.AArch64Address.AddressingMode;
+import com.oracle.graal.graph.NodeClass;
+import com.oracle.graal.lir.aarch64.AArch64AddressValue;
+import com.oracle.graal.lir.gen.LIRGeneratorTool;
+import com.oracle.graal.nodeinfo.NodeInfo;
+import com.oracle.graal.nodes.ValueNode;
+import com.oracle.graal.nodes.memory.address.AddressNode;
+import com.oracle.graal.nodes.spi.LIRLowerable;
+import com.oracle.graal.nodes.spi.NodeLIRBuilderTool;
+
+import jdk.vm.ci.meta.AllocatableValue;
+import jdk.vm.ci.meta.LIRKind;
+import jdk.vm.ci.meta.Value;
+
+/**
+ * Represents an address of the form... TODO.
+ */
+@NodeInfo
+public class AArch64AddressNode extends AddressNode implements LIRLowerable {
+
+    public static final NodeClass<AArch64AddressNode> TYPE = NodeClass.create(AArch64AddressNode.class);
+
+    @OptionalInput private ValueNode base;
+
+    @OptionalInput private ValueNode index;
+    private AArch64Address.AddressingMode addressingMode;
+
+    private int displacement;
+
+    public AArch64AddressNode(ValueNode base) {
+        this(base, null);
+    }
+
+    public AArch64AddressNode(ValueNode base, ValueNode index) {
+        super(TYPE);
+        this.base = base;
+        this.index = index;
+        this.addressingMode = AddressingMode.IMMEDIATE_UNSCALED;
+    }
+
+    public void generate(NodeLIRBuilderTool gen) {
+        LIRGeneratorTool tool = gen.getLIRGeneratorTool();
+
+        AllocatableValue baseValue = base == null ? Value.ILLEGAL : tool.asAllocatable(gen.operand(base));
+        AllocatableValue indexValue = index == null ? Value.ILLEGAL : tool.asAllocatable(gen.operand(index));
+
+        AllocatableValue baseReference = LIRKind.derivedBaseFromValue(baseValue);
+        AllocatableValue indexReference;
+        if (addressingMode.equals(AddressingMode.IMMEDIATE_UNSCALED)) {
+            indexReference = LIRKind.derivedBaseFromValue(indexValue);
+        } else {
+            if (indexValue.getLIRKind().isValue()) {
+                indexReference = null;
+            } else {
+                indexReference = Value.ILLEGAL;
+            }
+        }
+
+        LIRKind kind = LIRKind.combineDerived(tool.getLIRKind(stamp()), baseReference, indexReference);
+        gen.setResult(this, new AArch64AddressValue(kind, baseValue, indexValue, displacement, false, addressingMode));
+    }
+
+    public ValueNode getBase() {
+        return base;
+    }
+
+    public void setBase(ValueNode base) {
+        // allow modification before inserting into the graph
+        if (isAlive()) {
+            updateUsages(this.base, base);
+        }
+        this.base = base;
+    }
+
+    public ValueNode getIndex() {
+        return index;
+    }
+
+    public void setIndex(ValueNode index) {
+        // allow modification before inserting into the graph
+        if (isAlive()) {
+            updateUsages(this.index, index);
+        }
+        this.index = index;
+    }
+
+    public int getDisplacement() {
+        return displacement;
+    }
+
+    public void setDisplacement(int displacement) {
+        this.displacement = displacement;
+    }
+}
--- a/graal/com.oracle.graal.compiler.aarch64/src/com/oracle/graal/compiler/aarch64/AArch64ArithmeticLIRGenerator.java	Thu Jan 21 14:24:24 2016 -0800
+++ b/graal/com.oracle.graal.compiler.aarch64/src/com/oracle/graal/compiler/aarch64/AArch64ArithmeticLIRGenerator.java	Fri Jan 22 13:50:04 2016 -1000
@@ -209,17 +209,19 @@
 
     @Override
     public Value emitNarrow(Value inputVal, int bits) {
-        assert inputVal.getPlatformKind() == AArch64Kind.QWORD && bits == 32 : "Can only convert from long to int";
-        LIRKind resultKind = getResultLirKind(bits, inputVal);
-        long mask = NumUtil.getNbitNumberLong(bits);
-        Value maskValue = new ConstantValue(resultKind, JavaConstant.forLong(mask));
-        return emitBinary(resultKind, AArch64ArithmeticOp.AND, true, inputVal, maskValue);
+        if (inputVal.getPlatformKind() == AArch64Kind.QWORD && bits <= 32) {
+            LIRKind resultKind = getResultLirKind(bits, inputVal);
+            long mask = NumUtil.getNbitNumberLong(bits);
+            Value maskValue = new ConstantValue(resultKind, JavaConstant.forLong(mask));
+            return emitBinary(resultKind, AArch64ArithmeticOp.AND, true, inputVal, maskValue);
+        } else {
+            return inputVal;
+        }
     }
 
     @Override
     public Value emitZeroExtend(Value inputVal, int fromBits, int toBits) {
         assert fromBits <= toBits && (toBits == 32 || toBits == 64);
-        assert isNumericInteger(inputVal.getPlatformKind());
         if (fromBits == toBits) {
             return inputVal;
         }
--- a/graal/com.oracle.graal.compiler.aarch64/src/com/oracle/graal/compiler/aarch64/AArch64LIRGenerator.java	Thu Jan 21 14:24:24 2016 -0800
+++ b/graal/com.oracle.graal.compiler.aarch64/src/com/oracle/graal/compiler/aarch64/AArch64LIRGenerator.java	Fri Jan 22 13:50:04 2016 -1000
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2013, 2016, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -24,20 +24,7 @@
 
 import static com.oracle.graal.lir.LIRValueUtil.asJavaConstant;
 import static com.oracle.graal.lir.LIRValueUtil.isJavaConstant;
-import static com.oracle.graal.lir.LIRValueUtil.isStackSlotValue;
 import static jdk.vm.ci.code.ValueUtil.asAllocatableValue;
-import static jdk.vm.ci.code.ValueUtil.isStackSlot;
-import jdk.vm.ci.aarch64.AArch64Kind;
-import jdk.vm.ci.amd64.AMD64Kind;
-import jdk.vm.ci.code.RegisterValue;
-import jdk.vm.ci.common.JVMCIError;
-import jdk.vm.ci.meta.AllocatableValue;
-import jdk.vm.ci.meta.Constant;
-import jdk.vm.ci.meta.JavaConstant;
-import jdk.vm.ci.meta.JavaKind;
-import jdk.vm.ci.meta.LIRKind;
-import jdk.vm.ci.meta.PlatformKind;
-import jdk.vm.ci.meta.Value;
 
 import com.oracle.graal.asm.NumUtil;
 import com.oracle.graal.asm.aarch64.AArch64Address;
@@ -61,7 +48,6 @@
 import com.oracle.graal.lir.aarch64.AArch64ControlFlow;
 import com.oracle.graal.lir.aarch64.AArch64ControlFlow.BranchOp;
 import com.oracle.graal.lir.aarch64.AArch64ControlFlow.CondMoveOp;
-import com.oracle.graal.lir.aarch64.AArch64LIRInstruction;
 import com.oracle.graal.lir.aarch64.AArch64Move;
 import com.oracle.graal.lir.aarch64.AArch64Move.CompareAndSwap;
 import com.oracle.graal.lir.aarch64.AArch64Move.MembarOp;
@@ -70,25 +56,20 @@
 import com.oracle.graal.lir.gen.LIRGenerator;
 import com.oracle.graal.phases.util.Providers;
 
+import jdk.vm.ci.aarch64.AArch64Kind;
+import jdk.vm.ci.code.RegisterValue;
+import jdk.vm.ci.common.JVMCIError;
+import jdk.vm.ci.meta.AllocatableValue;
+import jdk.vm.ci.meta.JavaConstant;
+import jdk.vm.ci.meta.JavaKind;
+import jdk.vm.ci.meta.LIRKind;
+import jdk.vm.ci.meta.PlatformKind;
+import jdk.vm.ci.meta.Value;
+
 public abstract class AArch64LIRGenerator extends LIRGenerator {
 
-    @SuppressWarnings("unused") private final ConstantTableBaseProvider constantTableBaseProvider;
-
-    public static final class ConstantTableBaseProvider {
-        // private Variable constantTableBase;
-        @SuppressWarnings("unused") private boolean useConstantTableBase = false;
-
-        public Variable getConstantTableBase() {
-            useConstantTableBase = true;
-            // return constantTableBase;
-            return null;
-        }
-    }
-
-    public AArch64LIRGenerator(LIRKindTool lirKindTool, AArch64ArithmeticLIRGenerator arithmeticLIRGen, MoveFactory moveFactory, Providers providers, LIRGenerationResult lirGenRes,
-                    ConstantTableBaseProvider constantTableBaseProvider) {
+    public AArch64LIRGenerator(LIRKindTool lirKindTool, AArch64ArithmeticLIRGenerator arithmeticLIRGen, MoveFactory moveFactory, Providers providers, LIRGenerationResult lirGenRes) {
         super(lirKindTool, arithmeticLIRGen, moveFactory, providers, lirGenRes);
-        this.constantTableBaseProvider = constantTableBaseProvider;
     }
 
     /**
@@ -114,7 +95,7 @@
         switch ((AArch64Kind) kind.getPlatformKind()) {
             case BYTE:
             case WORD:
-                return kind.changeType(AMD64Kind.DWORD);
+                return kind.changeType(AArch64Kind.DWORD);
             default:
                 return kind;
         }
@@ -141,48 +122,6 @@
     }
 
     @Override
-    public void emitMove(AllocatableValue dst, Value src) {
-        append(createMove(dst, src));
-    }
-
-    @Override
-    public void emitMoveConstant(AllocatableValue dst, Constant src) {
-        append(createMoveConstant(dst, (JavaConstant) src));
-    }
-
-    /**
-     * Moves src to dst.
-     *
-     * If src is AArch64AddressValue the address value is loaded into dst, not the value pointed to
-     * by address. All valid combinations of src and dst values are supported, except StackSlot to
-     * StackSlot.
-     *
-     * @param dst Value stored on stack or in register. Non null.
-     * @param src Arbitrary input value. Non null.
-     * @return AArch64LIRInstruction representing the move. Non null.
-     */
-    protected AArch64LIRInstruction createMove(AllocatableValue dst, Value src) {
-        if (src instanceof AArch64AddressValue) {
-            return new AArch64Move.LoadAddressOp(dst, (AArch64AddressValue) src);
-        } else if (isStackSlot(dst)) {
-            return new AArch64Move.MoveToStackOp(dst, asAllocatable(src));
-        } else {
-            return new AArch64Move.MoveToRegOp(dst, asAllocatable(src));
-        }
-    }
-
-    protected AArch64LIRInstruction createMoveConstant(AllocatableValue dst, JavaConstant src) {
-        if (isStackSlotValue(dst)) {
-            // constant -> stack is not possible so we need a scratch register in between.
-            Variable tmp = newVariable(dst.getLIRKind());
-            append(new AArch64Move.MoveFromConstOp(tmp, src));
-            return new AArch64Move.MoveToStackOp(dst, tmp);
-        } else {
-            return new AArch64Move.MoveFromConstOp(dst, src);
-        }
-    }
-
-    @Override
     public Variable emitCompareAndSwap(Value address, Value expectedValue, Value newValue, Value trueValue, Value falseValue) {
         AArch64AddressValue addressValue = convertToBaseRegisterOnlyAddress(asAddressValue(address));
         Variable result = newVariable(trueValue.getLIRKind());
@@ -288,8 +227,7 @@
      */
     @Override
     public Variable emitConditionalMove(PlatformKind cmpKind, Value left, Value right, Condition cond, boolean unorderedIsTrue, Value trueValue, Value falseValue) {
-        assert cmpKind == left.getPlatformKind() && cmpKind == right.getPlatformKind();
-        boolean mirrored = emitCompare(left, right, cond, unorderedIsTrue);
+        boolean mirrored = emitCompare(cmpKind, left, right, cond, unorderedIsTrue);
         Condition finalCondition = mirrored ? cond.mirror() : cond;
         boolean finalUnorderedIsTrue = mirrored ? !unorderedIsTrue : unorderedIsTrue;
         ConditionFlag cmpCondition = toConditionFlag(((AArch64Kind) cmpKind).isInteger(), finalCondition, finalUnorderedIsTrue);
@@ -301,8 +239,7 @@
     @Override
     public void emitCompareBranch(PlatformKind cmpKind, Value left, Value right, Condition cond, boolean unorderedIsTrue, LabelRef trueDestination, LabelRef falseDestination,
                     double trueDestinationProbability) {
-        assert cmpKind == left.getPlatformKind() && cmpKind == right.getPlatformKind();
-        boolean mirrored = emitCompare(left, right, cond, unorderedIsTrue);
+        boolean mirrored = emitCompare(cmpKind, left, right, cond, unorderedIsTrue);
         Condition finalCondition = mirrored ? cond.mirror() : cond;
         boolean finalUnorderedIsTrue = mirrored ? !unorderedIsTrue : unorderedIsTrue;
         ConditionFlag cmpCondition = toConditionFlag(((AArch64Kind) cmpKind).isInteger(), finalCondition, finalUnorderedIsTrue);
@@ -368,17 +305,19 @@
     }
 
     /**
-     * Emits a gpCompare instruction, possibly reordering the parameters.
+     * This method emits the compare instruction, and may reorder the operands. It returns true if
+     * it did so.
      *
      * @param a the left operand of the comparison. Has to have same type as b. Non null.
      * @param b the right operand of the comparison. Has to have same type as a. Non null.
      * @return true if mirrored (i.e. "b cmp a" instead of "a cmp b" was done).
      */
-    private boolean emitCompare(Value a, Value b, Condition condition, boolean unorderedIsTrue) {
-        boolean mirrored;
+    private boolean emitCompare(PlatformKind cmpKind, Value a, Value b, Condition condition, boolean unorderedIsTrue) {
         AllocatableValue left;
         Value right;
-        if (((AArch64Kind) a.getPlatformKind()).isInteger()) {
+        boolean mirrored;
+        AArch64Kind kind = (AArch64Kind) cmpKind;
+        if (kind.isInteger()) {
             if (LIRValueUtil.isVariable(b) || b instanceof RegisterValue) {
                 left = loadReg(b);
                 right = loadNonConst(a);
--- a/graal/com.oracle.graal.compiler.aarch64/src/com/oracle/graal/compiler/aarch64/AArch64MoveFactory.java	Thu Jan 21 14:24:24 2016 -0800
+++ b/graal/com.oracle.graal.compiler.aarch64/src/com/oracle/graal/compiler/aarch64/AArch64MoveFactory.java	Fri Jan 22 13:50:04 2016 -1000
@@ -27,9 +27,10 @@
 import static com.oracle.graal.lir.LIRValueUtil.isConstantValue;
 import static com.oracle.graal.lir.LIRValueUtil.isStackSlotValue;
 
-import com.oracle.graal.compiler.aarch64.AArch64LIRGenerator.ConstantTableBaseProvider;
+import com.oracle.graal.asm.aarch64.AArch64MacroAssembler;
 import com.oracle.graal.lir.LIRInstruction;
 import com.oracle.graal.lir.aarch64.AArch64AddressValue;
+import com.oracle.graal.lir.aarch64.AArch64Move;
 import com.oracle.graal.lir.aarch64.AArch64Move.LoadAddressOp;
 import com.oracle.graal.lir.gen.LIRGeneratorTool.MoveFactory;
 
@@ -41,12 +42,6 @@
 
 public class AArch64MoveFactory implements MoveFactory {
 
-    protected final ConstantTableBaseProvider constantTableBaseProvider;
-
-    public AArch64MoveFactory(ConstantTableBaseProvider constantTableBaseProvider) {
-        this.constantTableBaseProvider = constantTableBaseProvider;
-    }
-
     @Override
     public LIRInstruction createMove(AllocatableValue dst, Value src) {
         boolean srcIsSlot = isStackSlotValue(src);
@@ -60,16 +55,14 @@
             if (srcIsSlot && dstIsSlot) {
                 throw JVMCIError.shouldNotReachHere(src.getClass() + " " + dst.getClass());
             } else {
-                // return new Move(dst, (AllocatableValue) src);
-                throw JVMCIError.unimplemented();
+                return new AArch64Move.Move(dst, (AllocatableValue) src);
             }
         }
     }
 
     @Override
     public LIRInstruction createStackMove(AllocatableValue result, AllocatableValue input) {
-        // return new AArch64Move.Move(result, input);
-        throw JVMCIError.unimplemented();
+        return new AArch64Move.Move(result, input);
     }
 
     @Override
@@ -77,15 +70,15 @@
         if (src instanceof JavaConstant) {
             JavaConstant javaConstant = (JavaConstant) src;
             if (canInlineConstant(javaConstant)) {
-                // return new AArch64Move.LoadInlineConstant(javaConstant, dst);
-                throw JVMCIError.unimplemented();
+                return new AArch64Move.LoadInlineConstant(javaConstant, dst);
             } else {
                 // return new AArch64Move.LoadConstantFromTable(javaConstant,
                 // constantTableBaseProvider.getConstantTableBase(), dst);
-                throw JVMCIError.unimplemented();
+                return new AArch64Move.LoadInlineConstant(javaConstant, dst);
             }
         } else {
-            throw JVMCIError.shouldNotReachHere(src.getClass().toString());
+            // throw JVMCIError.shouldNotReachHere(src.getClass().toString());
+            throw JVMCIError.unimplemented();
         }
     }
 
@@ -97,11 +90,9 @@
             case Char:
             case Short:
             case Int:
-                // return SPARCAssembler.isSimm13(c.asInt()) && !codeCache.needsDataPatch(c);
-                throw JVMCIError.unimplemented();
+                return AArch64MacroAssembler.isMovableImmediate(c.asInt());
             case Long:
-                // return SPARCAssembler.isSimm13(c.asLong()) && !codeCache.needsDataPatch(c);
-                throw JVMCIError.unimplemented();
+                return AArch64MacroAssembler.isMovableImmediate(c.asLong());
             case Object:
                 return c.isNull();
             default:
--- a/graal/com.oracle.graal.hotspot.aarch64/src/com/oracle/graal/hotspot/aarch64/AArch64HotSpotBackend.java	Thu Jan 21 14:24:24 2016 -0800
+++ b/graal/com.oracle.graal.hotspot.aarch64/src/com/oracle/graal/hotspot/aarch64/AArch64HotSpotBackend.java	Fri Jan 22 13:50:04 2016 -1000
@@ -162,7 +162,7 @@
                     }
                     masm.mov(64, sp, scratch);
                 } else {
-                    if (AArch64MacroAssembler.isArithmeticImmediate(totalFrameSize)) {
+                    if (AArch64MacroAssembler.isArithmeticImmediate(frameSize)) {
                         masm.sub(64, sp, scratch, frameSize);
                     } else {
                         try (ScratchRegister sc2 = masm.getScratchRegister()) {
@@ -180,7 +180,7 @@
         public void leave(CompilationResultBuilder crb) {
             AArch64MacroAssembler masm = (AArch64MacroAssembler) crb.asm;
             crb.blockComment("[method epilogue]");
-            final int frameSize = crb.frameMap.totalFrameSize();
+            final int frameSize = crb.frameMap.frameSize();
             if (AArch64MacroAssembler.isArithmeticImmediate(frameSize)) {
                 masm.add(64, sp, sp, frameSize);
             } else {
--- a/graal/com.oracle.graal.hotspot.aarch64/src/com/oracle/graal/hotspot/aarch64/AArch64HotSpotDeoptimizeOp.java	Thu Jan 21 14:24:24 2016 -0800
+++ b/graal/com.oracle.graal.hotspot.aarch64/src/com/oracle/graal/hotspot/aarch64/AArch64HotSpotDeoptimizeOp.java	Fri Jan 22 13:50:04 2016 -1000
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2013, 2016, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -22,20 +22,21 @@
  */
 package com.oracle.graal.hotspot.aarch64;
 
+import static com.oracle.graal.hotspot.HotSpotHostBackend.UNCOMMON_TRAP_HANDLER;
+
 import com.oracle.graal.asm.aarch64.AArch64MacroAssembler;
 import com.oracle.graal.lir.LIRFrameState;
 import com.oracle.graal.lir.LIRInstructionClass;
 import com.oracle.graal.lir.Opcode;
-import com.oracle.graal.lir.StandardOp;
+import com.oracle.graal.lir.StandardOp.BlockEndOp;
 import com.oracle.graal.lir.aarch64.AArch64BlockEndOp;
 import com.oracle.graal.lir.aarch64.AArch64Call;
 import com.oracle.graal.lir.asm.CompilationResultBuilder;
 
-import static com.oracle.graal.hotspot.HotSpotHostBackend.UNCOMMON_TRAP_HANDLER;
+@Opcode("DEOPT")
+public class AArch64HotSpotDeoptimizeOp extends AArch64BlockEndOp implements BlockEndOp {
+    public static final LIRInstructionClass<AArch64HotSpotDeoptimizeOp> TYPE = LIRInstructionClass.create(AArch64HotSpotDeoptimizeOp.class);
 
-@Opcode("DEOPT")
-public class AArch64HotSpotDeoptimizeOp extends AArch64BlockEndOp implements StandardOp.BlockEndOp {
-    public static final LIRInstructionClass<AArch64HotSpotDeoptimizeOp> TYPE = LIRInstructionClass.create(AArch64HotSpotDeoptimizeOp.class);
     @State private LIRFrameState info;
 
     public AArch64HotSpotDeoptimizeOp(LIRFrameState info) {
--- a/graal/com.oracle.graal.hotspot.aarch64/src/com/oracle/graal/hotspot/aarch64/AArch64HotSpotEpilogueOp.java	Thu Jan 21 14:24:24 2016 -0800
+++ b/graal/com.oracle.graal.hotspot.aarch64/src/com/oracle/graal/hotspot/aarch64/AArch64HotSpotEpilogueOp.java	Fri Jan 22 13:50:04 2016 -1000
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2013, 2015, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2013, 2016, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -25,18 +25,20 @@
 import com.oracle.graal.asm.aarch64.AArch64MacroAssembler;
 import com.oracle.graal.asm.aarch64.AArch64MacroAssembler.ScratchRegister;
 import com.oracle.graal.lir.LIRInstructionClass;
-import com.oracle.graal.lir.StandardOp;
 import com.oracle.graal.lir.aarch64.AArch64BlockEndOp;
 import com.oracle.graal.lir.asm.CompilationResultBuilder;
 
 import jdk.vm.ci.code.Register;
 import jdk.vm.ci.hotspot.HotSpotVMConfig;
 
-abstract class AArch64HotSpotEpilogueOp extends AArch64BlockEndOp implements StandardOp.BlockEndOp {
+/**
+ * Superclass for operations that leave a method's frame.
+ */
+abstract class AArch64HotSpotEpilogueOp extends AArch64BlockEndOp {
 
     private final HotSpotVMConfig config;
 
-    protected AArch64HotSpotEpilogueOp(LIRInstructionClass<? extends StandardOp.AbstractBlockEndOp> c, HotSpotVMConfig config) {
+    protected AArch64HotSpotEpilogueOp(LIRInstructionClass<? extends AArch64HotSpotEpilogueOp> c, HotSpotVMConfig config) {
         super(c);
         this.config = config;
     }
--- a/graal/com.oracle.graal.hotspot.aarch64/src/com/oracle/graal/hotspot/aarch64/AArch64HotSpotLIRGenerator.java	Thu Jan 21 14:24:24 2016 -0800
+++ b/graal/com.oracle.graal.hotspot.aarch64/src/com/oracle/graal/hotspot/aarch64/AArch64HotSpotLIRGenerator.java	Fri Jan 22 13:50:04 2016 -1000
@@ -23,20 +23,6 @@
 
 package com.oracle.graal.hotspot.aarch64;
 
-import jdk.vm.ci.aarch64.AArch64;
-import jdk.vm.ci.aarch64.AArch64Kind;
-import jdk.vm.ci.code.CallingConvention;
-import jdk.vm.ci.code.RegisterValue;
-import jdk.vm.ci.common.JVMCIError;
-import jdk.vm.ci.hotspot.HotSpotVMConfig;
-import jdk.vm.ci.meta.AllocatableValue;
-import jdk.vm.ci.meta.DeoptimizationAction;
-import jdk.vm.ci.meta.DeoptimizationReason;
-import jdk.vm.ci.meta.JavaConstant;
-import jdk.vm.ci.meta.JavaKind;
-import jdk.vm.ci.meta.LIRKind;
-import jdk.vm.ci.meta.Value;
-
 import com.oracle.graal.asm.aarch64.AArch64Address;
 import com.oracle.graal.compiler.aarch64.AArch64ArithmeticLIRGenerator;
 import com.oracle.graal.compiler.aarch64.AArch64LIRGenerator;
@@ -58,6 +44,19 @@
 import com.oracle.graal.lir.aarch64.AArch64Move;
 import com.oracle.graal.lir.gen.LIRGenerationResult;
 
+import jdk.vm.ci.aarch64.AArch64Kind;
+import jdk.vm.ci.code.CallingConvention;
+import jdk.vm.ci.code.RegisterValue;
+import jdk.vm.ci.common.JVMCIError;
+import jdk.vm.ci.hotspot.HotSpotVMConfig;
+import jdk.vm.ci.meta.AllocatableValue;
+import jdk.vm.ci.meta.DeoptimizationAction;
+import jdk.vm.ci.meta.DeoptimizationReason;
+import jdk.vm.ci.meta.JavaConstant;
+import jdk.vm.ci.meta.JavaKind;
+import jdk.vm.ci.meta.LIRKind;
+import jdk.vm.ci.meta.Value;
+
 /**
  * LIR generator specialized for AArch64 HotSpot.
  */
@@ -67,16 +66,12 @@
     private HotSpotDebugInfoBuilder debugInfoBuilder;
 
     protected AArch64HotSpotLIRGenerator(HotSpotProviders providers, HotSpotVMConfig config, LIRGenerationResult lirGenRes) {
-        this(providers, config, lirGenRes, new ConstantTableBaseProvider());
-    }
-
-    private AArch64HotSpotLIRGenerator(HotSpotProviders providers, HotSpotVMConfig config, LIRGenerationResult lirGenRes, ConstantTableBaseProvider constantTableBaseProvider) {
-        this(new AArch64HotSpotLIRKindTool(), new AArch64ArithmeticLIRGenerator(), new AArch64HotSpotMoveFactory(constantTableBaseProvider), providers, config, lirGenRes, constantTableBaseProvider);
+        this(new AArch64HotSpotLIRKindTool(), new AArch64ArithmeticLIRGenerator(), new AArch64HotSpotMoveFactory(), providers, config, lirGenRes);
     }
 
     protected AArch64HotSpotLIRGenerator(LIRKindTool lirKindTool, AArch64ArithmeticLIRGenerator arithmeticLIRGen, MoveFactory moveFactory, HotSpotProviders providers, HotSpotVMConfig config,
-                    LIRGenerationResult lirGenRes, ConstantTableBaseProvider constantTableBaseProvider) {
-        super(lirKindTool, arithmeticLIRGen, moveFactory, providers, lirGenRes, constantTableBaseProvider);
+                    LIRGenerationResult lirGenRes) {
+        super(lirKindTool, arithmeticLIRGen, moveFactory, providers, lirGenRes);
         this.config = config;
     }
 
@@ -111,39 +106,52 @@
         return debugInfoBuilder.lockStack();
     }
 
-    @SuppressWarnings("unused")
     @Override
     public Value emitCompress(Value pointer, HotSpotVMConfig.CompressEncoding encoding, boolean nonNull) {
         LIRKind inputKind = pointer.getLIRKind();
         assert inputKind.getPlatformKind() == AArch64Kind.QWORD;
-        Variable result = newVariable(LIRKind.reference(AArch64Kind.DWORD));
-        AllocatableValue base = getCompressionBase(encoding, inputKind);
-        // TODO (das) continue here.
-        throw JVMCIError.unimplemented("finish implementation");
-    }
-
-    private AllocatableValue getCompressionBase(HotSpotVMConfig.CompressEncoding encoding, LIRKind inputKind) {
         if (inputKind.isReference(0)) {
             // oop
-            return getProviders().getRegisters().getHeapBaseRegister().asValue();
+            Variable result = newVariable(LIRKind.reference(AArch64Kind.DWORD));
+            append(new AArch64HotSpotMove.CompressPointer(result, asAllocatable(pointer), getProviders().getRegisters().getHeapBaseRegister().asValue(), encoding, nonNull));
+            return result;
         } else {
             // metaspace pointer
-            if (encoding.base == 0) {
-                return AArch64.zr.asValue(LIRKind.value(AArch64Kind.QWORD));
-            } else {
-                return emitLoadConstant(LIRKind.value(AArch64Kind.QWORD), JavaConstant.forLong(encoding.base));
+            Variable result = newVariable(LIRKind.value(AArch64Kind.DWORD));
+            AllocatableValue base = Value.ILLEGAL;
+            if (encoding.base != 0) {
+                base = emitLoadConstant(LIRKind.value(AArch64Kind.QWORD), JavaConstant.forLong(encoding.base));
             }
+            append(new AArch64HotSpotMove.CompressPointer(result, asAllocatable(pointer), base, encoding, nonNull));
+            return result;
         }
     }
 
     @Override
     public Value emitUncompress(Value pointer, HotSpotVMConfig.CompressEncoding encoding, boolean nonNull) {
-        return null;
+        LIRKind inputKind = pointer.getLIRKind();
+        assert inputKind.getPlatformKind() == AArch64Kind.DWORD;
+        if (inputKind.isReference(0)) {
+            // oop
+            Variable result = newVariable(LIRKind.reference(AArch64Kind.QWORD));
+            append(new AArch64HotSpotMove.UncompressPointer(result, asAllocatable(pointer), getProviders().getRegisters().getHeapBaseRegister().asValue(), encoding, nonNull));
+            return result;
+        } else {
+            // metaspace pointer
+            Variable result = newVariable(LIRKind.value(AArch64Kind.QWORD));
+            AllocatableValue base = Value.ILLEGAL;
+            if (encoding.base != 0) {
+                base = emitLoadConstant(LIRKind.value(AArch64Kind.QWORD), JavaConstant.forLong(encoding.base));
+            }
+            append(new AArch64HotSpotMove.UncompressPointer(result, asAllocatable(pointer), base, encoding, nonNull));
+            return result;
+        }
     }
 
     @Override
     public void emitPrefetchAllocate(Value address) {
         // TODO (das) Optimization for later.
+        throw JVMCIError.unimplemented();
     }
 
     @Override
--- a/graal/com.oracle.graal.hotspot.aarch64/src/com/oracle/graal/hotspot/aarch64/AArch64HotSpotMove.java	Thu Jan 21 14:24:24 2016 -0800
+++ b/graal/com.oracle.graal.hotspot.aarch64/src/com/oracle/graal/hotspot/aarch64/AArch64HotSpotMove.java	Fri Jan 22 13:50:04 2016 -1000
@@ -25,20 +25,59 @@
 import static com.oracle.graal.lir.LIRInstruction.OperandFlag.HINT;
 import static com.oracle.graal.lir.LIRInstruction.OperandFlag.ILLEGAL;
 import static com.oracle.graal.lir.LIRInstruction.OperandFlag.REG;
+import static com.oracle.graal.lir.LIRInstruction.OperandFlag.STACK;
 import static jdk.vm.ci.code.ValueUtil.asRegister;
 
 import com.oracle.graal.asm.aarch64.AArch64Assembler;
 import com.oracle.graal.asm.aarch64.AArch64MacroAssembler;
 import com.oracle.graal.lir.LIRInstructionClass;
+import com.oracle.graal.lir.StandardOp.LoadConstantOp;
 import com.oracle.graal.lir.aarch64.AArch64LIRInstruction;
 import com.oracle.graal.lir.asm.CompilationResultBuilder;
 
 import jdk.vm.ci.code.Register;
+import jdk.vm.ci.common.JVMCIError;
+import jdk.vm.ci.hotspot.HotSpotConstant;
 import jdk.vm.ci.hotspot.HotSpotVMConfig.CompressEncoding;
 import jdk.vm.ci.meta.AllocatableValue;
+import jdk.vm.ci.meta.Constant;
 
 public class AArch64HotSpotMove {
 
+    public static class LoadHotSpotObjectConstantInline extends AArch64LIRInstruction implements LoadConstantOp {
+        public static final LIRInstructionClass<LoadHotSpotObjectConstantInline> TYPE = LIRInstructionClass.create(LoadHotSpotObjectConstantInline.class);
+
+        private HotSpotConstant constant;
+        @Def({REG, STACK}) AllocatableValue result;
+
+        public LoadHotSpotObjectConstantInline(HotSpotConstant constant, AllocatableValue result) {
+            super(TYPE);
+            this.constant = constant;
+            this.result = result;
+        }
+
+        @Override
+        protected void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
+            crb.recordInlineDataInCode(constant);
+            if (constant.isCompressed()) {
+                // TODO: emit a patchable 32-bit move for the compressed oop (not the 64-bit forceMov below)
+                throw JVMCIError.unimplemented();
+            } else {
+                masm.forceMov(asRegister(result), 0);
+            }
+        }
+
+        @Override
+        public AllocatableValue getResult() {
+            return result;
+        }
+
+        @Override
+        public Constant getConstant() {
+            return constant;
+        }
+    }
+
     /**
      * Compresses a 8-byte pointer as a 4-byte int.
      */
@@ -127,6 +166,7 @@
             } else {
                 // if ptr is null it has to be null after decompression
                 // masm.cmp(64, );
+                throw JVMCIError.unimplemented();
             }
 
         }
@@ -149,8 +189,8 @@
     public static void decodeKlassPointer(AArch64MacroAssembler masm, Register result, Register ptr, Register klassBase, CompressEncoding encoding) {
         // result = klassBase + ptr << shift
         if (encoding.shift != 0 || encoding.base != 0) {
-            // (shift != 0 -> shift == alignment) && (shift == 0 -> base == 0)
-            assert (encoding.shift == 0 || encoding.shift == encoding.alignment) && (encoding.shift != 0 || encoding.base == 0) : "Decode algorithm is wrong.";
+            // (shift != 0 -> shift == alignment)
+            assert (encoding.shift == 0 || encoding.shift == encoding.alignment) : "Decode algorithm is wrong: " + encoding;
             masm.add(64, result, klassBase, ptr, AArch64Assembler.ExtendType.UXTX, encoding.shift);
         }
     }
--- a/graal/com.oracle.graal.hotspot.aarch64/src/com/oracle/graal/hotspot/aarch64/AArch64HotSpotMoveFactory.java	Thu Jan 21 14:24:24 2016 -0800
+++ b/graal/com.oracle.graal.hotspot.aarch64/src/com/oracle/graal/hotspot/aarch64/AArch64HotSpotMoveFactory.java	Fri Jan 22 13:50:04 2016 -1000
@@ -26,11 +26,9 @@
 import static jdk.vm.ci.meta.JavaConstant.INT_0;
 import static jdk.vm.ci.meta.JavaConstant.LONG_0;
 
-import com.oracle.graal.compiler.aarch64.AArch64LIRGenerator.ConstantTableBaseProvider;
 import com.oracle.graal.compiler.aarch64.AArch64MoveFactory;
 import com.oracle.graal.lir.LIRInstruction;
 
-import jdk.vm.ci.common.JVMCIError;
 import jdk.vm.ci.hotspot.HotSpotCompressedNullConstant;
 import jdk.vm.ci.hotspot.HotSpotConstant;
 import jdk.vm.ci.hotspot.HotSpotObjectConstant;
@@ -40,10 +38,6 @@
 
 public class AArch64HotSpotMoveFactory extends AArch64MoveFactory {
 
-    public AArch64HotSpotMoveFactory(ConstantTableBaseProvider constantTableBaseProvider) {
-        super(constantTableBaseProvider);
-    }
-
     @Override
     public boolean canInlineConstant(JavaConstant c) {
         if (HotSpotCompressedNullConstant.COMPRESSED_NULL.equals(c)) {
@@ -68,12 +62,12 @@
         if (usedSource instanceof HotSpotConstant) {
             HotSpotConstant constant = (HotSpotConstant) usedSource;
             if (constant.isCompressed()) {
-                // return new SPARCHotSpotMove.LoadHotSpotObjectConstantInline(constant, dst);
-                throw JVMCIError.unimplemented();
+                return new AArch64HotSpotMove.LoadHotSpotObjectConstantInline(constant, dst);
             } else {
+                // TODO: decide whether uncompressed oops should load via a constant table (as on SPARC) instead of inlining
                 // return new SPARCHotSpotMove.LoadHotSpotObjectConstantFromTable(constant, dst,
                 // constantTableBaseProvider.getConstantTableBase());
-                throw JVMCIError.unimplemented();
+                return new AArch64HotSpotMove.LoadHotSpotObjectConstantInline(constant, dst);
             }
         } else {
             return super.createLoad(dst, usedSource);
--- a/graal/com.oracle.graal.hotspot.aarch64/src/com/oracle/graal/hotspot/aarch64/AArch64HotSpotNodeLIRBuilder.java	Thu Jan 21 14:24:24 2016 -0800
+++ b/graal/com.oracle.graal.hotspot.aarch64/src/com/oracle/graal/hotspot/aarch64/AArch64HotSpotNodeLIRBuilder.java	Fri Jan 22 13:50:04 2016 -1000
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2015, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2015, 2016, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -29,21 +29,6 @@
 import static jdk.vm.ci.hotspot.aarch64.AArch64HotSpotRegisterConfig.fp;
 import static jdk.vm.ci.hotspot.aarch64.AArch64HotSpotRegisterConfig.inlineCacheRegister;
 import static jdk.vm.ci.hotspot.aarch64.AArch64HotSpotRegisterConfig.metaspaceMethodRegister;
-import jdk.vm.ci.aarch64.AArch64Kind;
-import jdk.vm.ci.amd64.AMD64Kind;
-import jdk.vm.ci.code.BytecodeFrame;
-import jdk.vm.ci.code.CallingConvention;
-import jdk.vm.ci.code.Register;
-import jdk.vm.ci.code.RegisterValue;
-import jdk.vm.ci.code.StackSlot;
-import jdk.vm.ci.code.ValueUtil;
-import jdk.vm.ci.common.JVMCIError;
-import jdk.vm.ci.hotspot.HotSpotCallingConventionType;
-import jdk.vm.ci.hotspot.HotSpotResolvedJavaMethod;
-import jdk.vm.ci.meta.AllocatableValue;
-import jdk.vm.ci.meta.JavaType;
-import jdk.vm.ci.meta.LIRKind;
-import jdk.vm.ci.meta.Value;
 
 import com.oracle.graal.compiler.aarch64.AArch64NodeLIRBuilder;
 import com.oracle.graal.compiler.aarch64.AArch64NodeMatchRules;
@@ -62,8 +47,8 @@
 import com.oracle.graal.lir.aarch64.AArch64BreakpointOp;
 import com.oracle.graal.lir.aarch64.AArch64Move.CompareAndSwap;
 import com.oracle.graal.lir.gen.LIRGeneratorTool;
+import com.oracle.graal.nodes.BreakpointNode;
 import com.oracle.graal.nodes.CallTargetNode.InvokeKind;
-import com.oracle.graal.nodes.BreakpointNode;
 import com.oracle.graal.nodes.DirectCallTargetNode;
 import com.oracle.graal.nodes.FullInfopointNode;
 import com.oracle.graal.nodes.IndirectCallTargetNode;
@@ -73,6 +58,20 @@
 import com.oracle.graal.nodes.ValueNode;
 import com.oracle.graal.nodes.spi.NodeValueMap;
 
+import jdk.vm.ci.aarch64.AArch64Kind;
+import jdk.vm.ci.code.BytecodeFrame;
+import jdk.vm.ci.code.CallingConvention;
+import jdk.vm.ci.code.Register;
+import jdk.vm.ci.code.RegisterValue;
+import jdk.vm.ci.code.StackSlot;
+import jdk.vm.ci.code.ValueUtil;
+import jdk.vm.ci.hotspot.HotSpotCallingConventionType;
+import jdk.vm.ci.hotspot.HotSpotResolvedJavaMethod;
+import jdk.vm.ci.meta.AllocatableValue;
+import jdk.vm.ci.meta.JavaType;
+import jdk.vm.ci.meta.LIRKind;
+import jdk.vm.ci.meta.Value;
+
 /**
  * LIR generator specialized for AArch64 HotSpot.
  */
@@ -108,8 +107,8 @@
                 }
             }
         }
-        params[params.length - 2] = fp.asValue(LIRKind.value(AMD64Kind.QWORD));
-        params[params.length - 1] = lr.asValue(LIRKind.value(AMD64Kind.QWORD));
+        params[params.length - 2] = fp.asValue(LIRKind.value(AArch64Kind.QWORD));
+        params[params.length - 1] = lr.asValue(LIRKind.value(AArch64Kind.QWORD));
 
         gen.emitIncomingValues(params);
 
@@ -153,7 +152,7 @@
 
     @Override
     public void emitPatchReturnAddress(ValueNode address) {
-        throw JVMCIError.unimplemented();
+        append(new AArch64HotSpotPatchReturnAddressOp(gen.load(operand(address))));
     }
 
     @Override
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/graal/com.oracle.graal.hotspot.aarch64/src/com/oracle/graal/hotspot/aarch64/AArch64HotSpotPatchReturnAddressOp.java	Fri Jan 22 13:50:04 2016 -1000
@@ -0,0 +1,62 @@
+/*
+ * Copyright (c) 2016, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+package com.oracle.graal.hotspot.aarch64;
+
+import static com.oracle.graal.lir.LIRInstruction.OperandFlag.REG;
+import static jdk.vm.ci.aarch64.AArch64.sp;
+import static jdk.vm.ci.code.ValueUtil.asRegister;
+
+import com.oracle.graal.asm.aarch64.AArch64Address;
+import com.oracle.graal.asm.aarch64.AArch64MacroAssembler;
+import com.oracle.graal.asm.aarch64.AArch64MacroAssembler.AArch64ExceptionCode;
+import com.oracle.graal.lir.LIRInstructionClass;
+import com.oracle.graal.lir.Opcode;
+import com.oracle.graal.lir.aarch64.AArch64LIRInstruction;
+import com.oracle.graal.lir.asm.CompilationResultBuilder;
+
+import jdk.vm.ci.meta.AllocatableValue;
+
+/**
+ * Patch the return address of the current frame.
+ */
+@Opcode("PATCH_RETURN")
+final class AArch64HotSpotPatchReturnAddressOp extends AArch64LIRInstruction {
+
+    public static final LIRInstructionClass<AArch64HotSpotPatchReturnAddressOp> TYPE = LIRInstructionClass.create(AArch64HotSpotPatchReturnAddressOp.class);
+
+    @Use(REG) AllocatableValue address;
+
+    AArch64HotSpotPatchReturnAddressOp(AllocatableValue address) {
+        super(TYPE);
+        this.address = address;
+    }
+
+    @Override
+    public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
+        final int frameSize = crb.frameMap.frameSize();
+        // NOTE(review): assumes the saved lr sits at [sp + frameSize] — confirm against the AArch64 frame layout
+        AArch64Address lrAddress = AArch64Address.createUnscaledImmediateAddress(sp, frameSize);
+        masm.brk(AArch64ExceptionCode.BREAKPOINT); // deliberate trap until the lr slot offset above is verified
+        masm.str(64, asRegister(address), lrAddress);
+    }
+}
--- a/graal/com.oracle.graal.lir.aarch64/src/com/oracle/graal/lir/aarch64/AArch64AddressValue.java	Thu Jan 21 14:24:24 2016 -0800
+++ b/graal/com.oracle.graal.lir.aarch64/src/com/oracle/graal/lir/aarch64/AArch64AddressValue.java	Fri Jan 22 13:50:04 2016 -1000
@@ -32,7 +32,9 @@
 import jdk.vm.ci.meta.Value;
 
 import com.oracle.graal.asm.aarch64.AArch64Address;
+import com.oracle.graal.asm.aarch64.AArch64Address.AddressingMode;
 import com.oracle.graal.asm.aarch64.AArch64Assembler;
+import com.oracle.graal.asm.aarch64.AArch64Assembler.ExtendType;
 import com.oracle.graal.lir.CompositeValue;
 import com.oracle.graal.lir.InstructionValueConsumer;
 import com.oracle.graal.lir.InstructionValueProcedure;
@@ -45,13 +47,14 @@
     @Component({OperandFlag.REG, OperandFlag.ILLEGAL}) protected AllocatableValue base;
     @Component({OperandFlag.REG, OperandFlag.ILLEGAL}) protected AllocatableValue offset;
     private final int immediate;
+
     /**
      * Whether register offset should be scaled or not.
      */
     private final boolean scaled;
-    private final AArch64Address.AddressingMode addressingMode;
+    private final AddressingMode addressingMode;
 
-    public AArch64AddressValue(LIRKind kind, AllocatableValue base, AllocatableValue offset, int immediate, boolean scaled, AArch64Address.AddressingMode addressingMode) {
+    public AArch64AddressValue(LIRKind kind, AllocatableValue base, AllocatableValue offset, int immediate, boolean scaled, AddressingMode addressingMode) {
         super(kind);
         this.base = base;
         this.offset = offset;
@@ -84,14 +87,14 @@
         return scaled;
     }
 
-    public AArch64Address.AddressingMode getAddressingMode() {
+    public AddressingMode getAddressingMode() {
         return addressingMode;
     }
 
     public AArch64Address toAddress() {
         Register baseReg = toRegister(base);
         Register offsetReg = toRegister(offset);
-        AArch64Assembler.ExtendType extendType = addressingMode == AArch64Address.AddressingMode.EXTENDED_REGISTER_OFFSET ? AArch64Assembler.ExtendType.SXTW : null;
+        AArch64Assembler.ExtendType extendType = addressingMode == AddressingMode.EXTENDED_REGISTER_OFFSET ? ExtendType.SXTW : null;
         return AArch64Address.createAddress(addressingMode, baseReg, offsetReg, immediate, scaled, extendType);
     }
 
--- a/graal/com.oracle.graal.lir.aarch64/src/com/oracle/graal/lir/aarch64/AArch64ArithmeticOp.java	Thu Jan 21 14:24:24 2016 -0800
+++ b/graal/com.oracle.graal.lir.aarch64/src/com/oracle/graal/lir/aarch64/AArch64ArithmeticOp.java	Fri Jan 22 13:50:04 2016 -1000
@@ -205,7 +205,7 @@
     }
 
     public static class BinaryOp extends AArch64LIRInstruction {
-        private static final LIRInstructionClass<BinaryConstOp> TYPE = LIRInstructionClass.create(BinaryConstOp.class);
+        private static final LIRInstructionClass<BinaryOp> TYPE = LIRInstructionClass.create(BinaryOp.class);
 
         @Opcode private final AArch64ArithmeticOp op;
         @Def({REG}) protected AllocatableValue result;
--- a/graal/com.oracle.graal.lir.aarch64/src/com/oracle/graal/lir/aarch64/AArch64BlockEndOp.java	Thu Jan 21 14:24:24 2016 -0800
+++ b/graal/com.oracle.graal.lir.aarch64/src/com/oracle/graal/lir/aarch64/AArch64BlockEndOp.java	Fri Jan 22 13:50:04 2016 -1000
@@ -25,14 +25,14 @@
 
 import com.oracle.graal.asm.aarch64.AArch64MacroAssembler;
 import com.oracle.graal.lir.LIRInstructionClass;
-import com.oracle.graal.lir.StandardOp;
+import com.oracle.graal.lir.StandardOp.AbstractBlockEndOp;
 import com.oracle.graal.lir.asm.CompilationResultBuilder;
 
-public abstract class AArch64BlockEndOp extends StandardOp.AbstractBlockEndOp {
+public abstract class AArch64BlockEndOp extends AbstractBlockEndOp {
 
     public static final LIRInstructionClass<AArch64BlockEndOp> TYPE = LIRInstructionClass.create(AArch64BlockEndOp.class);
 
-    protected AArch64BlockEndOp(LIRInstructionClass<? extends StandardOp.AbstractBlockEndOp> c) {
+    protected AArch64BlockEndOp(LIRInstructionClass<? extends AArch64BlockEndOp> c) {
         super(c);
     }
 
@@ -41,5 +41,5 @@
         emitCode(crb, (AArch64MacroAssembler) crb.asm);
     }
 
-    public abstract void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm);
+    protected abstract void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm);
 }
--- a/graal/com.oracle.graal.lir.aarch64/src/com/oracle/graal/lir/aarch64/AArch64LIRInstruction.java	Thu Jan 21 14:24:24 2016 -0800
+++ b/graal/com.oracle.graal.lir.aarch64/src/com/oracle/graal/lir/aarch64/AArch64LIRInstruction.java	Fri Jan 22 13:50:04 2016 -1000
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2013, 2016, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -37,5 +37,5 @@
         emitCode(crb, (AArch64MacroAssembler) crb.asm);
     }
 
-    public abstract void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm);
+    protected abstract void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm);
 }
--- a/graal/com.oracle.graal.lir.aarch64/src/com/oracle/graal/lir/aarch64/AArch64Move.java	Thu Jan 21 14:24:24 2016 -0800
+++ b/graal/com.oracle.graal.lir.aarch64/src/com/oracle/graal/lir/aarch64/AArch64Move.java	Fri Jan 22 13:50:04 2016 -1000
@@ -23,6 +23,7 @@
 package com.oracle.graal.lir.aarch64;
 
 import static com.oracle.graal.lir.LIRInstruction.OperandFlag.COMPOSITE;
+import static com.oracle.graal.lir.LIRInstruction.OperandFlag.HINT;
 import static com.oracle.graal.lir.LIRInstruction.OperandFlag.REG;
 import static com.oracle.graal.lir.LIRInstruction.OperandFlag.STACK;
 import static com.oracle.graal.lir.LIRInstruction.OperandFlag.UNINITIALIZED;
@@ -34,6 +35,22 @@
 import static jdk.vm.ci.code.ValueUtil.asStackSlot;
 import static jdk.vm.ci.code.ValueUtil.isRegister;
 import static jdk.vm.ci.code.ValueUtil.isStackSlot;
+
+import com.oracle.graal.asm.Label;
+import com.oracle.graal.asm.aarch64.AArch64Address;
+import com.oracle.graal.asm.aarch64.AArch64Assembler;
+import com.oracle.graal.asm.aarch64.AArch64MacroAssembler;
+import com.oracle.graal.asm.aarch64.AArch64MacroAssembler.ScratchRegister;
+import com.oracle.graal.lir.LIRFrameState;
+import com.oracle.graal.lir.LIRInstructionClass;
+import com.oracle.graal.lir.Opcode;
+import com.oracle.graal.lir.StandardOp;
+import com.oracle.graal.lir.StandardOp.LoadConstantOp;
+import com.oracle.graal.lir.StandardOp.NullCheck;
+import com.oracle.graal.lir.StandardOp.ValueMoveOp;
+import com.oracle.graal.lir.VirtualStackSlot;
+import com.oracle.graal.lir.asm.CompilationResultBuilder;
+
 import jdk.vm.ci.aarch64.AArch64;
 import jdk.vm.ci.aarch64.AArch64Kind;
 import jdk.vm.ci.code.Register;
@@ -42,32 +59,51 @@
 import jdk.vm.ci.meta.AllocatableValue;
 import jdk.vm.ci.meta.Constant;
 import jdk.vm.ci.meta.JavaConstant;
+import jdk.vm.ci.meta.LIRKind;
 import jdk.vm.ci.meta.PlatformKind;
 import jdk.vm.ci.meta.Value;
 
-import com.oracle.graal.asm.Label;
-import com.oracle.graal.asm.aarch64.AArch64Address;
-import com.oracle.graal.asm.aarch64.AArch64Assembler;
-import com.oracle.graal.asm.aarch64.AArch64MacroAssembler;
-import com.oracle.graal.lir.LIRFrameState;
-import com.oracle.graal.lir.LIRInstructionClass;
-import com.oracle.graal.lir.Opcode;
-import com.oracle.graal.lir.StandardOp;
-import com.oracle.graal.lir.StandardOp.NullCheck;
-import com.oracle.graal.lir.StandardOp.ValueMoveOp;
-import com.oracle.graal.lir.VirtualStackSlot;
-import com.oracle.graal.lir.asm.CompilationResultBuilder;
-
 public class AArch64Move {
 
+    public static class LoadInlineConstant extends AArch64LIRInstruction implements LoadConstantOp {
+        public static final LIRInstructionClass<LoadInlineConstant> TYPE = LIRInstructionClass.create(LoadInlineConstant.class);
+
+        private JavaConstant constant;
+        @Def({REG, STACK}) AllocatableValue result;
+
+        public LoadInlineConstant(JavaConstant constant, AllocatableValue result) {
+            super(TYPE);
+            this.constant = constant;
+            this.result = result;
+        }
+
+        @Override
+        public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
+            if (isRegister(result)) {
+                const2reg(crb, masm, result, constant);
+            } else if (isStackSlot(result)) {
+                StackSlot slot = asStackSlot(result);
+                const2stack(crb, masm, slot, constant);
+            }
+        }
+
+        public Constant getConstant() {
+            return constant;
+        }
+
+        public AllocatableValue getResult() {
+            return result;
+        }
+    }
+
     @Opcode("MOVE")
-    public static class MoveToRegOp extends AArch64LIRInstruction implements ValueMoveOp {
-        public static final LIRInstructionClass<MoveToRegOp> TYPE = LIRInstructionClass.create(MoveToRegOp.class);
+    public static class Move extends AArch64LIRInstruction implements ValueMoveOp {
+        public static final LIRInstructionClass<Move> TYPE = LIRInstructionClass.create(Move.class);
 
-        @Def protected AllocatableValue result;
+        @Def({REG, STACK, HINT}) protected AllocatableValue result;
         @Use({REG, STACK}) protected AllocatableValue input;
 
-        public MoveToRegOp(AllocatableValue result, AllocatableValue input) {
+        public Move(AllocatableValue result, AllocatableValue input) {
             super(TYPE);
             this.result = result;
             this.input = input;
@@ -89,69 +125,6 @@
         }
     }
 
-    /**
-     * If the destination is a StackSlot we cannot have a StackSlot or Constant as the source, hence
-     * we have to special case this particular combination. Note: We allow a register as the
-     * destination too just to give the register allocator more freedom.
-     */
-    @Opcode("MOVE")
-    public static class MoveToStackOp extends AArch64LIRInstruction implements StandardOp.ValueMoveOp {
-        public static final LIRInstructionClass<MoveToStackOp> TYPE = LIRInstructionClass.create(MoveToStackOp.class);
-
-        @Def({STACK, REG}) protected AllocatableValue result;
-        @Use protected AllocatableValue input;
-
-        public MoveToStackOp(AllocatableValue result, AllocatableValue input) {
-            super(TYPE);
-            this.result = result;
-            this.input = input;
-        }
-
-        @Override
-        public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
-            move(crb, masm, getResult(), getInput());
-        }
-
-        @Override
-        public AllocatableValue getInput() {
-            return input;
-        }
-
-        @Override
-        public AllocatableValue getResult() {
-            return result;
-        }
-    }
-
-    @Opcode("MOVE")
-    public static class MoveFromConstOp extends AArch64LIRInstruction implements StandardOp.LoadConstantOp {
-        public static final LIRInstructionClass<MoveFromConstOp> TYPE = LIRInstructionClass.create(MoveFromConstOp.class);
-
-        @Def protected AllocatableValue result;
-        private final JavaConstant input;
-
-        public MoveFromConstOp(AllocatableValue result, JavaConstant input) {
-            super(TYPE);
-            this.result = result;
-            this.input = input;
-        }
-
-        @Override
-        public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
-            const2reg(crb, masm, result, input);
-        }
-
-        @Override
-        public Constant getConstant() {
-            return input;
-        }
-
-        @Override
-        public AllocatableValue getResult() {
-            return result;
-        }
-    }
-
     public static class LoadAddressOp extends AArch64LIRInstruction {
         public static final LIRInstructionClass<LoadAddressOp> TYPE = LIRInstructionClass.create(LoadAddressOp.class);
 
@@ -187,7 +160,7 @@
         @Override
         public void emitCode(CompilationResultBuilder crb, AArch64MacroAssembler masm) {
             Register dst = asRegister(result);
-            int alignment = 16;
+            final int alignment = 8;
             masm.loadAddress(dst, (AArch64Address) crb.recordDataReferenceInCode(data, alignment), alignment);
         }
     }
@@ -485,7 +458,7 @@
         }
     }
 
-    private static void const2reg(CompilationResultBuilder crb, AArch64MacroAssembler masm, AllocatableValue result, JavaConstant input) {
+    private static void const2reg(CompilationResultBuilder crb, AArch64MacroAssembler masm, Value result, JavaConstant input) {
         Register dst = asRegister(result);
         switch (input.getJavaKind().getStackKind()) {
             case Int:
@@ -523,6 +496,24 @@
         }
     }
 
+    private static void const2stack(CompilationResultBuilder crb, AArch64MacroAssembler masm, Value result, JavaConstant constant) {
+        if (constant.isDefaultForKind() || constant.isNull()) {
+            AArch64Address resultAddress = (AArch64Address) crb.asAddress(result);
+            // emitStore(g0.asValue(LIRKind.combine(result)), resultAddress,
+            // result.getPlatformKind(), null, crb, masm);
+            throw JVMCIError.unimplemented("" + resultAddress);
+        } else {
+            try (ScratchRegister sc = masm.getScratchRegister()) {
+                Value scratchRegisterValue = sc.getRegister().asValue(LIRKind.combine(result));
+                const2reg(crb, masm, scratchRegisterValue, constant);
+                AArch64Address resultAddress = (AArch64Address) crb.asAddress(result);
+                // emitStore(scratchRegisterValue, resultAddress, result.getPlatformKind(), null,
+                // crb, masm);
+                throw JVMCIError.unimplemented("" + resultAddress);
+            }
+        }
+    }
+
     /**
      * Returns AArch64Address of given StackSlot. We cannot use CompilationResultBuilder.asAddress
      * since this calls AArch64MacroAssembler.makeAddress with displacements that may be larger than
@@ -538,11 +529,10 @@
      * @return AArch64Address of given StackSlot. Uses scratch register if necessary to do so.
      */
     private static AArch64Address loadStackSlotAddress(CompilationResultBuilder crb, AArch64MacroAssembler masm, StackSlot slot, AllocatableValue scratch) {
-        AArch64Kind kind = (AArch64Kind) scratch.getPlatformKind();
-        assert kind.isInteger();
         int displacement = crb.frameMap.offsetForStackSlot(slot);
         int transferSize = slot.getPlatformKind().getSizeInBytes();
         Register scratchReg = Value.ILLEGAL.equals(scratch) ? AArch64.zr : asRegister(scratch);
         return masm.makeAddress(AArch64.sp, displacement, scratchReg, transferSize, /* allowOverwrite */false);
     }
+
 }