changeset 5846:da0eff406c2c

Merge
author Gilles Duboscq <duboscq@ssw.jku.at>
date Tue, 17 Jul 2012 20:07:53 +0200
parents 421e767d8038 (current diff) a432e6d43aa1 (diff)
children d6257bd5e7f2
files
diffstat 12 files changed, 281 insertions(+), 118 deletions(-)
--- a/graal/com.oracle.graal.compiler/src/com/oracle/graal/compiler/GraalOptions.java	Tue Jul 17 20:07:00 2012 +0200
+++ b/graal/com.oracle.graal.compiler/src/com/oracle/graal/compiler/GraalOptions.java	Tue Jul 17 20:07:53 2012 +0200
@@ -181,7 +181,7 @@
     public static boolean GenSafepoints                      = true;
     public static boolean GenLoopSafepoints                  = true;
            static boolean UseTypeCheckHints                  = true;
-    public static boolean InlineVTableStubs                  = ____;
+    public static boolean InlineVTableStubs                  = true;
     public static boolean AlwaysInlineVTableStubs            = ____;
 
     public static boolean GenAssertionCode                   = ____;
--- a/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/meta/HotSpotRuntime.java	Tue Jul 17 20:07:00 2012 +0200
+++ b/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/meta/HotSpotRuntime.java	Tue Jul 17 20:07:53 2012 +0200
@@ -246,13 +246,10 @@
                     GraalOptions.InlineVTableStubs &&
                     (GraalOptions.AlwaysInlineVTableStubs || invoke.isMegamorphic())) {
 
-                    // TODO (dnsimon) I'm not sure of other invariants of HotSpot's calling conventions that may
-                    // be required for register indirect calls.
-                    assert false : "HotSpot expects the methodOop of the callee to be in rbx - this is yet to be implemented for inline vtable dispatch";
-
-                    // TODO: successive inlined invokevirtuals to the same method cause register allocation to fail - fix this!
                     HotSpotResolvedJavaMethod hsMethod = (HotSpotResolvedJavaMethod) callTarget.targetMethod();
                     if (!hsMethod.holder().isInterface()) {
+                        // We use LocationNode.ANY_LOCATION for the reads that access the vtable entry and the compiled code entry
+                        // as HotSpot does not guarantee they are final values.
                         int vtableEntryOffset = hsMethod.vtableEntryOffset();
                         assert vtableEntryOffset != 0;
                         SafeReadNode hub = safeReadHub(graph, receiver, StructuredGraph.INVALID_GRAPH_ID);
@@ -260,7 +257,11 @@
                         Stamp nonNullWordStamp = StampFactory.forWord(wordKind, true);
                         ReadNode methodOop = graph.add(new ReadNode(hub, LocationNode.create(LocationNode.ANY_LOCATION, wordKind, vtableEntryOffset, graph), nonNullWordStamp));
                         ReadNode compiledEntry = graph.add(new ReadNode(methodOop, LocationNode.create(LocationNode.ANY_LOCATION, wordKind, config.methodCompiledEntryOffset, graph), nonNullWordStamp));
-                        callTarget.setAddress(compiledEntry);
+                        callTarget.setComputedAddress(compiledEntry);
+
+                        // Append the methodOop to the arguments so that it can be explicitly passed in RBX as
+                        // is required for all compiled calls in HotSpot.
+                        callTarget.arguments().add(methodOop);
 
                         graph.addBeforeFixed(invoke.node(), hub);
                         graph.addAfterFixed(hub, methodOop);
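
The lowering above replaces the virtual dispatch with three dependent reads: the receiver's hub, the vtable slot (yielding the methodOop), and the methodOop's compiled entry point. A minimal Java sketch of the equivalent pointer chasing follows; all offsets are hypothetical placeholders, the real values come from the resolved method and the VM configuration:

    // Illustrative sketch only; all offsets below are hypothetical placeholders.
    interface WordReader {
        long readWord(long address); // reads one machine word at the given address
    }

    final class VtableDispatchSketch {
        static final int HUB_OFFSET = 8;               // hypothetical: receiver -> hub (klassOop)
        static final int VTABLE_ENTRY_OFFSET = 0x1c8;  // hypothetical: hsMethod.vtableEntryOffset()
        static final int COMPILED_ENTRY_OFFSET = 0x40; // hypothetical: config.methodCompiledEntryOffset

        // Returns the code address to call; the methodOop from the second read
        // must additionally be passed to the callee in RBX.
        static long resolve(WordReader mem, long receiver) {
            long hub = mem.readWord(receiver + HUB_OFFSET);           // SafeReadNode hub
            long methodOop = mem.readWord(hub + VTABLE_ENTRY_OFFSET); // ReadNode methodOop
            return mem.readWord(methodOop + COMPILED_ENTRY_OFFSET);   // ReadNode compiledEntry
        }
    }

None of the three reads may be constant-folded, which is why they use LocationNode.ANY_LOCATION: HotSpot can repoint a method's compiled entry, for example after deoptimization or recompilation.
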
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/target/amd64/AMD64DirectCallOp.java	Tue Jul 17 20:07:53 2012 +0200
@@ -0,0 +1,105 @@
+/*
+ * Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+package com.oracle.graal.hotspot.target.amd64;
+
+import static com.oracle.graal.hotspot.meta.HotSpotXirGenerator.*;
+import static com.oracle.graal.nodes.java.MethodCallTargetNode.InvokeKind.*;
+
+import com.oracle.graal.api.code.CompilationResult.Mark;
+import com.oracle.graal.api.meta.*;
+import com.oracle.graal.lir.*;
+import com.oracle.graal.lir.LIRInstruction.*;
+import com.oracle.graal.lir.amd64.*;
+import com.oracle.graal.lir.amd64.AMD64Call.DirectCallOp;
+import com.oracle.graal.lir.asm.*;
+import com.oracle.graal.nodes.java.MethodCallTargetNode.InvokeKind;
+import com.oracle.max.asm.*;
+import com.oracle.max.asm.target.amd64.*;
+
+/**
+ * A direct call that complies with the conventions for such calls in HotSpot.
+ * In particular, for calls using an inline cache, a MOVE instruction is
+ * emitted just prior to the aligned direct call. This instruction
+ * (which moves null into RAX) is patched by the C++ Graal code to replace the
+ * null constant with Universe::non_oop_word(), a special sentinel
+ * used for the initial value of the klassOop in an inline cache.
+ * <p>
+ * For non-inline cache calls, a static call stub is emitted.
+ */
+@Opcode("CALL_DIRECT")
+final class AMD64DirectCallOp extends DirectCallOp {
+
+    /**
+     * The mark emitted at the position of the direct call instruction.
+     * This is only recorded for calls that have an associated static
+     * call stub (i.e., {@code invokeKind == Static || invokeKind == Special}).
+     */
+    Mark callsiteMark;
+
+    private final InvokeKind invokeKind;
+
+    AMD64DirectCallOp(Object targetMethod, Value result, Value[] parameters, LIRFrameState state, InvokeKind invokeKind, LIR lir) {
+        super(targetMethod, result, parameters, state, null);
+        this.invokeKind = invokeKind;
+
+        if (invokeKind == Static || invokeKind == Special) {
+            lir.stubs.add(new AMD64Code() {
+                public String description() {
+                    return "static call stub for Invoke" + AMD64DirectCallOp.this.invokeKind;
+                }
+                @Override
+                public void emitCode(TargetMethodAssembler tasm, AMD64MacroAssembler masm) {
+                    assert callsiteMark != null : "static call site has not yet been emitted";
+                    tasm.recordMark(MARK_STATIC_CALL_STUB, callsiteMark);
+                    masm.movq(AMD64.rbx, 0L);
+                    Label dummy = new Label();
+                    masm.jmp(dummy);
+                    masm.bind(dummy);
+                }
+            });
+        }
+
+    }
+
+    @Override
+    public void emitCode(TargetMethodAssembler tasm, AMD64MacroAssembler masm) {
+        if (invokeKind == Static || invokeKind == Special) {
+            tasm.recordMark(invokeKind == Static ? MARK_INVOKESTATIC : MARK_INVOKESPECIAL);
+        } else {
+            assert invokeKind == Virtual || invokeKind == Interface;
+            // The mark for an invocation that uses an inline cache must be placed at the instruction
+            // that loads the klassOop from the inline cache so that the C++ code can find it
+            // and replace the inline null value with Universe::non_oop_word()
+            tasm.recordMark(invokeKind == Virtual ? MARK_INVOKEVIRTUAL : MARK_INVOKEINTERFACE);
+            AMD64Move.move(tasm, masm, AMD64.rax.asValue(Kind.Object), Constant.NULL_OBJECT);
+        }
+
+        emitAlignmentForDirectCall(tasm, masm);
+
+        if (invokeKind == Static || invokeKind == Special) {
+            callsiteMark = tasm.recordMark(null);
+        }
+
+        AMD64Call.directCall(tasm, masm, targetMethod, state);
+    }
+}
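
The handshake for inline cache call sites works by position: the mark recorded just before the movq lets the C++ installer locate the instruction and overwrite its 64-bit immediate with Universe::non_oop_word(). A hedged sketch of that patching step, assuming the move is encoded as a REX.W-prefixed movq with the immediate at byte offset 2 (the real patching lives in the VM's C++ code, not in Java):

    // Illustrative sketch only: overwrite the imm64 of 'movq rax, imm64' at an
    // inline-cache call site with the Universe::non_oop_word() sentinel.
    final class InlineCachePatchSketch {
        static void patch(byte[] code, int movPos, long nonOopWord) {
            final int IMM64_OFFSET = 2; // assumption: REX.W prefix + opcode precede the imm64
            for (int i = 0; i < 8; i++) {
                // x86 immediates are stored little-endian
                code[movPos + IMM64_OFFSET + i] = (byte) (nonOopWord >>> (8 * i));
            }
        }
    }
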
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/target/amd64/AMD64IndirectCallOp.java	Tue Jul 17 20:07:53 2012 +0200
@@ -0,0 +1,72 @@
+/*
+ * Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+package com.oracle.graal.hotspot.target.amd64;
+
+import static com.oracle.graal.api.code.ValueUtil.*;
+import static com.oracle.graal.hotspot.meta.HotSpotXirGenerator.*;
+import static com.oracle.graal.lir.LIRInstruction.OperandFlag.*;
+
+import com.oracle.graal.api.code.*;
+import com.oracle.graal.api.meta.*;
+import com.oracle.graal.lir.*;
+import com.oracle.graal.lir.LIRInstruction.*;
+import com.oracle.graal.lir.amd64.*;
+import com.oracle.graal.lir.amd64.AMD64Call.IndirectCallOp;
+import com.oracle.graal.lir.asm.*;
+import com.oracle.max.asm.target.amd64.*;
+
+/**
+ * A register indirect call that complies with the extra conventions for such calls in HotSpot.
+ * In particular, the methodOop of the callee must be in RBX for the case where a vtable entry's
+ * _from_compiled_entry is the address of a C2I adapter. Such adapters expect the target
+ * method to be in RBX.
+ */
+@Opcode("CALL_INDIRECT")
+final class AMD64IndirectCallOp extends IndirectCallOp {
+
+    /**
+     * Vtable stubs expect the methodOop in RBX.
+     */
+    public static final Register METHOD_OOP = AMD64.rbx;
+
+    @Use({REG}) protected Value methodOop;
+
+    AMD64IndirectCallOp(Object targetMethod, Value result, Value[] parameters, Value methodOop, Value targetAddress, LIRFrameState state) {
+        super(targetMethod, result, parameters, targetAddress, state, null);
+        this.methodOop = methodOop;
+    }
+
+    @Override
+    public void emitCode(TargetMethodAssembler tasm, AMD64MacroAssembler masm) {
+        tasm.recordMark(MARK_INLINE_INVOKEVIRTUAL);
+        Register callReg = asRegister(targetAddress);
+        assert callReg != METHOD_OOP;
+        AMD64Call.indirectCall(tasm, masm, callReg, targetMethod, state);
+    }
+
+    @Override
+    protected void verify() {
+        super.verify();
+        assert asRegister(methodOop) == METHOD_OOP;
+    }
+}
--- a/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/target/amd64/HotSpotAMD64Backend.java	Tue Jul 17 20:07:00 2012 +0200
+++ b/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/target/amd64/HotSpotAMD64Backend.java	Tue Jul 17 20:07:53 2012 +0200
@@ -32,7 +32,6 @@
 import java.util.*;
 
 import com.oracle.graal.api.code.*;
-import com.oracle.graal.api.code.CompilationResult.Mark;
 import com.oracle.graal.api.meta.*;
 import com.oracle.graal.compiler.*;
 import com.oracle.graal.compiler.gen.*;
@@ -45,7 +44,6 @@
 import com.oracle.graal.lir.*;
 import com.oracle.graal.lir.amd64.*;
 import com.oracle.graal.lir.asm.*;
-import com.oracle.graal.lir.asm.TargetMethodAssembler.*;
 import com.oracle.graal.nodes.*;
 import com.oracle.graal.nodes.java.*;
 import com.oracle.graal.nodes.java.MethodCallTargetNode.InvokeKind;
@@ -54,6 +52,9 @@
 import com.oracle.max.asm.target.amd64.AMD64Assembler.ConditionFlag;
 import com.oracle.max.cri.xir.*;
 
+/**
+ * HotSpot AMD64 specific backend.
+ */
 public class HotSpotAMD64Backend extends Backend {
 
     public HotSpotAMD64Backend(CodeCacheProvider runtime, TargetDescription target) {
@@ -112,58 +113,37 @@
             Kind[] signature = MetaUtil.signatureToKinds(callTarget.targetMethod().signature(), callTarget.isStatic() ? null : callTarget.targetMethod().holder().kind());
             CallingConvention cc = frameMap.registerConfig.getCallingConvention(JavaCall, signature, target(), false);
             frameMap.callsMethod(cc, JavaCall);
-            List<Value> argList = visitInvokeArguments(cc, callTarget.arguments());
 
-            Value address = callTarget.address() == null ? Constant.forLong(0L) : operand(callTarget.address());
-
-            final Mark[] callsiteForStaticCallStub = {null};
+            ValueNode methodOopNode = null;
+            boolean inlineVirtualCall = false;
+            if (callTarget.computedAddress() != null) {
+                // If a virtual dispatch address was computed, then an extra argument
+                // was appended for passing the methodOop in RBX
+                methodOopNode = callTarget.arguments().remove(callTarget.arguments().size() - 1);
 
-            if (invokeKind == Static || invokeKind == Special) {
-                lir.stubs.add(new AMD64Code() {
-                    public String description() {
-                        return "static call stub for Invoke" + invokeKind;
-                    }
-                    @Override
-                    public void emitCode(TargetMethodAssembler tasm, AMD64MacroAssembler masm) {
-                        assert callsiteForStaticCallStub[0] != null;
-                        tasm.recordMark(MARK_STATIC_CALL_STUB, callsiteForStaticCallStub);
-                        masm.movq(AMD64.rbx, 0L);
-                        Label dummy = new Label();
-                        masm.jmp(dummy);
-                        masm.bind(dummy);
-                    }
-                });
+                if (invokeKind == Virtual) {
+                    inlineVirtualCall = true;
+                } else {
+                    // An invokevirtual may have been canonicalized into an invokespecial;
+                    // the methodOop argument is ignored in this case
+                    methodOopNode = null;
+                }
             }
 
-            CallPositionListener cpl = new CallPositionListener() {
-                @Override
-                public void beforeCall(TargetMethodAssembler tasm) {
-                    if (invokeKind == Static || invokeKind == Special) {
-                        tasm.recordMark(invokeKind == Static ? MARK_INVOKESTATIC : MARK_INVOKESPECIAL);
-                    } else {
-                        // The mark for an invocation that uses an inline cache must be placed at the instruction
-                        // that loads the klassOop from the inline cache so that the C++ code can find it
-                        // and replace the inline null value with Universe::non_oop_word()
-                        assert invokeKind == Virtual || invokeKind == Interface;
-                        if (callTarget.address() == null) {
-                            tasm.recordMark(invokeKind == Virtual ? MARK_INVOKEVIRTUAL : MARK_INVOKEINTERFACE);
-                            AMD64MacroAssembler masm = (AMD64MacroAssembler) tasm.asm;
-                            AMD64Move.move(tasm, masm, AMD64.rax.asValue(Kind.Object), Constant.NULL_OBJECT);
-                        } else {
-                            tasm.recordMark(MARK_INLINE_INVOKEVIRTUAL);
-                        }
-                    }
-                }
-                public void atCall(TargetMethodAssembler tasm) {
-                    if (invokeKind == Static || invokeKind == Special) {
-                        callsiteForStaticCallStub[0] = tasm.recordMark(null);
-                    }
-                }
-            };
+            List<Value> argList = visitInvokeArguments(cc, callTarget.arguments());
+            Value[] parameters = argList.toArray(new Value[argList.size()]);
 
             LIRFrameState callState = stateFor(x.stateDuring(), null, x instanceof InvokeWithExceptionNode ? getLIRBlock(((InvokeWithExceptionNode) x).exceptionEdge()) : null, x.leafGraphId());
             Value result = resultOperandFor(x.node().kind());
-            emitCall(callTarget.targetMethod(), result, argList, address, callState, cpl);
+            if (!inlineVirtualCall) {
+                assert methodOopNode == null;
+                append(new AMD64DirectCallOp(callTarget.targetMethod(), result, parameters, callState, invokeKind, lir));
+            } else {
+                assert methodOopNode != null;
+                Value methodOop = AMD64.rbx.asValue();
+                emitMove(operand(methodOopNode), methodOop);
+                append(new AMD64IndirectCallOp(callTarget.targetMethod(), result, parameters, methodOop, operand(callTarget.computedAddress()), callState));
+            }
 
             if (isLegal(result)) {
                 setResult(x.node(), emitMove(result));
--- a/graal/com.oracle.graal.lir.amd64/src/com/oracle/graal/lir/amd64/AMD64Call.java	Tue Jul 17 20:07:00 2012 +0200
+++ b/graal/com.oracle.graal.lir.amd64/src/com/oracle/graal/lir/amd64/AMD64Call.java	Tue Jul 17 20:07:53 2012 +0200
@@ -54,9 +54,28 @@
 
         @Override
         public void emitCode(TargetMethodAssembler tasm, AMD64MacroAssembler masm) {
-            callAlignment(tasm, masm, callPositionListener);
+            if (callPositionListener != null) {
+                callPositionListener.beforeCall(tasm);
+            }
+
+            emitAlignmentForDirectCall(tasm, masm);
+
+            if (callPositionListener != null) {
+                int pos = masm.codeBuffer.position();
+                callPositionListener.atCall(tasm);
+                assert pos == masm.codeBuffer.position() : "call position listener inserted code before an aligned call";
+            }
             directCall(tasm, masm, targetMethod, state);
         }
+
+        protected void emitAlignmentForDirectCall(TargetMethodAssembler tasm, AMD64MacroAssembler masm) {
+            // make sure that the displacement word of the call ends up word aligned
+            int offset = masm.codeBuffer.position();
+            offset += tasm.target.arch.machineCodeCallDisplacementOffset;
+            while (offset++ % tasm.target.wordSize != 0) {
+                masm.nop();
+            }
+        }
     }
 
     @Opcode("CALL_INDIRECT")
@@ -66,7 +85,7 @@
         @Use({REG}) protected Value targetAddress;
         @State protected LIRFrameState state;
 
-        private final Object targetMethod;
+        protected final Object targetMethod;
         protected final CallPositionListener callPositionListener;
 
         public IndirectCallOp(Object targetMethod, Value result, Value[] parameters, Value targetAddress, LIRFrameState state, CallPositionListener callPositionListener) {
@@ -88,25 +107,6 @@
         }
     }
 
-    public static void callAlignment(TargetMethodAssembler tasm, AMD64MacroAssembler masm, CallPositionListener callPositionListener) {
-        if (callPositionListener != null) {
-            callPositionListener.beforeCall(tasm);
-        }
-
-        // make sure that the displacement word of the call ends up word aligned
-        int offset = masm.codeBuffer.position();
-        offset += tasm.target.arch.machineCodeCallDisplacementOffset;
-        while (offset++ % tasm.target.wordSize != 0) {
-            masm.nop();
-        }
-
-        if (callPositionListener != null) {
-            int pos = masm.codeBuffer.position();
-            callPositionListener.atCall(tasm);
-            assert pos == masm.codeBuffer.position() : "call position listener inserted code before an aligned call";
-        }
-    }
-
     public static void directCall(TargetMethodAssembler tasm, AMD64MacroAssembler masm, Object target, LIRFrameState info) {
         int before = masm.codeBuffer.position();
         if (target instanceof RuntimeCall) {
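
The hoisted emitAlignmentForDirectCall pads with NOPs so that the call's 4-byte displacement does not straddle a word boundary, allowing the VM to patch the displacement atomically. A standalone sketch of the padding computation, assuming a 1-byte call opcode (machineCodeCallDisplacementOffset == 1) and an 8-byte word, as on AMD64:

    // Sketch: number of NOPs to emit before a direct call at 'position' so
    // that the call's 4-byte displacement starts on a word boundary.
    final class CallAlignmentSketch {
        static int nopsBefore(int position, int displacementOffset, int wordSize) {
            int nops = 0;
            while ((position + nops + displacementOffset) % wordSize != 0) {
                nops++; // each NOP shifts the call, and its displacement, by one byte
            }
            return nops;
        }

        public static void main(String[] args) {
            // A call at offset 13 needs 2 NOPs: the call then starts at 15 and
            // its displacement at 16, an 8-byte boundary.
            System.out.println(nopsBefore(13, 1, 8)); // prints 2
        }
    }
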
--- a/graal/com.oracle.graal.nodes/src/com/oracle/graal/nodes/CallTargetNode.java	Tue Jul 17 20:07:00 2012 +0200
+++ b/graal/com.oracle.graal.nodes/src/com/oracle/graal/nodes/CallTargetNode.java	Tue Jul 17 20:07:53 2012 +0200
@@ -31,15 +31,18 @@
 
     @Input protected final NodeInputList<ValueNode> arguments;
 
-    @Input protected ValueNode address;
+    /**
+     * The address computation for an indirect call (e.g., invokevirtual or invokeinterface).
+     */
+    @Input protected ValueNode computedAddress;
 
-    public ValueNode address() {
-        return address;
+    public ValueNode computedAddress() {
+        return computedAddress;
     }
 
-    public void setAddress(ValueNode address) {
-        updateUsages(this.address, address);
-        this.address = address;
+    public void setComputedAddress(ValueNode address) {
+        updateUsages(this.computedAddress, address);
+        this.computedAddress = address;
     }
 
     public CallTargetNode(ValueNode[] arguments) {
--- a/src/cpu/x86/vm/nativeInst_x86.hpp	Tue Jul 17 20:07:00 2012 +0200
+++ b/src/cpu/x86/vm/nativeInst_x86.hpp	Tue Jul 17 20:07:53 2012 +0200
@@ -186,21 +186,17 @@
  public:
   enum Intel_specific_constants {
     instruction_code            = 0xFF,
-    instruction_size            =    2,
     instruction_offset          =    0,
-    return_address_offset       =    2
+    return_address_offset_norex =    2,
+    return_address_offset_rex   =    3
   };
 
-  address instruction_address() const       { return addr_at(instruction_offset); }
-  address next_instruction_address() const  { return addr_at(return_address_offset); }
-
-
-  static bool is_call_reg_at(address instr) {
-    return ((*instr) & 0xFF) == NativeCallReg::instruction_code;
-  }
-
-  static bool is_call_reg_before(address return_address) {
-    return is_call_reg_at(return_address - NativeCallReg::return_address_offset);
+  int next_instruction_offset() const  {
+    if (ubyte_at(0) == NativeCallReg::instruction_code) {
+      return return_address_offset_norex;
+    } else {
+      return return_address_offset_rex;
+    }
   }
 };
 
@@ -555,7 +551,9 @@
 
 inline bool NativeInstruction::is_illegal()      { return (short)int_at(0) == (short)NativeIllegalInstruction::instruction_code; }
 inline bool NativeInstruction::is_call()         { return ubyte_at(0) == NativeCall::instruction_code; }
-inline bool NativeInstruction::is_call_reg()     { return ubyte_at(0) == NativeCallReg::instruction_code; }
+inline bool NativeInstruction::is_call_reg()     { return ubyte_at(0) == NativeCallReg::instruction_code ||
+                                                          (ubyte_at(1) == NativeCallReg::instruction_code &&
+                                                           (ubyte_at(0) == Assembler::REX || ubyte_at(0) == Assembler::REX_B)); }
 inline bool NativeInstruction::is_return()       { return ubyte_at(0) == NativeReturn::instruction_code ||
                                                           ubyte_at(0) == NativeReturnX::instruction_code; }
 inline bool NativeInstruction::is_jump()         { return ubyte_at(0) == NativeJump::instruction_code ||
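
The reworked NativeCallReg recognizes both encodings of a register-indirect call: plain FF /2 (2 bytes, e.g. FF D0 for call rax) and REX-prefixed (3 bytes, e.g. 41 FF D0 for call r8), which is needed when the register holding the computed address requires a REX prefix. A Java sketch of the same classification performed by next_instruction_offset (the byte values are real x86-64 encodings; the helper itself is illustrative):

    // Sketch of NativeCallReg::next_instruction_offset(): offset from the
    // first instruction byte to the return address of a 'call reg'.
    final class CallRegLengthSketch {
        static final int CALL_RM = 0xFF; // FF /2: call r/m64
        static final int REX     = 0x40;
        static final int REX_B   = 0x41;

        static int nextInstructionOffset(byte[] code, int pos) {
            int first = code[pos] & 0xFF;
            if (first == CALL_RM) {
                return 2; // opcode + ModRM, e.g. FF D0 = call rax
            }
            assert first == REX || first == REX_B : "expected REX-prefixed call";
            return 3;     // REX prefix + opcode + ModRM, e.g. 41 FF D0 = call r8
        }
    }
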
--- a/src/share/vm/graal/graalCodeInstaller.cpp	Tue Jul 17 20:07:00 2012 +0200
+++ b/src/share/vm/graal/graalCodeInstaller.cpp	Tue Jul 17 20:07:53 2012 +0200
@@ -273,22 +273,22 @@
 }
 
 // constructor used to create a method
-CodeInstaller::CodeInstaller(Handle& target_method, nmethod*& nm, bool install_code) {
+CodeInstaller::CodeInstaller(Handle& comp_result, nmethod*& nm, bool install_code) {
   _env = CURRENT_ENV;
   GraalCompiler::initialize_buffer_blob();
   CodeBuffer buffer(JavaThread::current()->get_buffer_blob());
-  jobject target_method_obj = JNIHandles::make_local(target_method());
-  initialize_assumptions(JNIHandles::resolve(target_method_obj));
+  jobject comp_result_obj = JNIHandles::make_local(comp_result());
+  initialize_assumptions(JNIHandles::resolve(comp_result_obj));
 
   {
     No_Safepoint_Verifier no_safepoint;
-    initialize_fields(JNIHandles::resolve(target_method_obj));
+    initialize_fields(JNIHandles::resolve(comp_result_obj));
     initialize_buffer(buffer);
     process_exception_handlers();
   }
 
   int stack_slots = _total_frame_size / HeapWordSize; // conversion to words
-  methodHandle method = getMethodFromHotSpotMethod(HotSpotCompilationResult::method(JNIHandles::resolve(target_method_obj))); 
+  methodHandle method = getMethodFromHotSpotMethod(HotSpotCompilationResult::method(JNIHandles::resolve(comp_result_obj))); 
 
   nm = GraalEnv::register_method(method, -1, &_offsets, _custom_stack_area_offset, &buffer, stack_slots, _debug_recorder->_oopmaps, &_exception_handler_table,
     &_implicit_exception_table, GraalCompiler::instance(), _debug_recorder, _dependencies, NULL, -1, true, false, install_code);
@@ -317,22 +317,23 @@
   id = VmIds::addStub(blob->code_begin());
 }
 
-void CodeInstaller::initialize_fields(oop target_method) {
-  _citarget_method = HotSpotCompilationResult::comp(target_method);
-  _hotspot_method = HotSpotCompilationResult::method(target_method);
+void CodeInstaller::initialize_fields(oop comp_result) {
+  _comp_result = HotSpotCompilationResult::comp(comp_result);
+  _hotspot_method = HotSpotCompilationResult::method(comp_result);
   if (_hotspot_method != NULL) {
-    _parameter_count = getMethodFromHotSpotMethod(_hotspot_method)->size_of_parameters();
+    methodOop method = getMethodFromHotSpotMethod(_hotspot_method);
+    _parameter_count = method->size_of_parameters();
+    TRACE_graal_1("installing code for %s", method->name_and_sig_as_C_string());
   }
-  _name = HotSpotCompilationResult::name(target_method);
-  _sites = (arrayOop) HotSpotCompilationResult::sites(target_method);
-  _exception_handlers = (arrayOop) HotSpotCompilationResult::exceptionHandlers(target_method);
+  _name = HotSpotCompilationResult::name(comp_result);
+  _sites = (arrayOop) HotSpotCompilationResult::sites(comp_result);
+  _exception_handlers = (arrayOop) HotSpotCompilationResult::exceptionHandlers(comp_result);
 
-  _code = (arrayOop) InstalledCode::targetCode(_citarget_method);
-  _code_size = InstalledCode::targetCodeSize(_citarget_method);
+  _code = (arrayOop) InstalledCode::targetCode(_comp_result);
+  _code_size = InstalledCode::targetCodeSize(_comp_result);
   // The frame size we get from the target method does not include the return address, so add one word for it here.
-  _total_frame_size = InstalledCode::frameSize(_citarget_method) + HeapWordSize;
-  _custom_stack_area_offset = InstalledCode::customStackAreaOffset(_citarget_method);
-
+  _total_frame_size = InstalledCode::frameSize(_comp_result) + HeapWordSize;
+  _custom_stack_area_offset = InstalledCode::customStackAreaOffset(_comp_result);
 
   // (very) conservative estimate: each site needs a constant section entry
   _constants_size = _sites->length() * (BytesPerLong*2);
@@ -627,11 +628,12 @@
     assert((call[0] == 0x40 || call[0] == 0x41) && call[1] == 0xFF, "expected call with rex/rexb prefix byte");
     next_pc_offset += 3; /* prefix byte + opcode byte + modrm byte */
   } else if (inst->is_call_reg()) {
-    // the inlined vtable stub contains a "call register" instruction, which isn't recognized here
+    // the inlined vtable stub contains a "call register" instruction
     assert(hotspot_method != NULL, "only valid for virtual calls");
     is_call_reg = true;
-    next_pc_offset = pc_offset + NativeCallReg::instruction_size;
+    next_pc_offset = pc_offset + ((NativeCallReg *) inst)->next_instruction_offset();
   } else {
+    tty->print_cr("at pc_offset %d", pc_offset);
     runtime_call->print();
     fatal("unsupported type of instruction for call site");
   }
--- a/src/share/vm/graal/graalCodeInstaller.hpp	Tue Jul 17 20:07:00 2012 +0200
+++ b/src/share/vm/graal/graalCodeInstaller.hpp	Tue Jul 17 20:07:53 2012 +0200
@@ -56,7 +56,7 @@
 
   ciEnv*        _env;
 
-  oop           _citarget_method;
+  oop           _comp_result;
   oop           _hotspot_method;
   oop           _name;
   arrayOop      _sites;
--- a/src/share/vm/graal/graalCompilerToVM.cpp	Tue Jul 17 20:07:00 2012 +0200
+++ b/src/share/vm/graal/graalCompilerToVM.cpp	Tue Jul 17 20:07:53 2012 +0200
@@ -876,15 +876,15 @@
 }
 
 // public HotSpotCompiledMethod installMethod(HotSpotCompilationResult comp, boolean installCode);
-JNIEXPORT jobject JNICALL Java_com_oracle_graal_hotspot_bridge_CompilerToVMImpl_installMethod(JNIEnv *jniEnv, jobject, jobject comp, jboolean install_code, jobject info) {
+JNIEXPORT jobject JNICALL Java_com_oracle_graal_hotspot_bridge_CompilerToVMImpl_installMethod(JNIEnv *jniEnv, jobject, jobject compResult, jboolean install_code, jobject info) {
   VM_ENTRY_MARK;
   ResourceMark rm;
   HandleMark hm;
-  Handle targetMethodHandle = JNIHandles::resolve(comp);
+  Handle compResultHandle = JNIHandles::resolve(compResult);
   nmethod* nm = NULL;
   Arena arena;
   ciEnv env(&arena);
-  CodeInstaller installer(targetMethodHandle, nm, install_code != 0);
+  CodeInstaller installer(compResultHandle, nm, install_code != 0);
 
   if (info != NULL) {
     arrayOop codeCopy = oopFactory::new_byteArray(nm->code_size(), CHECK_0);
@@ -899,7 +899,7 @@
     Handle obj = instanceKlass::cast(HotSpotCompiledMethod::klass())->allocate_permanent_instance(CHECK_NULL);
     assert(obj() != NULL, "must succeed in allocating instance");
     HotSpotCompiledMethod::set_nmethod(obj, (jlong) nm);
-    HotSpotCompiledMethod::set_method(obj, HotSpotCompilationResult::method(comp));
+    HotSpotCompiledMethod::set_method(obj, HotSpotCompilationResult::method(compResult));
     nm->set_graal_compiled_method(obj());
     return JNIHandles::make_local(obj());
   } else {
--- a/src/share/vm/runtime/sharedRuntime.cpp	Tue Jul 17 20:07:00 2012 +0200
+++ b/src/share/vm/runtime/sharedRuntime.cpp	Tue Jul 17 20:07:53 2012 +0200
@@ -1697,6 +1697,8 @@
 IRT_LEAF(void, SharedRuntime::fixup_callers_callsite(methodOopDesc* method, address caller_pc))
   methodOop moop(method);
 
+  assert(moop->is_oop(false) && moop->is_method(), "method oop from call site is invalid");
+
   address entry_point = moop->from_compiled_entry();
 
   // It's possible that deoptimization can occur at a call site which hasn't