changeset 15046:5c71dcf0915d

use hotspot static stubs for call sites
author Tom Rodriguez <tom.rodriguez@oracle.com>
date Wed, 09 Apr 2014 16:33:37 -0700
parents 0286888f792b
children d8b9e3761e52
files graal/com.oracle.graal.hotspot.amd64/src/com/oracle/graal/hotspot/amd64/AMD64HotSpotNodeLIRBuilder.java graal/com.oracle.graal.hotspot.amd64/src/com/oracle/graal/hotspot/amd64/AMD64HotspotDirectStaticCallOp.java src/cpu/sparc/vm/compiledIC_sparc.cpp src/cpu/x86/vm/compiledIC_x86.cpp src/share/vm/code/compiledIC.cpp src/share/vm/code/compiledIC.hpp src/share/vm/graal/graalCodeInstaller.cpp src/share/vm/graal/graalCodeInstaller.hpp
diffstat 8 files changed, 57 insertions(+), 50 deletions(-)
--- a/graal/com.oracle.graal.hotspot.amd64/src/com/oracle/graal/hotspot/amd64/AMD64HotSpotNodeLIRBuilder.java	Wed Apr 09 16:33:32 2014 -0700
+++ b/graal/com.oracle.graal.hotspot.amd64/src/com/oracle/graal/hotspot/amd64/AMD64HotSpotNodeLIRBuilder.java	Wed Apr 09 16:33:37 2014 -0700
@@ -120,8 +120,7 @@
             assert invokeKind == InvokeKind.Static || invokeKind == InvokeKind.Special;
             HotSpotResolvedJavaMethod resolvedMethod = (HotSpotResolvedJavaMethod) callTarget.target();
             assert !Modifier.isAbstract(resolvedMethod.getModifiers()) : "Cannot make direct call to abstract method.";
-            Constant metaspaceMethod = resolvedMethod.getMetaspaceMethodConstant();
-            append(new AMD64HotspotDirectStaticCallOp(callTarget.target(), result, parameters, temps, callState, invokeKind, metaspaceMethod));
+            append(new AMD64HotspotDirectStaticCallOp(callTarget.target(), result, parameters, temps, callState, invokeKind));
         }
     }
 
--- a/graal/com.oracle.graal.hotspot.amd64/src/com/oracle/graal/hotspot/amd64/AMD64HotspotDirectStaticCallOp.java	Wed Apr 09 16:33:32 2014 -0700
+++ b/graal/com.oracle.graal.hotspot.amd64/src/com/oracle/graal/hotspot/amd64/AMD64HotspotDirectStaticCallOp.java	Wed Apr 09 16:33:37 2014 -0700
@@ -22,14 +22,11 @@
  */
 package com.oracle.graal.hotspot.amd64;
 
-import com.oracle.graal.amd64.*;
 import com.oracle.graal.api.meta.*;
 import com.oracle.graal.asm.amd64.*;
-import com.oracle.graal.hotspot.*;
 import com.oracle.graal.hotspot.meta.HotSpotCodeCacheProvider.MarkId;
 import com.oracle.graal.lir.*;
 import com.oracle.graal.lir.amd64.AMD64Call.DirectCallOp;
-import com.oracle.graal.lir.amd64.*;
 import com.oracle.graal.lir.asm.*;
 import com.oracle.graal.nodes.java.MethodCallTargetNode.InvokeKind;
 
@@ -40,24 +37,17 @@
 @Opcode("CALL_DIRECT")
 final class AMD64HotspotDirectStaticCallOp extends DirectCallOp {
 
-    private final Constant metaspaceMethod;
     private final InvokeKind invokeKind;
 
-    AMD64HotspotDirectStaticCallOp(ResolvedJavaMethod target, Value result, Value[] parameters, Value[] temps, LIRFrameState state, InvokeKind invokeKind, Constant metaspaceMethod) {
+    AMD64HotspotDirectStaticCallOp(ResolvedJavaMethod target, Value result, Value[] parameters, Value[] temps, LIRFrameState state, InvokeKind invokeKind) {
         super(target, result, parameters, temps, state);
         assert invokeKind == InvokeKind.Static || invokeKind == InvokeKind.Special;
-        this.metaspaceMethod = metaspaceMethod;
         this.invokeKind = invokeKind;
     }
 
     @Override
     public void emitCode(CompilationResultBuilder crb, AMD64MacroAssembler masm) {
-        // The mark for an invocation that uses an inline cache must be placed at the
-        // instruction that loads the Klass from the inline cache.
-        AMD64Move.move(crb, masm, AMD64.rbx.asValue(Kind.Long), metaspaceMethod);
         MarkId.recordMark(crb, invokeKind == InvokeKind.Static ? MarkId.INVOKESTATIC : MarkId.INVOKESPECIAL);
-        // This must be emitted exactly like this to ensure it's patchable
-        masm.movq(AMD64.rax, HotSpotGraalRuntime.runtime().getConfig().nonOopBits);
         super.emitCode(crb, masm);
     }
 }
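
The two hunks above remove Graal's hand-rolled call-site sequence on AMD64: the op no longer loads the metaspace Method* into rbx, and no longer emits the carefully-shaped patchable movq of nonOopBits into rax. A rough sketch of where that state lives after this change (x86_64 layout taken from the stub comments in compiledIC_x86.cpp below; illustrative only):

    main code section                      stub section (one stub per call site)
    -----------------                      -------------------------------------
    <INVOKESTATIC/INVOKESPECIAL mark>      movq rbx, 0x0   ; patched with the Method*
    call ...                               jmp  -5         ; patched to the interp entry

The call site keeps only the mark and the call; resolving the call to interpreted code patches the stub rather than the call site itself.
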
--- a/src/cpu/sparc/vm/compiledIC_sparc.cpp	Wed Apr 09 16:33:32 2014 -0700
+++ b/src/cpu/sparc/vm/compiledIC_sparc.cpp	Wed Apr 09 16:33:37 2014 -0700
@@ -81,25 +81,26 @@
 // ----------------------------------------------------------------------------
 
 #define __ _masm.
-void CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
-#ifdef COMPILER2
+void CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf, address mark) {
   // Stub is fixed up when the corresponding call is converted from calling
   // compiled code to calling interpreted code.
   // set (empty), G5
   // jmp -1
 
-  address mark = cbuf.insts_mark();  // Get mark within main instrs section.
+  if (mark == NULL) {
+    mark = cbuf.insts_mark();  // Get mark within main instrs section.
+  }
 
   MacroAssembler _masm(&cbuf);
 
   address base =
-  __ start_a_stub(to_interp_stub_size()*2);
-  if (base == NULL) return;  // CodeBuffer::expand failed.
+  __ start_a_stub(to_interp_stub_size());
+  guarantee(base != NULL, "out of space");
 
   // Static stub relocation stores the instruction address of the call.
   __ relocate(static_stub_Relocation::spec(mark));
 
-  __ set_metadata(NULL, as_Register(Matcher::inline_cache_reg_encode()));
+  __ set_metadata(NULL, G5);
 
   __ set_inst_mark();
   AddressLiteral addrlit(-1);
@@ -107,11 +108,10 @@
 
   __ delayed()->nop();
 
+  assert(__ pc() - base <= to_interp_stub_size(), "wrong stub size"); 
+
   // Update current stubs pointer and restore code_end.
   __ end_a_stub();
-#else
-  ShouldNotReachHere();
-#endif
 }
 #undef __
 
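Two details in the SPARC hunk deserve a note. The emitter is no longer wrapped in #ifdef COMPILER2, so it has to compile in a Graal-only VM; that is presumably also why the inline-cache register is now named directly rather than resolved through C2's Matcher, a COMPILER2-only class:

    // Old (C2-only): resolve the inline cache register via the matcher.
    //   __ set_metadata(NULL, as_Register(Matcher::inline_cache_reg_encode()));
    // New: name SPARC's inline cache register, G5, directly.
    __ set_metadata(NULL, G5);

The failure mode also changed: a CodeBuffer::expand failure used to return silently, leaving no stub; it is now a hard guarantee("out of space"), matching the new assumption that the stub always exists.
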
--- a/src/cpu/x86/vm/compiledIC_x86.cpp	Wed Apr 09 16:33:32 2014 -0700
+++ b/src/cpu/x86/vm/compiledIC_x86.cpp	Wed Apr 09 16:33:37 2014 -0700
@@ -78,21 +78,24 @@
 // ----------------------------------------------------------------------------
 
 #define __ _masm.
-void CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
+void CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf, address mark) {
   // Stub is fixed up when the corresponding call is converted from
   // calling compiled code to calling interpreted code.
   // movq rbx, 0
   // jmp -5 # to self
 
-  address mark = cbuf.insts_mark();  // Get mark within main instrs section.
+  if (mark == NULL) {
+    mark = cbuf.insts_mark();  // Get mark within main instrs section.
+  }
 
   // Note that the code buffer's insts_mark is always relative to insts.
   // That's why we must use the macroassembler to generate a stub.
   MacroAssembler _masm(&cbuf);
 
   address base =
-  __ start_a_stub(to_interp_stub_size()*2);
-  if (base == NULL) return;  // CodeBuffer::expand failed.
+  __ start_a_stub(to_interp_stub_size());
+  guarantee(base != NULL, "out of space");
+
   // Static stub relocation stores the instruction address of the call.
   __ relocate(static_stub_Relocation::spec(mark), Assembler::imm_operand);
   // Static stub relocation also tags the Method* in the code-stream.
@@ -100,6 +103,8 @@
   // This is recognized as unresolved by relocs/nativeinst/ic code.
   __ jump(RuntimeAddress(__ pc()));
 
+  assert(__ pc() - base <= to_interp_stub_size(), "wrong stub size"); 
+
   // Update current stubs pointer and restore insts_end.
   __ end_a_stub();
 }
@@ -117,12 +122,6 @@
 
 void CompiledStaticCall::set_to_interpreted(methodHandle callee, address entry) {
   address stub = find_stub();
-#ifdef GRAAL
-  if (stub == NULL) {
-    set_destination_mt_safe(entry);
-    return;
-  }
-#endif
   guarantee(stub != NULL, "stub not found");
 
   if (TraceICs) {
@@ -172,14 +171,12 @@
     verify_alignment();
   }
 
-#ifndef GRAAL
   // Verify stub.
   address stub = find_stub();
   assert(stub != NULL, "no stub found for static call");
   // Creation also verifies the object.
   NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
   NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());
-#endif
 
   // Verify state.
   assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
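
With the GRAAL escape hatch gone, set_to_interpreted requires the stub unconditionally, which is safe only because the code installer now always emits one (see graalCodeInstaller.cpp below). For context, a paraphrased sketch of the rest of that function, based on the surrounding HotSpot code of this era (TraceICs logging and the MT-safety asserts are elided): patching is two stores into the stub plus retargeting the call.

    void CompiledStaticCall::set_to_interpreted(methodHandle callee, address entry) {
      address stub = find_stub();
      guarantee(stub != NULL, "stub not found");
      // Creation also verifies the object.
      NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
      NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());
      // Update the stub: the Method* first, then the branch target.
      method_holder->set_data((intptr_t)callee());
      jump->set_jump_destination(entry);
      // Finally point the call itself at the stub.
      set_destination_mt_safe(stub);
    }
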
--- a/src/share/vm/code/compiledIC.cpp	Wed Apr 09 16:33:32 2014 -0700
+++ b/src/share/vm/code/compiledIC.cpp	Wed Apr 09 16:33:37 2014 -0700
@@ -76,7 +76,7 @@
   // Don't use ic_destination for this test since that forwards
   // through ICBuffer instead of returning the actual current state of
   // the CompiledIC.
-  if (is_icholder_entry(_ic_call->destination()) GRAAL_ONLY(&& _value != NULL)) {
+  if (is_icholder_entry(_ic_call->destination())) {
     // When patching for the ICStub case the cached value isn't
     // overwritten until the ICStub copied into the CompiledIC during
     // the next safepoint.  Make sure that the CompiledICHolder* is
@@ -107,13 +107,6 @@
   _ic_call->set_destination_mt_safe(entry_point);
 }
 
-#ifdef GRAAL
-  if (_value == NULL) {
-    // Can happen when Graal converted a virtual call into an invoke special based on static analysis.
-    return;
-  }
-#endif
-
   if (is_optimized() || is_icstub) {
     // Optimized call sites don't have a cache value and ICStub call
     // sites only change the entry point.  Changing the value in that
@@ -238,8 +231,8 @@
   // for calling directly to vep without using the inline cache (i.e., cached_value == NULL)
 #ifdef ASSERT
   CodeBlob* caller = CodeCache::find_blob_unsafe(instruction_address());
-  bool is_c1_or_graal_method = caller->is_compiled_by_c1() || caller->is_compiled_by_graal();
-  assert( is_c1_or_graal_method ||
+  bool is_c1_method = caller->is_compiled_by_c1();
+  assert( is_c1_method ||
          !is_monomorphic ||
          is_optimized() ||
          (cached_metadata() != NULL && cached_metadata()->is_klass()), "sanity check");
@@ -264,14 +257,12 @@
     // Check if we are calling into our own codeblob (i.e., to a stub)
     CodeBlob* cb = CodeCache::find_blob(_ic_call->instruction_address());
     address dest = ic_destination();
-#ifndef GRAAL
 #ifdef ASSERT
     {
       CodeBlob* db = CodeCache::find_blob_unsafe(dest);
       assert(!db->is_adapter_blob(), "must use stub!");
     }
 #endif /* ASSERT */
-#endif
     is_call_to_interpreted = cb->contains(dest);
   }
   return is_call_to_interpreted;
--- a/src/share/vm/code/compiledIC.hpp	Wed Apr 09 16:33:32 2014 -0700
+++ b/src/share/vm/code/compiledIC.hpp	Wed Apr 09 16:33:37 2014 -0700
@@ -308,7 +308,7 @@
   friend CompiledStaticCall* compiledStaticCall_at(Relocation* call_site);
 
   // Code
-  static void emit_to_interp_stub(CodeBuffer &cbuf);
+  static void emit_to_interp_stub(CodeBuffer &cbuf, address mark = NULL);
   static int to_interp_stub_size();
   static int reloc_to_interp_stub();
 
--- a/src/share/vm/graal/graalCodeInstaller.cpp	Wed Apr 09 16:33:32 2014 -0700
+++ b/src/share/vm/graal/graalCodeInstaller.cpp	Wed Apr 09 16:33:37 2014 -0700
@@ -22,6 +22,7 @@
  */
 
 #include "precompiled.hpp"
+#include "code/compiledIC.hpp"
 #include "compiler/compileBroker.hpp"
 #include "compiler/disassembler.hpp"
 #include "runtime/javaCalls.hpp"
@@ -474,12 +475,35 @@
   _next_call_type = INVOKE_INVALID;
 }
 
+int CodeInstaller::estimate_stub_entries() {
+  // Estimate the number of static call stubs that might be emitted.
+  int static_call_stubs = 0;
+  for (int i = 0; i < _sites->length(); i++) {
+    oop site = ((objArrayOop) (_sites))->obj_at(i);
+    if (site->is_a(CompilationResult_Mark::klass())) {
+      oop id_obj = CompilationResult_Mark::id(site);
+      if (id_obj != NULL) {
+        assert(java_lang_boxing_object::is_instance(id_obj, T_INT), "Integer id expected");
+        jint id = id_obj->int_field(java_lang_boxing_object::value_offset_in_bytes(T_INT));
+        if (id == INVOKESTATIC || id == INVOKESPECIAL) {
+          static_call_stubs++;
+        }
+      }
+    }
+  }
+  return static_call_stubs;
+}
+
 // perform data and call relocation on the CodeBuffer
 bool CodeInstaller::initialize_buffer(CodeBuffer& buffer) {
   int locs_buffer_size = _sites->length() * (relocInfo::length_limit + sizeof(relocInfo));
   char* locs_buffer = NEW_RESOURCE_ARRAY(char, locs_buffer_size);
   buffer.insts()->initialize_shared_locs((relocInfo*)locs_buffer, locs_buffer_size / sizeof(relocInfo));
-  buffer.initialize_stubs_size(256);
+  // Allocate enough space in the stub section for the static call
+  // stubs.  Stubs have extra relocs but they are managed by the stub
+  // section itself so they don't need to be accounted for in the
+  // locs_buffer above.
+  buffer.initialize_stubs_size(estimate_stub_entries() * CompiledStaticCall::to_interp_stub_size());
   buffer.initialize_consts_size(_constants_size);
 
   _debug_recorder = new DebugInformationRecorder(_oop_recorder);
@@ -778,6 +802,10 @@
 
     TRACE_graal_3("method call");
     CodeInstaller::pd_relocate_JavaMethod(hotspot_method, pc_offset);
+    if (_next_call_type == INVOKESTATIC || _next_call_type == INVOKESPECIAL) {
+      // Need a static call stub for transitions from compiled to interpreted.
+      CompiledStaticCall::emit_to_interp_stub(buffer, _instructions->start() + pc_offset);
+    }
   }
 
   _next_call_type = INVOKE_INVALID;
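
The stub section is now sized exactly, one stub per INVOKESTATIC/INVOKESPECIAL mark, instead of the previous flat 256 bytes. A worked example, using the LP64 x86 stub size from this era's compiledIC_x86.cpp (15 bytes: a 10-byte movq rbx, imm64 plus a 5-byte jmp rel32; treat the numbers as illustrative):

    // A CompilationResult containing three static/special invokes:
    //   estimate_stub_entries()                   == 3
    //   CompiledStaticCall::to_interp_stub_size() == 15
    buffer.initialize_stubs_size(3 * 15);  // 45 bytes, vs. the old flat 256

Since emit_to_interp_stub runs once per qualifying call during relocation (the hunk above), the estimate from the marks must not undercount; the asserts added in the compiledIC files guard the per-stub size.
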
--- a/src/share/vm/graal/graalCodeInstaller.hpp	Wed Apr 09 16:33:32 2014 -0700
+++ b/src/share/vm/graal/graalCodeInstaller.hpp	Wed Apr 09 16:33:37 2014 -0700
@@ -102,7 +102,9 @@
   // extract the fields of the CompilationResult
   void initialize_fields(oop target_method);
   void initialize_assumptions(oop target_method);
-
+  
+  int estimate_stub_entries();
+  
   // perform data and call relocation on the CodeBuffer
   bool initialize_buffer(CodeBuffer& buffer);
 
@@ -120,7 +122,7 @@
   void record_scope(jint pc_offset, oop code_pos, GrowableArray<ScopeValue*>* objects);
 
   void process_exception_handlers();
-
+  int estimateStubSpace(int static_call_stubs);
 };
 
 /**