changeset 12330:29bdcf12457c

8014447: Object.hashCode intrinsic breaks inline caches
Summary: Try to inline as normal method first, then fall back to intrinsic.
Reviewed-by: kvn, twisti
author shade
date Fri, 27 Sep 2013 11:52:24 +0400
parents f2512d89ad0c
children d8d059e90ec1
files src/share/vm/opto/callGenerator.hpp src/share/vm/opto/doCall.cpp src/share/vm/opto/library_call.cpp
diffstat 3 files changed, 28 insertions(+), 3 deletions(-)
--- a/src/share/vm/opto/callGenerator.hpp	Sat Sep 28 12:42:22 2013 -0700
+++ b/src/share/vm/opto/callGenerator.hpp	Fri Sep 27 11:52:24 2013 +0400
@@ -65,6 +65,8 @@
   virtual bool      is_predicted() const        { return false; }
   // is_trap: Does not return to the caller.  (E.g., uncommon trap.)
   virtual bool      is_trap() const             { return false; }
+  // does_virtual_dispatch: Should try inlining as normal method first.
+  virtual bool      does_virtual_dispatch() const     { return false; }
 
   // is_late_inline: supports conversion of call into an inline
   virtual bool      is_late_inline() const      { return false; }
--- a/src/share/vm/opto/doCall.cpp	Sat Sep 28 12:42:22 2013 -0700
+++ b/src/share/vm/opto/doCall.cpp	Fri Sep 27 11:52:24 2013 +0400
@@ -110,6 +110,7 @@
   // then we return it as the inlined version of the call.
   // We do this before the strict f.p. check below because the
   // intrinsics handle strict f.p. correctly.
+  CallGenerator* cg_intrinsic = NULL;
   if (allow_inline && allow_intrinsics) {
     CallGenerator* cg = find_intrinsic(callee, call_does_dispatch);
     if (cg != NULL) {
@@ -121,7 +122,16 @@
           cg = CallGenerator::for_predicted_intrinsic(cg, inline_cg);
         }
       }
-      return cg;
+
+      // If intrinsic does the virtual dispatch, we try to use the type profile
+      // first, and hopefully inline it as the regular virtual call below.
+      // We will retry the intrinsic if nothing had claimed it afterwards.
+      if (cg->does_virtual_dispatch()) {
+        cg_intrinsic = cg;
+        cg = NULL;
+      } else {
+        return cg;
+      }
     }
   }
 
@@ -266,6 +276,13 @@
     }
   }
 
+  // Nothing claimed the intrinsic, we go with straight-forward inlining
+  // for already discovered intrinsic.
+  if (allow_inline && allow_intrinsics && cg_intrinsic != NULL) {
+    assert(cg_intrinsic->does_virtual_dispatch(), "sanity");
+    return cg_intrinsic;
+  }
+
   // There was no special inlining tactic, or it bailed out.
   // Use a more generic tactic, like a simple call.
   if (call_does_dispatch) {
--- a/src/share/vm/opto/library_call.cpp	Sat Sep 28 12:42:22 2013 -0700
+++ b/src/share/vm/opto/library_call.cpp	Fri Sep 27 11:52:24 2013 +0400
@@ -47,19 +47,22 @@
  private:
   bool             _is_virtual;
   bool             _is_predicted;
+  bool             _does_virtual_dispatch;
   vmIntrinsics::ID _intrinsic_id;
 
  public:
-  LibraryIntrinsic(ciMethod* m, bool is_virtual, bool is_predicted, vmIntrinsics::ID id)
+  LibraryIntrinsic(ciMethod* m, bool is_virtual, bool is_predicted, bool does_virtual_dispatch, vmIntrinsics::ID id)
     : InlineCallGenerator(m),
       _is_virtual(is_virtual),
       _is_predicted(is_predicted),
+      _does_virtual_dispatch(does_virtual_dispatch),
       _intrinsic_id(id)
   {
   }
   virtual bool is_intrinsic() const { return true; }
   virtual bool is_virtual()   const { return _is_virtual; }
   virtual bool is_predicted()   const { return _is_predicted; }
+  virtual bool does_virtual_dispatch()   const { return _does_virtual_dispatch; }
   virtual JVMState* generate(JVMState* jvms);
   virtual Node* generate_predicate(JVMState* jvms);
   vmIntrinsics::ID intrinsic_id() const { return _intrinsic_id; }
@@ -355,6 +358,7 @@
   }
 
   bool is_predicted = false;
+  bool does_virtual_dispatch = false;
 
   switch (id) {
   case vmIntrinsics::_compareTo:
@@ -381,8 +385,10 @@
     break;
   case vmIntrinsics::_hashCode:
     if (!InlineObjectHash)  return NULL;
+    does_virtual_dispatch = true;
     break;
   case vmIntrinsics::_clone:
+    does_virtual_dispatch = true;
   case vmIntrinsics::_copyOf:
   case vmIntrinsics::_copyOfRange:
     if (!InlineObjectCopy)  return NULL;
@@ -541,7 +547,7 @@
     if (!InlineUnsafeOps)  return NULL;
   }
 
-  return new LibraryIntrinsic(m, is_virtual, is_predicted, (vmIntrinsics::ID) id);
+  return new LibraryIntrinsic(m, is_virtual, is_predicted, does_virtual_dispatch, (vmIntrinsics::ID) id);
 }
 
 //----------------------register_library_intrinsics-----------------------
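For reference, the control flow added to doCall.cpp above can be summarized as: stash an intrinsic that performs virtual dispatch (such as the Object.hashCode intrinsic), let the regular type-profile-driven inlining run first, and fall back to the stashed intrinsic only if nothing else claimed the call site. Below is a minimal, self-contained C++ sketch of that decision logic; MockGenerator, find_intrinsic, try_normal_inline and their boolean parameters are hypothetical stand-ins for illustration, not HotSpot APIs.

// Illustrative sketch only: models the ordering introduced in
// Compile::call_generator(), not the real HotSpot implementation.
#include <cstdio>

struct MockGenerator {
  bool does_virtual_dispatch;   // mirrors CallGenerator::does_virtual_dispatch()
  const char* name;
};

// Stand-in for find_intrinsic(): may or may not find an intrinsic for the callee.
static MockGenerator* find_intrinsic(bool has_intrinsic) {
  static MockGenerator hash_code_intrinsic = { /*does_virtual_dispatch=*/ true,
                                               "hashCode intrinsic" };
  return has_intrinsic ? &hash_code_intrinsic : nullptr;
}

// Stand-in for the profile-guided inlining attempts that the real
// call_generator() performs after the intrinsic check.
static MockGenerator* try_normal_inline(bool profile_is_monomorphic) {
  static MockGenerator inlined = { false, "inlined virtual call" };
  return profile_is_monomorphic ? &inlined : nullptr;
}

// New ordering: remember a virtual-dispatching intrinsic, try to inline the
// call as a regular method first, and only fall back to the intrinsic when
// nothing else claimed the call site.
static MockGenerator* call_generator(bool has_intrinsic, bool monomorphic) {
  MockGenerator* cg_intrinsic = nullptr;
  MockGenerator* cg = find_intrinsic(has_intrinsic);
  if (cg != nullptr) {
    if (cg->does_virtual_dispatch) {
      cg_intrinsic = cg;        // stash it, keep looking for a normal inline
    } else {
      return cg;                // non-dispatching intrinsics win immediately
    }
  }
  cg = try_normal_inline(monomorphic);
  if (cg != nullptr) return cg;               // type profile claimed the site
  if (cg_intrinsic != nullptr) return cg_intrinsic;  // fall back to intrinsic
  static MockGenerator plain = { true, "plain virtual call" };
  return &plain;
}

int main() {
  std::printf("monomorphic profile : %s\n", call_generator(true, true)->name);
  std::printf("polymorphic profile : %s\n", call_generator(true, false)->name);
  return 0;
}

Under this ordering a call site with a useful type profile keeps its inline cache (the receiver's own hashCode override can be inlined as a regular virtual call), while a site that cannot be inlined that way still benefits from the intrinsic.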