changeset 23429:15ef554f2f2e jdk8u72-b02

Merge
author robm
date Mon, 12 Oct 2015 16:32:38 +0100
parents be740540f60c (diff) 6b76ba76ec80 (current diff)
children 39d920531a4d bf95d7ddba60
diffstat 11 files changed, 65 insertions(+), 52 deletions(-)
--- a/make/solaris/makefiles/buildtree.make	Tue Oct 06 10:50:38 2015 -0700
+++ b/make/solaris/makefiles/buildtree.make	Mon Oct 12 16:32:38 2015 +0100
@@ -265,9 +265,12 @@
 	    echo "HOTSPOT_EXTRA_SYSDEFS\$$(HOTSPOT_EXTRA_SYSDEFS) = $(HOTSPOT_EXTRA_SYSDEFS)" && \
 	    echo "SYSDEFS += \$$(HOTSPOT_EXTRA_SYSDEFS)"; \
 	echo; \
+	[ -n "$(INCLUDE_TRACE)" ] && \
+	    echo && echo "INCLUDE_TRACE = $(INCLUDE_TRACE)"; \
 	[ -n "$(SPEC)" ] && \
 	    echo "include $(SPEC)"; \
 	echo "include \$$(GAMMADIR)/make/$(OS_FAMILY)/makefiles/$(VARIANT).make"; \
+	echo "include \$$(GAMMADIR)/make/excludeSrc.make"; \
 	echo "include \$$(GAMMADIR)/make/$(OS_FAMILY)/makefiles/$(COMPILER).make"; \
 	) > $@
 
--- a/make/solaris/makefiles/vm.make	Tue Oct 06 10:50:38 2015 -0700
+++ b/make/solaris/makefiles/vm.make	Mon Oct 12 16:32:38 2015 +0100
@@ -204,7 +204,7 @@
 ZERO_SPECIFIC_FILES      := zero
 
 # Always exclude these.
-Src_Files_EXCLUDE := dtrace jsig.c jvmtiEnvRecommended.cpp jvmtiEnvStub.cpp
+Src_Files_EXCLUDE += dtrace jsig.c jvmtiEnvRecommended.cpp jvmtiEnvStub.cpp
 
 # Exclude per type.
 Src_Files_EXCLUDE/CORE      := $(COMPILER1_SPECIFIC_FILES) $(COMPILER2_SPECIFIC_FILES) $(ZERO_SPECIFIC_FILES) $(SHARK_SPECIFIC_FILES) ciTypeFlow.cpp
--- a/src/share/vm/code/codeCache.cpp	Tue Oct 06 10:50:38 2015 -0700
+++ b/src/share/vm/code/codeCache.cpp	Mon Oct 12 16:32:38 2015 +0100
@@ -521,15 +521,17 @@
 
 void CodeCache::gc_epilogue() {
   assert_locked_or_safepoint(CodeCache_lock);
-  FOR_ALL_ALIVE_BLOBS(cb) {
-    if (cb->is_nmethod()) {
-      nmethod *nm = (nmethod*)cb;
-      assert(!nm->is_unloaded(), "Tautology");
-      if (needs_cache_clean()) {
-        nm->cleanup_inline_caches();
+  NOT_DEBUG(if (needs_cache_clean())) {
+    FOR_ALL_ALIVE_BLOBS(cb) {
+      if (cb->is_nmethod()) {
+        nmethod *nm = (nmethod*)cb;
+        assert(!nm->is_unloaded(), "Tautology");
+        DEBUG_ONLY(if (needs_cache_clean())) {
+          nm->cleanup_inline_caches();
+        }
+        DEBUG_ONLY(nm->verify());
+        DEBUG_ONLY(nm->verify_oop_relocations());
       }
-      DEBUG_ONLY(nm->verify());
-      DEBUG_ONLY(nm->verify_oop_relocations());
     }
   }
   set_needs_cache_clean(false);
@@ -734,27 +736,6 @@
   return number_of_marked_CodeBlobs;
 }
 
-void CodeCache::make_marked_nmethods_zombies() {
-  assert(SafepointSynchronize::is_at_safepoint(), "must be at a safepoint");
-  FOR_ALL_ALIVE_NMETHODS(nm) {
-    if (nm->is_marked_for_deoptimization()) {
-
-      // If the nmethod has already been made non-entrant and it can be converted
-      // then zombie it now. Otherwise make it non-entrant and it will eventually
-      // be zombied when it is no longer seen on the stack. Note that the nmethod
-      // might be "entrant" and not on the stack and so could be zombied immediately
-      // but we can't tell because we don't track it on stack until it becomes
-      // non-entrant.
-
-      if (nm->is_not_entrant() && nm->can_not_entrant_be_converted()) {
-        nm->make_zombie();
-      } else {
-        nm->make_not_entrant();
-      }
-    }
-  }
-}
-
 void CodeCache::make_marked_nmethods_not_entrant() {
   assert_locked_or_safepoint(CodeCache_lock);
   FOR_ALL_ALIVE_NMETHODS(nm) {
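
The gc_epilogue() change above relies on HotSpot's DEBUG_ONLY/NOT_DEBUG macro pair: product builds walk the alive blobs only when an inline-cache clean is pending, while debug (ASSERT) builds always walk them so nm->verify() and nm->verify_oop_relocations() still run. The standalone sketch below shows how that macro pattern expands in the two build flavors; Blob, verify_blob and the loop are illustrative stand-ins, not HotSpot code.

    #include <cstdio>
    #include <vector>

    // Stand-ins for HotSpot's macros: in an ASSERT (debug) build the
    // DEBUG_ONLY text is compiled in and the NOT_DEBUG text is dropped;
    // in a product build it is the other way around.
    #ifdef ASSERT
      #define DEBUG_ONLY(code) code
      #define NOT_DEBUG(code)
    #else
      #define DEBUG_ONLY(code)
      #define NOT_DEBUG(code) code
    #endif

    struct Blob { int id; };

    static bool cache_clean_pending = true;

    static bool needs_cache_clean()            { return cache_clean_pending; }
    static void cleanup_inline_caches(Blob& b) { std::printf("clean  %d\n", b.id); }
    static void verify_blob(Blob& b)           { std::printf("verify %d\n", b.id); }

    // Same shape as the patched CodeCache::gc_epilogue(): product builds skip
    // the whole loop unless a clean is pending; debug builds always loop (to
    // verify every blob) and clean only when the flag is set.
    void gc_epilogue(std::vector<Blob>& blobs) {
      NOT_DEBUG(if (needs_cache_clean())) {
        for (Blob& b : blobs) {
          DEBUG_ONLY(if (needs_cache_clean())) {
            cleanup_inline_caches(b);
          }
          DEBUG_ONLY(verify_blob(b));
        }
      }
      cache_clean_pending = false;
    }

    int main() {
      std::vector<Blob> blobs = { {1}, {2} };
      gc_epilogue(blobs);
      return 0;
    }
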
--- a/src/share/vm/code/codeCache.hpp	Tue Oct 06 10:50:38 2015 -0700
+++ b/src/share/vm/code/codeCache.hpp	Mon Oct 12 16:32:38 2015 +0100
@@ -179,7 +179,6 @@
 
   static void mark_all_nmethods_for_deoptimization();
   static int  mark_for_deoptimization(Method* dependee);
-  static void make_marked_nmethods_zombies();
   static void make_marked_nmethods_not_entrant();
 
     // tells how many nmethods have dependencies
--- a/src/share/vm/code/compiledIC.cpp	Tue Oct 06 10:50:38 2015 -0700
+++ b/src/share/vm/code/compiledIC.cpp	Mon Oct 12 16:32:38 2015 +0100
@@ -155,6 +155,14 @@
   return _ic_call->destination();
 }
 
+// Clears the IC stub if the compiled IC is in transition state
+void CompiledIC::clear_ic_stub() {
+  if (is_in_transition_state()) {
+    ICStub* stub = ICStub_from_destination_address(stub_address());
+    stub->clear();
+  }
+}
+
 
 //-----------------------------------------------------------------------------
 // High-level access to an inline cache. Guaranteed to be MT-safe.
@@ -279,6 +287,7 @@
   assert( is_c1_method ||
          !is_monomorphic ||
          is_optimized() ||
+         !caller->is_alive() ||
          (cached_metadata() != NULL && cached_metadata()->is_klass()), "sanity check");
 #endif // ASSERT
   return is_monomorphic;
@@ -313,7 +322,7 @@
 }
 
 
-void CompiledIC::set_to_clean() {
+void CompiledIC::set_to_clean(bool in_use) {
   assert(SafepointSynchronize::is_at_safepoint() || CompiledIC_lock->is_locked() , "MT-unsafe call");
   if (TraceInlineCacheClearing || TraceICs) {
     tty->print_cr("IC@" INTPTR_FORMAT ": set to clean", p2i(instruction_address()));
@@ -329,17 +338,14 @@
 
   // A zombie transition will always be safe, since the metadata has already been set to NULL, so
   // we only need to patch the destination
-  bool safe_transition = is_optimized() || SafepointSynchronize::is_at_safepoint();
+  bool safe_transition = !in_use || is_optimized() || SafepointSynchronize::is_at_safepoint();
 
   if (safe_transition) {
     // Kill any leftover stub we might have too
-    if (is_in_transition_state()) {
-      ICStub* old_stub = ICStub_from_destination_address(stub_address());
-      old_stub->clear();
-    }
+    clear_ic_stub();
     if (is_optimized()) {
-    set_ic_destination(entry);
-  } else {
+      set_ic_destination(entry);
+    } else {
       set_ic_destination_and_value(entry, (void*)NULL);
     }
   } else {
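
In compiledIC.cpp, the leftover-stub handling is factored out of set_to_clean() into the new clear_ic_stub() helper, and set_to_clean() gains an in_use flag so inline caches that belong to a dead nmethod can always take the direct (safe) transition. The following is a minimal sketch of that refactoring shape; Stub, InlineCache and the destination field are hypothetical stand-ins, not the HotSpot classes.

    #include <cstdio>

    struct Stub {
      void clear() { std::printf("transition stub cleared\n"); }
    };

    class InlineCache {
     public:
      explicit InlineCache(Stub* transition_stub = nullptr)
        : _transition_stub(transition_stub) {}

      bool is_in_transition_state() const { return _transition_stub != nullptr; }

      // Extracted helper: drop the pending transition stub, if any, so both
      // set_to_clean() and a bulk per-nmethod pass can reuse the same logic.
      void clear_ic_stub() {
        if (is_in_transition_state()) {
          _transition_stub->clear();
          _transition_stub = nullptr;
        }
      }

      // in_use defaults to true; callers cleaning the ICs of a dead caller
      // pass false, which makes the transition unconditionally safe.
      void set_to_clean(bool in_use = true) {
        bool safe_transition = !in_use || at_safepoint();
        if (safe_transition) {
          clear_ic_stub();
          _destination = nullptr;           // patch the destination directly
        } else {
          std::printf("defer via a new transition stub\n");
        }
      }

     private:
      static bool at_safepoint() { return false; }
      Stub* _transition_stub;
      void* _destination = nullptr;
    };

    int main() {
      Stub s;
      InlineCache ic(&s);
      ic.set_to_clean(/* in_use = */ false);  // dead caller: direct transition
      return 0;
    }
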
--- a/src/share/vm/code/compiledIC.hpp	Tue Oct 06 10:50:38 2015 -0700
+++ b/src/share/vm/code/compiledIC.hpp	Mon Oct 12 16:32:38 2015 +0100
@@ -228,8 +228,9 @@
   //
   // They all takes a TRAP argument, since they can cause a GC if the inline-cache buffer is full.
   //
-  void set_to_clean();  // Can only be called during a safepoint operation
+  void set_to_clean(bool in_use = true);
   void set_to_monomorphic(CompiledICInfo& info);
+  void clear_ic_stub();
 
   // Returns true if successful and false otherwise. The call can fail if memory
   // allocation in the code cache fails.
--- a/src/share/vm/code/nmethod.cpp	Tue Oct 06 10:50:38 2015 -0700
+++ b/src/share/vm/code/nmethod.cpp	Mon Oct 12 16:32:38 2015 +0100
@@ -1148,9 +1148,20 @@
   }
 }
 
+// Clear ICStubs of all compiled ICs
+void nmethod::clear_ic_stubs() {
+  assert_locked_or_safepoint(CompiledIC_lock);
+  RelocIterator iter(this);
+  while(iter.next()) {
+    if (iter.type() == relocInfo::virtual_call_type) {
+      CompiledIC* ic = CompiledIC_at(&iter);
+      ic->clear_ic_stub();
+    }
+  }
+}
+
 
 void nmethod::cleanup_inline_caches() {
-
   assert_locked_or_safepoint(CompiledIC_lock);
 
   // If the method is not entrant or zombie then a JMP is plastered over the
@@ -1166,7 +1177,8 @@
     // In fact, why are we bothering to look at oops in a non-entrant method??
   }
 
-  // Find all calls in an nmethod, and clear the ones that points to zombie methods
+  // Find all calls in an nmethod and clear the ones that point to non-entrant,
+  // zombie and unloaded nmethods.
   ResourceMark rm;
   RelocIterator iter(this, low_boundary);
   while(iter.next()) {
@@ -1178,8 +1190,8 @@
         CodeBlob *cb = CodeCache::find_blob_unsafe(ic->ic_destination());
         if( cb != NULL && cb->is_nmethod() ) {
           nmethod* nm = (nmethod*)cb;
-          // Clean inline caches pointing to both zombie and not_entrant methods
-          if (!nm->is_in_use() || (nm->method()->code() != nm)) ic->set_to_clean();
+          // Clean inline caches pointing to zombie, non-entrant and unloaded methods
+          if (!nm->is_in_use() || (nm->method()->code() != nm)) ic->set_to_clean(is_alive());
         }
         break;
       }
@@ -1188,7 +1200,7 @@
         CodeBlob *cb = CodeCache::find_blob_unsafe(csc->destination());
         if( cb != NULL && cb->is_nmethod() ) {
           nmethod* nm = (nmethod*)cb;
-          // Clean inline caches pointing to both zombie and not_entrant methods
+          // Clean inline caches pointing to zombie, non-entrant and unloaded methods
           if (!nm->is_in_use() || (nm->method()->code() != nm)) csc->set_to_clean();
         }
         break;
@@ -1279,7 +1291,7 @@
 // Tell if a non-entrant method can be converted to a zombie (i.e.,
 // there are no activations on the stack, not in use by the VM,
 // and not in use by the ServiceThread)
-bool nmethod::can_not_entrant_be_converted() {
+bool nmethod::can_convert_to_zombie() {
   assert(is_not_entrant(), "must be a non-entrant method");
 
   // Since the nmethod sweeper only does partial sweep the sweeper's traversal
@@ -2695,7 +2707,7 @@
   // Hmm. OSR methods can be deopted but not marked as zombie or not_entrant
   // seems odd.
 
-  if( is_zombie() || is_not_entrant() )
+  if (is_zombie() || is_not_entrant() || is_unloaded())
     return;
 
   // Make sure all the entry points are correctly aligned for patching.
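
The nmethod.cpp hunks add clear_ic_stubs(), which walks the nmethod's relocations and drops the pending stub of every virtual-call inline cache, and cleanup_inline_caches() now passes is_alive() into set_to_clean() so caches in dying nmethods take the direct transition. The sketch below shows only the iteration shape, with a plain container standing in for HotSpot's RelocIterator (RelocKind and CallSite are illustrative, not real types).

    #include <vector>

    enum class RelocKind { virtual_call, static_call, other };

    struct CallSite {
      RelocKind kind;
      bool      has_pending_stub;
    };

    // Same shape as nmethod::clear_ic_stubs(): visit every relocation and, for
    // the virtual-call ones, clear any IC transition stub that is still pending.
    void clear_ic_stubs(std::vector<CallSite>& relocations) {
      for (CallSite& site : relocations) {
        if (site.kind == RelocKind::virtual_call) {
          site.has_pending_stub = false;     // stands in for ic->clear_ic_stub()
        }
      }
    }

    int main() {
      std::vector<CallSite> relocs = {
        { RelocKind::virtual_call, true  },
        { RelocKind::static_call,  false },
      };
      clear_ic_stubs(relocs);
      return 0;
    }
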
--- a/src/share/vm/code/nmethod.hpp	Tue Oct 06 10:50:38 2015 -0700
+++ b/src/share/vm/code/nmethod.hpp	Mon Oct 12 16:32:38 2015 +0100
@@ -577,6 +577,7 @@
 
   // Inline cache support
   void clear_inline_caches();
+  void clear_ic_stubs();
   void cleanup_inline_caches();
   bool inlinecache_check_contains(address addr) const {
     return (addr >= code_begin() && addr < verified_entry_point());
@@ -604,7 +605,7 @@
 
   // See comment at definition of _last_seen_on_stack
   void mark_as_seen_on_stack();
-  bool can_not_entrant_be_converted();
+  bool can_convert_to_zombie();
 
   // Evolution support. We make old (discarded) compiled methods point to new Method*s.
   void set_method(Method* method) { _method = method; }
--- a/src/share/vm/prims/jvmtiRedefineClasses.cpp	Tue Oct 06 10:50:38 2015 -0700
+++ b/src/share/vm/prims/jvmtiRedefineClasses.cpp	Mon Oct 12 16:32:38 2015 +0100
@@ -3751,7 +3751,7 @@
     // Deoptimize all activations depending on marked nmethods
     Deoptimization::deoptimize_dependents();
 
-    // Make the dependent methods not entrant (in VM_Deoptimize they are made zombies)
+    // Make the dependent methods not entrant
     CodeCache::make_marked_nmethods_not_entrant();
 
     // From now on we know that the dependency information is complete
--- a/src/share/vm/runtime/sweeper.cpp	Tue Oct 06 10:50:38 2015 -0700
+++ b/src/share/vm/runtime/sweeper.cpp	Mon Oct 12 16:32:38 2015 +0100
@@ -538,10 +538,14 @@
   } else if (nm->is_not_entrant()) {
     // If there are no current activations of this method on the
     // stack we can safely convert it to a zombie method
-    if (nm->can_not_entrant_be_converted()) {
+    if (nm->can_convert_to_zombie()) {
       if (PrintMethodFlushing && Verbose) {
         tty->print_cr("### Nmethod %3d/" PTR_FORMAT " (not entrant) being made zombie", nm->compile_id(), nm);
       }
+      // Clear ICStubs to prevent back patching stubs of zombie or unloaded
+      // nmethods during the next safepoint (see ICStub::finalize).
+      MutexLocker cl(CompiledIC_lock);
+      nm->clear_ic_stubs();
       // Code cache state change is tracked in make_zombie()
       nm->make_zombie();
       _zombified_count++;
@@ -567,6 +571,12 @@
       release_nmethod(nm);
       _flushed_count++;
     } else {
+      {
+        // Clean ICs of unloaded nmethods as well because they may reference other
+        // unloaded nmethods that may be flushed earlier in the sweeper cycle.
+        MutexLocker cl(CompiledIC_lock);
+        nm->cleanup_inline_caches();
+      }
       // Code cache state change is tracked in make_zombie()
       nm->make_zombie();
       _zombified_count++;
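
The sweeper.cpp hunks take CompiledIC_lock before touching the inline caches of an nmethod that is about to become a zombie: its IC stubs are cleared (or, for unloaded nmethods, its caches are cleaned) first, and only then is make_zombie() called. Below is a minimal sketch of the scoped-lock pattern used in the second hunk, with std::mutex and std::lock_guard standing in for HotSpot's Monitor and MutexLocker.

    #include <cstdio>
    #include <mutex>

    std::mutex compiled_ic_lock;    // stand-in for CompiledIC_lock

    struct Nmethod {
      void cleanup_inline_caches() { std::printf("inline caches cleaned\n"); }
      void make_zombie()           { std::printf("made zombie\n"); }
    };

    void sweep_unloaded(Nmethod& nm) {
      {
        // Scoped lock, like MutexLocker cl(CompiledIC_lock): clean the caches
        // before the state change so no stale IC can later point at a zombie;
        // the extra braces release the lock before make_zombie() runs.
        std::lock_guard<std::mutex> cl(compiled_ic_lock);
        nm.cleanup_inline_caches();
      }
      nm.make_zombie();
    }

    int main() {
      Nmethod nm;
      sweep_unloaded(nm);
      return 0;
    }
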
--- a/src/share/vm/runtime/vm_operations.cpp	Tue Oct 06 10:50:38 2015 -0700
+++ b/src/share/vm/runtime/vm_operations.cpp	Mon Oct 12 16:32:38 2015 +0100
@@ -106,8 +106,8 @@
   // Deoptimize all activations depending on marked nmethods
   Deoptimization::deoptimize_dependents();
 
-  // Make the dependent methods zombies
-  CodeCache::make_marked_nmethods_zombies();
+  // Make the dependent methods not entrant
+  CodeCache::make_marked_nmethods_not_entrant();
 }
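
With make_marked_nmethods_zombies() removed from the code cache, VM_Deoptimize (like class redefinition above) only demotes marked nmethods to not-entrant; converting a not-entrant nmethod into a zombie is left to the sweeper, which by then has cleared its IC stubs. The sketch below models that two-phase scheme with illustrative types (State, Nmethod and both functions are stand-ins, not HotSpot code).

    #include <cstdio>
    #include <vector>

    enum class State { in_use, not_entrant, zombie };

    struct Nmethod {
      State state                     = State::in_use;
      int   activations_on_stack      = 0;
      bool  marked_for_deoptimization = false;
    };

    // Phase 1 (at the deoptimization safepoint): only demote to not_entrant.
    void make_marked_nmethods_not_entrant(std::vector<Nmethod>& nms) {
      for (Nmethod& nm : nms) {
        if (nm.marked_for_deoptimization && nm.state == State::in_use) {
          nm.state = State::not_entrant;
        }
      }
    }

    // Phase 2 (a later sweeper pass): not_entrant nmethods with no remaining
    // activations become zombies, after their IC stubs have been cleared.
    void sweep(std::vector<Nmethod>& nms) {
      for (Nmethod& nm : nms) {
        if (nm.state == State::not_entrant && nm.activations_on_stack == 0) {
          nm.state = State::zombie;
        }
      }
    }

    int main() {
      std::vector<Nmethod> nms(1);
      nms[0].marked_for_deoptimization = true;
      make_marked_nmethods_not_entrant(nms);
      sweep(nms);
      std::printf("final state: %d\n", static_cast<int>(nms[0].state));
      return 0;
    }
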