diff src/share/vm/memory/referenceProcessor.hpp @ 14909:4ca6dc0799b6

Backout jdk9 merge
author Gilles Duboscq <duboscq@ssw.jku.at>
date Tue, 01 Apr 2014 13:57:07 +0200
parents d8041d695d19
children 52b4284cb496
--- a/src/share/vm/memory/referenceProcessor.hpp	Tue Apr 01 14:09:03 2014 +0200
+++ b/src/share/vm/memory/referenceProcessor.hpp	Tue Apr 01 13:57:07 2014 +0200
@@ -99,7 +99,6 @@
   oop                _referent;
   OopClosure*        _keep_alive;
   BoolObjectClosure* _is_alive;
-  bool               _discovered_list_needs_post_barrier;
 
   DEBUG_ONLY(
   oop                _first_seen; // cyclic linked list check
@@ -113,8 +112,7 @@
 public:
   inline DiscoveredListIterator(DiscoveredList&    refs_list,
                                 OopClosure*        keep_alive,
-                                BoolObjectClosure* is_alive,
-                                bool               discovered_list_needs_post_barrier = false):
+                                BoolObjectClosure* is_alive):
     _refs_list(refs_list),
     _prev_next(refs_list.adr_head()),
     _prev(NULL),
@@ -128,8 +126,7 @@
 #endif
     _next(NULL),
     _keep_alive(keep_alive),
-    _is_alive(is_alive),
-    _discovered_list_needs_post_barrier(discovered_list_needs_post_barrier)
+    _is_alive(is_alive)
 { }
 
   // End Of List.
@@ -231,13 +228,14 @@
   bool        _discovery_is_mt;         // true if reference discovery is MT.
 
   // If true, setting "next" field of a discovered refs list requires
-  // write post barrier.  (Must be true if used in a collector in which
+  // write barrier(s).  (Must be true if used in a collector in which
   // elements of a discovered list may be moved during discovery: for
   // example, a collector like Garbage-First that moves objects during a
   // long-term concurrent marking phase that does weak reference
   // discovery.)
-  bool        _discovered_list_needs_post_barrier;
+  bool        _discovered_list_needs_barrier;
 
+  BarrierSet* _bs;                      // Cached copy of BarrierSet.
   bool        _enqueuing_is_done;       // true if all weak references enqueued
   bool        _processing_is_mt;        // true during phases when
                                         // reference processing is MT.
@@ -383,8 +381,8 @@
 
  protected:
   // Set the 'discovered' field of the given reference to
-  // the given value - emitting post barriers depending upon
-  // the value of _discovered_list_needs_post_barrier.
+  // the given value - emitting barriers depending upon
+  // the value of _discovered_list_needs_barrier.
   void set_discovered(oop ref, oop value);
 
   // "Preclean" the given discovered reference list
@@ -422,13 +420,32 @@
   void update_soft_ref_master_clock();
 
  public:
+  // constructor
+  ReferenceProcessor():
+    _span((HeapWord*)NULL, (HeapWord*)NULL),
+    _discovered_refs(NULL),
+    _discoveredSoftRefs(NULL),  _discoveredWeakRefs(NULL),
+    _discoveredFinalRefs(NULL), _discoveredPhantomRefs(NULL),
+    _discovering_refs(false),
+    _discovery_is_atomic(true),
+    _enqueuing_is_done(false),
+    _discovery_is_mt(false),
+    _discovered_list_needs_barrier(false),
+    _bs(NULL),
+    _is_alive_non_header(NULL),
+    _num_q(0),
+    _max_num_q(0),
+    _processing_is_mt(false),
+    _next_id(0)
+  { }
+
   // Default parameters give you a vanilla reference processor.
   ReferenceProcessor(MemRegion span,
                      bool mt_processing = false, uint mt_processing_degree = 1,
                      bool mt_discovery  = false, uint mt_discovery_degree  = 1,
                      bool atomic_discovery = true,
                      BoolObjectClosure* is_alive_non_header = NULL,
-                     bool discovered_list_needs_post_barrier = false);
+                     bool discovered_list_needs_barrier = false);
 
   // RefDiscoveryPolicy values
   enum DiscoveryPolicy {
@@ -477,7 +494,7 @@
   bool processing_is_mt() const { return _processing_is_mt; }
   void set_mt_processing(bool mt) { _processing_is_mt = mt; }
 
-  // whether all enqueueing of weak references is complete
+  // whether all enqueuing of weak references is complete
   bool enqueuing_is_done()  { return _enqueuing_is_done; }
   void set_enqueuing_is_done(bool v) { _enqueuing_is_done = v; }