comparison src/share/vm/memory/referenceProcessor.cpp @ 3915:c2bf0120ee5d

7085906: Replace the permgen allocated sentinelRef with a self-looped end
Summary: Remove the sentinelRef and let the last Reference in a discovered chain point back to itself.
Reviewed-by: ysr, jmasa
author stefank
date Thu, 01 Sep 2011 16:18:17 +0200
parents 92da084fefc9
children eca1193ca245
comparing 3913:27702f012017 with 3915:c2bf0120ee5d
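
The shape of the change, in miniature: instead of terminating every discovered chain with a permgen-allocated sentinel Reference, the last Reference's discovered field now points back at the Reference itself, and an empty list is simply a NULL head. The standalone sketch below (plain C++, not HotSpot code; Ref is a hypothetical stand-in for java.lang.ref.Reference) shows the push convention and the lagging-pointer traversal idiom the patch uses throughout:

    #include <cstdio>

    // Hypothetical stand-in for java.lang.ref.Reference; only the
    // discovered field matters for the chain shape.
    struct Ref {
      Ref* discovered = nullptr; // chain link; the last element points to itself
      int  id = 0;
    };

    // Push onto the front of a discovered list: when the list is empty,
    // the first element's discovered field loops back to the element itself.
    static void push(Ref*& head, Ref* r) {
      r->discovered = (head != nullptr) ? head : r;
      head = r;
    }

    // Walk with the patch's termination idiom: obj trails next by one
    // step, and the loop stops when discovered points back at the element.
    static void walk(Ref* head) {
      Ref* obj  = nullptr;
      Ref* next = head;
      while (obj != next) {
        obj  = next;
        next = obj->discovered;
        std::printf("visiting ref %d\n", obj->id);
      }
    }

    int main() {
      Ref a{nullptr, 1}, b{nullptr, 2}, c{nullptr, 3};
      Ref* head = nullptr;
      push(head, &a); // a.discovered == &a: self-looped end
      push(head, &b); // b -> a
      push(head, &c); // c -> b -> a -> a
      walk(head);     // prints 3, 2, 1; an empty list prints nothing
      return 0;
    }
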
33 #include "runtime/java.hpp" 33 #include "runtime/java.hpp"
34 #include "runtime/jniHandles.hpp" 34 #include "runtime/jniHandles.hpp"
35 35
36 ReferencePolicy* ReferenceProcessor::_always_clear_soft_ref_policy = NULL; 36 ReferencePolicy* ReferenceProcessor::_always_clear_soft_ref_policy = NULL;
37 ReferencePolicy* ReferenceProcessor::_default_soft_ref_policy = NULL; 37 ReferencePolicy* ReferenceProcessor::_default_soft_ref_policy = NULL;
38 oop ReferenceProcessor::_sentinelRef = NULL;
39 const int subclasses_of_ref = REF_PHANTOM - REF_OTHER; 38 const int subclasses_of_ref = REF_PHANTOM - REF_OTHER;
40 39
41 // List of discovered references. 40 // List of discovered references.
42 class DiscoveredList { 41 class DiscoveredList {
43 public: 42 public:
44 DiscoveredList() : _len(0), _compressed_head(0), _oop_head(NULL) { } 43 DiscoveredList() : _len(0), _compressed_head(0), _oop_head(NULL) { }
45 oop head() const { 44 oop head() const {
46 return UseCompressedOops ? oopDesc::decode_heap_oop_not_null(_compressed_head) : 45 return UseCompressedOops ? oopDesc::decode_heap_oop(_compressed_head) :
47 _oop_head; 46 _oop_head;
48 } 47 }
49 HeapWord* adr_head() { 48 HeapWord* adr_head() {
50 return UseCompressedOops ? (HeapWord*)&_compressed_head : 49 return UseCompressedOops ? (HeapWord*)&_compressed_head :
51 (HeapWord*)&_oop_head; 50 (HeapWord*)&_oop_head;
52 } 51 }
53 void set_head(oop o) { 52 void set_head(oop o) {
54 if (UseCompressedOops) { 53 if (UseCompressedOops) {
55 // Must compress the head ptr. 54 // Must compress the head ptr.
56 _compressed_head = oopDesc::encode_heap_oop_not_null(o); 55 _compressed_head = oopDesc::encode_heap_oop(o);
57 } else { 56 } else {
58 _oop_head = o; 57 _oop_head = o;
59 } 58 }
60 } 59 }
61 bool empty() const { return head() == ReferenceProcessor::sentinel_ref(); } 60 bool empty() const { return head() == NULL; }
62 size_t length() { return _len; } 61 size_t length() { return _len; }
63 void set_length(size_t len) { _len = len; } 62 void set_length(size_t len) { _len = len; }
64 void inc_length(size_t inc) { _len += inc; assert(_len > 0, "Error"); } 63 void inc_length(size_t inc) { _len += inc; assert(_len > 0, "Error"); }
65 void dec_length(size_t dec) { _len -= dec; } 64 void dec_length(size_t dec) { _len -= dec; }
66 private: 65 private:
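
Note the switch from encode_heap_oop_not_null/decode_heap_oop_not_null to the null-tolerant variants: with the sentinel gone, an empty list's head is NULL, which the *_not_null helpers are not allowed to see. A toy model of the distinction, assuming only the convention that narrow value 0 encodes NULL (the real encoding also shifts by the object alignment and adds the narrow-oop base):

    #include <cassert>
    #include <cstdint>

    // Toy compressed-oop heap: encodings are byte offsets biased by 8 so
    // that every object encodes to a nonzero value and 0 is free for NULL.
    static char heap[1 << 16];

    typedef uint32_t narrowOop;

    static narrowOop encode_not_null(void* p) {
      assert(p != nullptr);  // caller promises non-NULL; NULL would encode as garbage
      return (narrowOop)((char*)p - heap) + 8;
    }
    static void* decode_not_null(narrowOop n) {
      assert(n != 0);
      return heap + (n - 8);
    }
    static narrowOop encode(void* p) {  // null-tolerant variant the patch switches to
      return (p == nullptr) ? 0 : encode_not_null(p);
    }
    static void* decode(narrowOop n) {
      return (n == 0) ? nullptr : decode_not_null(n);
    }

    int main() {
      void* obj = heap;  // some "object" in the toy heap
      assert(decode(encode(obj)) == obj);
      assert(decode(encode(nullptr)) == nullptr); // an empty DiscoveredList head
      return 0;
    }
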
74 void referenceProcessor_init() { 73 void referenceProcessor_init() {
75 ReferenceProcessor::init_statics(); 74 ReferenceProcessor::init_statics();
76 } 75 }
77 76
78 void ReferenceProcessor::init_statics() { 77 void ReferenceProcessor::init_statics() {
79 assert(_sentinelRef == NULL, "should be initialized precisely once");
80 EXCEPTION_MARK;
81 _sentinelRef = instanceKlass::cast(
82 SystemDictionary::Reference_klass())->
83 allocate_permanent_instance(THREAD);
84
85 // Initialize the master soft ref clock. 78 // Initialize the master soft ref clock.
86 java_lang_ref_SoftReference::set_clock(os::javaTimeMillis()); 79 java_lang_ref_SoftReference::set_clock(os::javaTimeMillis());
87 80
88 if (HAS_PENDING_EXCEPTION) {
89 Handle ex(THREAD, PENDING_EXCEPTION);
90 vm_exit_during_initialization(ex);
91 }
92 assert(_sentinelRef != NULL && _sentinelRef->is_oop(),
93 "Just constructed it!");
94 _always_clear_soft_ref_policy = new AlwaysClearPolicy(); 81 _always_clear_soft_ref_policy = new AlwaysClearPolicy();
95 _default_soft_ref_policy = new COMPILER2_PRESENT(LRUMaxHeapPolicy()) 82 _default_soft_ref_policy = new COMPILER2_PRESENT(LRUMaxHeapPolicy())
96 NOT_COMPILER2(LRUCurrentHeapPolicy()); 83 NOT_COMPILER2(LRUCurrentHeapPolicy());
97 if (_always_clear_soft_ref_policy == NULL || _default_soft_ref_policy == NULL) { 84 if (_always_clear_soft_ref_policy == NULL || _default_soft_ref_policy == NULL) {
98 vm_exit_during_initialization("Could not allocate reference policy object"); 85 vm_exit_during_initialization("Could not allocate reference policy object");
128 vm_exit_during_initialization("Could not allocate RefProc Array"); 115 vm_exit_during_initialization("Could not allocate RefProc Array");
129 } 116 }
130 _discoveredWeakRefs = &_discoveredSoftRefs[_max_num_q]; 117 _discoveredWeakRefs = &_discoveredSoftRefs[_max_num_q];
131 _discoveredFinalRefs = &_discoveredWeakRefs[_max_num_q]; 118 _discoveredFinalRefs = &_discoveredWeakRefs[_max_num_q];
132 _discoveredPhantomRefs = &_discoveredFinalRefs[_max_num_q]; 119 _discoveredPhantomRefs = &_discoveredFinalRefs[_max_num_q];
133 assert(sentinel_ref() != NULL, "_sentinelRef is NULL"); 120 // Initialize all entries to NULL
134 // Initialize all entries to _sentinelRef
135 for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) { 121 for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
136 _discoveredSoftRefs[i].set_head(sentinel_ref()); 122 _discoveredSoftRefs[i].set_head(NULL);
137 _discoveredSoftRefs[i].set_length(0); 123 _discoveredSoftRefs[i].set_length(0);
138 } 124 }
139 // If we do barriers, cache a copy of the barrier set. 125 // If we do barriers, cache a copy of the barrier set.
140 if (discovered_list_needs_barrier) { 126 if (discovered_list_needs_barrier) {
141 _bs = Universe::heap()->barrier_set(); 127 _bs = Universe::heap()->barrier_set();
163 f->do_oop((narrowOop*)_discoveredSoftRefs[i].adr_head()); 149 f->do_oop((narrowOop*)_discoveredSoftRefs[i].adr_head());
164 } else { 150 } else {
165 f->do_oop((oop*)_discoveredSoftRefs[i].adr_head()); 151 f->do_oop((oop*)_discoveredSoftRefs[i].adr_head());
166 } 152 }
167 } 153 }
168 }
169
170 void ReferenceProcessor::oops_do(OopClosure* f) {
171 f->do_oop(adr_sentinel_ref());
172 } 154 }
173 155
174 void ReferenceProcessor::update_soft_ref_master_clock() { 156 void ReferenceProcessor::update_soft_ref_master_clock() {
175 // Update (advance) the soft ref master clock field. This must be done 157 // Update (advance) the soft ref master clock field. This must be done
176 // after processing the soft ref list. 158 // after processing the soft ref list.
281 unsigned int count = count_jni_refs(); 263 unsigned int count = count_jni_refs();
282 gclog_or_tty->print(", %u refs", count); 264 gclog_or_tty->print(", %u refs", count);
283 } 265 }
284 #endif 266 #endif
285 JNIHandles::weak_oops_do(is_alive, keep_alive); 267 JNIHandles::weak_oops_do(is_alive, keep_alive);
286 // Finally remember to keep sentinel around
287 keep_alive->do_oop(adr_sentinel_ref());
288 complete_gc->do_void(); 268 complete_gc->do_void();
289 } 269 }
290 270
291 271
292 template <class T> 272 template <class T>
332 // to the pending list. 312 // to the pending list.
333 if (TraceReferenceGC && PrintGCDetails) { 313 if (TraceReferenceGC && PrintGCDetails) {
334 gclog_or_tty->print_cr("ReferenceProcessor::enqueue_discovered_reflist list " 314 gclog_or_tty->print_cr("ReferenceProcessor::enqueue_discovered_reflist list "
335 INTPTR_FORMAT, (address)refs_list.head()); 315 INTPTR_FORMAT, (address)refs_list.head());
336 } 316 }
337 oop obj = refs_list.head(); 317
318 oop obj = NULL;
319 oop next = refs_list.head();
338 // Walk down the list, copying the discovered field into 320 // Walk down the list, copying the discovered field into
339 // the next field and clearing it (except for the last 321 // the next field and clearing it.
340 // non-sentinel object which is treated specially to avoid 322 while (obj != next) {
341 // confusion with an active reference). 323 obj = next;
342 while (obj != sentinel_ref()) {
343 assert(obj->is_instanceRef(), "should be reference object"); 324 assert(obj->is_instanceRef(), "should be reference object");
344 oop next = java_lang_ref_Reference::discovered(obj); 325 next = java_lang_ref_Reference::discovered(obj);
345 if (TraceReferenceGC && PrintGCDetails) { 326 if (TraceReferenceGC && PrintGCDetails) {
346 gclog_or_tty->print_cr(" obj " INTPTR_FORMAT "/next " INTPTR_FORMAT, 327 gclog_or_tty->print_cr(" obj " INTPTR_FORMAT "/next " INTPTR_FORMAT,
347 obj, next); 328 obj, next);
348 } 329 }
349 assert(java_lang_ref_Reference::next(obj) == NULL, 330 assert(java_lang_ref_Reference::next(obj) == NULL,
350 "The reference should not be enqueued"); 331 "The reference should not be enqueued");
351 if (next == sentinel_ref()) { // obj is last 332 if (next == obj) { // obj is last
352 // Swap refs_list into pending_list_addr and 333 // Swap refs_list into pending_list_addr and
353 // set obj's next to what we read from pending_list_addr. 334 // set obj's next to what we read from pending_list_addr.
354 oop old = oopDesc::atomic_exchange_oop(refs_list.head(), pending_list_addr); 335 oop old = oopDesc::atomic_exchange_oop(refs_list.head(), pending_list_addr);
355 // Need oop_check on pending_list_addr above; 336 // Need oop_check on pending_list_addr above;
356 // see special oop-check code at the end of 337 // see special oop-check code at the end of
364 } 345 }
365 } else { 346 } else {
366 java_lang_ref_Reference::set_next(obj, next); 347 java_lang_ref_Reference::set_next(obj, next);
367 } 348 }
368 java_lang_ref_Reference::set_discovered(obj, (oop) NULL); 349 java_lang_ref_Reference::set_discovered(obj, (oop) NULL);
369 obj = next;
370 } 350 }
371 } 351 }
372 352
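A simplified model of the new enqueue walk: obj trails next by one step, so the loop body runs exactly once per element and next == obj identifies the last one without a sentinel compare. In the sketch below, plain stores stand in for atomic_exchange_oop, and self-looping the last ref's next when the pending list was empty is an assumption about the branch elided from this hunk:

    #include <cassert>

    // Hypothetical model of the two Reference fields the walk touches.
    struct Ref {
      Ref* discovered = nullptr; // discovered chain; last element self-loops
      Ref* next = nullptr;       // becomes the pending-list link
    };

    static void enqueue(Ref*& list_head, Ref** pending_list_addr) {
      Ref* obj  = nullptr;
      Ref* next = list_head;
      while (obj != next) {
        obj  = next;
        next = obj->discovered;
        if (next == obj) { // obj is last
          // Publish the whole chain on the pending list; the real code
          // swaps with atomic_exchange_oop. If the pending list was empty,
          // the last ref's next self-loops so it is not mistaken for an
          // active (next == NULL) reference.
          Ref* old = *pending_list_addr;
          *pending_list_addr = list_head;
          obj->next = (old != nullptr) ? old : obj;
        } else {
          obj->next = next;
        }
        obj->discovered = nullptr; // the chain link has been consumed
      }
      list_head = nullptr;
    }

    int main() {
      Ref a, b;
      a.discovered = &a;  // list: b -> a(self)
      b.discovered = &a;
      Ref* head = &b;
      Ref* pending = nullptr;
      enqueue(head, &pending);
      assert(pending == &b && b.next == &a && a.next == &a);
      assert(b.discovered == nullptr && a.discovered == nullptr);
      return 0;
    }
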
373 // Parallel enqueue task 353 // Parallel enqueue task
374 class RefProcEnqueueTask: public AbstractRefProcTaskExecutor::EnqueueTask { 354 class RefProcEnqueueTask: public AbstractRefProcTaskExecutor::EnqueueTask {
375 public: 355 public:
376 RefProcEnqueueTask(ReferenceProcessor& ref_processor, 356 RefProcEnqueueTask(ReferenceProcessor& ref_processor,
377 DiscoveredList discovered_refs[], 357 DiscoveredList discovered_refs[],
378 HeapWord* pending_list_addr, 358 HeapWord* pending_list_addr,
379 oop sentinel_ref,
380 int n_queues) 359 int n_queues)
381 : EnqueueTask(ref_processor, discovered_refs, 360 : EnqueueTask(ref_processor, discovered_refs,
382 pending_list_addr, sentinel_ref, n_queues) 361 pending_list_addr, n_queues)
383 { } 362 { }
384 363
385 virtual void work(unsigned int work_id) { 364 virtual void work(unsigned int work_id) {
386 assert(work_id < (unsigned int)_ref_processor.max_num_q(), "Index out-of-bounds"); 365 assert(work_id < (unsigned int)_ref_processor.max_num_q(), "Index out-of-bounds");
387 // Simplest first cut: static partitioning. 366 // Simplest first cut: static partitioning.
394 for (int j = 0; 373 for (int j = 0;
395 j < subclasses_of_ref; 374 j < subclasses_of_ref;
396 j++, index += _n_queues) { 375 j++, index += _n_queues) {
397 _ref_processor.enqueue_discovered_reflist( 376 _ref_processor.enqueue_discovered_reflist(
398 _refs_lists[index], _pending_list_addr); 377 _refs_lists[index], _pending_list_addr);
399 _refs_lists[index].set_head(_sentinel_ref); 378 _refs_lists[index].set_head(NULL);
400 _refs_lists[index].set_length(0); 379 _refs_lists[index].set_length(0);
401 } 380 }
402 } 381 }
403 }; 382 };
404 383
406 void ReferenceProcessor::enqueue_discovered_reflists(HeapWord* pending_list_addr, 385 void ReferenceProcessor::enqueue_discovered_reflists(HeapWord* pending_list_addr,
407 AbstractRefProcTaskExecutor* task_executor) { 386 AbstractRefProcTaskExecutor* task_executor) {
408 if (_processing_is_mt && task_executor != NULL) { 387 if (_processing_is_mt && task_executor != NULL) {
409 // Parallel code 388 // Parallel code
410 RefProcEnqueueTask tsk(*this, _discoveredSoftRefs, 389 RefProcEnqueueTask tsk(*this, _discoveredSoftRefs,
411 pending_list_addr, sentinel_ref(), _max_num_q); 390 pending_list_addr, _max_num_q);
412 task_executor->execute(tsk); 391 task_executor->execute(tsk);
413 } else { 392 } else {
414 // Serial code: call the parent class's implementation 393 // Serial code: call the parent class's implementation
415 for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) { 394 for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
416 enqueue_discovered_reflist(_discoveredSoftRefs[i], pending_list_addr); 395 enqueue_discovered_reflist(_discoveredSoftRefs[i], pending_list_addr);
417 _discoveredSoftRefs[i].set_head(sentinel_ref()); 396 _discoveredSoftRefs[i].set_head(NULL);
418 _discoveredSoftRefs[i].set_length(0); 397 _discoveredSoftRefs[i].set_length(0);
419 } 398 }
420 } 399 }
421 } 400 }
422 401
426 inline DiscoveredListIterator(DiscoveredList& refs_list, 405 inline DiscoveredListIterator(DiscoveredList& refs_list,
427 OopClosure* keep_alive, 406 OopClosure* keep_alive,
428 BoolObjectClosure* is_alive); 407 BoolObjectClosure* is_alive);
429 408
430 // End Of List. 409 // End Of List.
431 inline bool has_next() const { return _next != ReferenceProcessor::sentinel_ref(); } 410 inline bool has_next() const { return _ref != NULL; }
432 411
433 // Get oop to the Reference object. 412 // Get oop to the Reference object.
434 inline oop obj() const { return _ref; } 413 inline oop obj() const { return _ref; }
435 414
436 // Get oop to the referent object. 415 // Get oop to the referent object.
466 445
467 // Update the discovered field. 446 // Update the discovered field.
468 inline void update_discovered() { 447 inline void update_discovered() {
469 // First _prev_next ref actually points into DiscoveredList (gross). 448 // First _prev_next ref actually points into DiscoveredList (gross).
470 if (UseCompressedOops) { 449 if (UseCompressedOops) {
471 _keep_alive->do_oop((narrowOop*)_prev_next); 450 if (!oopDesc::is_null(*(narrowOop*)_prev_next)) {
451 _keep_alive->do_oop((narrowOop*)_prev_next);
452 }
472 } else { 453 } else {
473 _keep_alive->do_oop((oop*)_prev_next); 454 if (!oopDesc::is_null(*(oop*)_prev_next)) {
455 _keep_alive->do_oop((oop*)_prev_next);
456 }
474 } 457 }
475 } 458 }
476 459
477 // NULL out referent pointer. 460 // NULL out referent pointer.
478 inline void clear_referent() { oop_store_raw(_referent_addr, NULL); } 461 inline void clear_referent() { oop_store_raw(_referent_addr, NULL); }
486 inline void move_to_next(); 469 inline void move_to_next();
487 470
488 private: 471 private:
489 DiscoveredList& _refs_list; 472 DiscoveredList& _refs_list;
490 HeapWord* _prev_next; 473 HeapWord* _prev_next;
474 oop _prev;
491 oop _ref; 475 oop _ref;
492 HeapWord* _discovered_addr; 476 HeapWord* _discovered_addr;
493 oop _next; 477 oop _next;
494 HeapWord* _referent_addr; 478 HeapWord* _referent_addr;
495 oop _referent; 479 oop _referent;
507 inline DiscoveredListIterator::DiscoveredListIterator(DiscoveredList& refs_list, 491 inline DiscoveredListIterator::DiscoveredListIterator(DiscoveredList& refs_list,
508 OopClosure* keep_alive, 492 OopClosure* keep_alive,
509 BoolObjectClosure* is_alive) 493 BoolObjectClosure* is_alive)
510 : _refs_list(refs_list), 494 : _refs_list(refs_list),
511 _prev_next(refs_list.adr_head()), 495 _prev_next(refs_list.adr_head()),
496 _prev(NULL),
512 _ref(refs_list.head()), 497 _ref(refs_list.head()),
513 #ifdef ASSERT 498 #ifdef ASSERT
514 _first_seen(refs_list.head()), 499 _first_seen(refs_list.head()),
515 #endif 500 #endif
516 #ifndef PRODUCT 501 #ifndef PRODUCT
517 _processed(0), 502 _processed(0),
518 _removed(0), 503 _removed(0),
519 #endif 504 #endif
520 _next(refs_list.head()), 505 _next(NULL),
521 _keep_alive(keep_alive), 506 _keep_alive(keep_alive),
522 _is_alive(is_alive) 507 _is_alive(is_alive)
523 { } 508 { }
524 509
525 inline bool DiscoveredListIterator::is_referent_alive() const { 510 inline bool DiscoveredListIterator::is_referent_alive() const {
542 "bad referent"); 527 "bad referent");
543 } 528 }
544 529
545 inline void DiscoveredListIterator::next() { 530 inline void DiscoveredListIterator::next() {
546 _prev_next = _discovered_addr; 531 _prev_next = _discovered_addr;
532 _prev = _ref;
547 move_to_next(); 533 move_to_next();
548 } 534 }
549 535
550 inline void DiscoveredListIterator::remove() { 536 inline void DiscoveredListIterator::remove() {
551 assert(_ref->is_oop(), "Dropping a bad reference"); 537 assert(_ref->is_oop(), "Dropping a bad reference");
552 oop_store_raw(_discovered_addr, NULL); 538 oop_store_raw(_discovered_addr, NULL);
539
553 // First _prev_next ref actually points into DiscoveredList (gross). 540 // First _prev_next ref actually points into DiscoveredList (gross).
541 oop new_next;
542 if (_next == _ref) {
543 // At the end of the list, we should make _prev point to itself.
544 // If _ref is the first ref, then _prev_next will be in the DiscoveredList,
545 // and _prev will be NULL.
546 new_next = _prev;
547 } else {
548 new_next = _next;
549 }
550
554 if (UseCompressedOops) { 551 if (UseCompressedOops) {
555 // Remove Reference object from list. 552 // Remove Reference object from list.
556 oopDesc::encode_store_heap_oop_not_null((narrowOop*)_prev_next, _next); 553 oopDesc::encode_store_heap_oop((narrowOop*)_prev_next, new_next);
557 } else { 554 } else {
558 // Remove Reference object from list. 555 // Remove Reference object from list.
559 oopDesc::store_heap_oop((oop*)_prev_next, _next); 556 oopDesc::store_heap_oop((oop*)_prev_next, new_next);
560 } 557 }
561 NOT_PRODUCT(_removed++); 558 NOT_PRODUCT(_removed++);
562 _refs_list.dec_length(1); 559 _refs_list.dec_length(1);
563 } 560 }
564 561
565 inline void DiscoveredListIterator::move_to_next() { 562 inline void DiscoveredListIterator::move_to_next() {
566 _ref = _next; 563 if (_ref == _next) {
564 // End of the list.
565 _ref = NULL;
566 } else {
567 _ref = _next;
568 }
567 assert(_ref != _first_seen, "cyclic ref_list found"); 569 assert(_ref != _first_seen, "cyclic ref_list found");
568 NOT_PRODUCT(_processed++); 570 NOT_PRODUCT(_processed++);
569 } 571 }
570 572
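A minimal model of the iterator's new end handling: a self-loop on discovered marks the end (move_to_next maps it to NULL), and remove() of the last element must re-create the self-loop on the element before it, _prev being NULL when the previous link is the list-head slot itself. All names below are hypothetical stand-ins for the HotSpot types:

    #include <cassert>

    // Hypothetical stand-in for the discovered chain node.
    struct Ref { Ref* discovered = nullptr; };

    // Minimal model of DiscoveredListIterator: prev_next addresses the
    // slot that links to the current element (initially the head slot).
    struct Iter {
      Ref** prev_next;
      Ref*  prev = nullptr;
      Ref*  ref;
      Ref*  next = nullptr;

      explicit Iter(Ref*& head) : prev_next(&head), ref(head) {}

      bool has_next() const { return ref != nullptr; }
      void load_ptrs()      { next = ref->discovered; }

      // Mirrors move_to_next(): a self-loop on discovered marks the end.
      void move_to_next() { ref = (ref == next) ? nullptr : next; }

      // Mirrors next(): record the previous link slot, then step.
      void step() {
        prev_next = &ref->discovered;
        prev = ref;
        move_to_next();
      }

      // Mirrors remove(): unlinking the last element makes the element
      // before it the new self-looped end; if ref was also first, prev is
      // NULL and the head slot is simply cleared.
      void remove() {
        Ref* new_next = (next == ref) ? prev : next;
        *prev_next = new_next;
        ref->discovered = nullptr;
      }
    };

    int main() {
      Ref a, b;            // list: b -> a -> a(self)
      a.discovered = &a;
      b.discovered = &a;
      Ref* head = &b;

      Iter it(head);
      it.load_ptrs();      // at b
      it.step();           // advance to a
      it.load_ptrs();      // at a, next == a: last element
      it.remove();         // b must become the self-looped end
      assert(b.discovered == &b);
      assert(head == &b);
      return 0;
    }
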
571 // NOTE: process_phase*() are largely similar, and at a high level 573 // NOTE: process_phase*() are largely similar, and at a high level
723 iter.obj(), iter.obj()->blueprint()->internal_name()); 725 iter.obj(), iter.obj()->blueprint()->internal_name());
724 } 726 }
725 assert(iter.obj()->is_oop(UseConcMarkSweepGC), "Adding a bad reference"); 727 assert(iter.obj()->is_oop(UseConcMarkSweepGC), "Adding a bad reference");
726 iter.next(); 728 iter.next();
727 } 729 }
728 // Remember to keep sentinel pointer around 730 // Remember to update the next pointer of the last ref.
729 iter.update_discovered(); 731 iter.update_discovered();
730 // Close the reachable set 732 // Close the reachable set
731 complete_gc->do_void(); 733 complete_gc->do_void();
732 } 734 }
733 735
734 void 736 void
737 ReferenceProcessor::clear_discovered_references(DiscoveredList& refs_list) {
738 oop obj = NULL;
739 oop next = refs_list.head();
740 while (next != obj) {
741 obj = next;
742 next = java_lang_ref_Reference::discovered(obj);
743 java_lang_ref_Reference::set_discovered_raw(obj, NULL);
744 }
745 refs_list.set_head(NULL);
746 refs_list.set_length(0);
747 }
748
749 void
735 ReferenceProcessor::abandon_partial_discovered_list(DiscoveredList& refs_list) { 750 ReferenceProcessor::abandon_partial_discovered_list(DiscoveredList& refs_list) {
736 oop obj = refs_list.head(); 751 clear_discovered_references(refs_list);
737 while (obj != sentinel_ref()) {
738 oop discovered = java_lang_ref_Reference::discovered(obj);
739 java_lang_ref_Reference::set_discovered_raw(obj, NULL);
740 obj = discovered;
741 }
742 refs_list.set_head(sentinel_ref());
743 refs_list.set_length(0);
744 } 752 }
745 753
746 void ReferenceProcessor::abandon_partial_discovery() { 754 void ReferenceProcessor::abandon_partial_discovery() {
747 // loop over the lists 755 // loop over the lists
748 for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) { 756 for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
857 avg_refs - ref_lists[to_idx].length()); 865 avg_refs - ref_lists[to_idx].length());
858 } else { 866 } else {
859 refs_to_move = MIN2(ref_lists[from_idx].length() - avg_refs, 867 refs_to_move = MIN2(ref_lists[from_idx].length() - avg_refs,
860 avg_refs - ref_lists[to_idx].length()); 868 avg_refs - ref_lists[to_idx].length());
861 } 869 }
870
871 assert(refs_to_move > 0, "otherwise the code below will fail");
872
862 oop move_head = ref_lists[from_idx].head(); 873 oop move_head = ref_lists[from_idx].head();
863 oop move_tail = move_head; 874 oop move_tail = move_head;
864 oop new_head = move_head; 875 oop new_head = move_head;
865 // find an element to split the list on 876 // find an element to split the list on
866 for (size_t j = 0; j < refs_to_move; ++j) { 877 for (size_t j = 0; j < refs_to_move; ++j) {
867 move_tail = new_head; 878 move_tail = new_head;
868 new_head = java_lang_ref_Reference::discovered(new_head); 879 new_head = java_lang_ref_Reference::discovered(new_head);
869 } 880 }
870 java_lang_ref_Reference::set_discovered(move_tail, ref_lists[to_idx].head()); 881
882 // Add the chain to the to list.
883 if (ref_lists[to_idx].head() == NULL) {
884 // to list is empty. Make a loop at the end.
885 java_lang_ref_Reference::set_discovered(move_tail, move_tail);
886 } else {
887 java_lang_ref_Reference::set_discovered(move_tail, ref_lists[to_idx].head());
888 }
871 ref_lists[to_idx].set_head(move_head); 889 ref_lists[to_idx].set_head(move_head);
872 ref_lists[to_idx].inc_length(refs_to_move); 890 ref_lists[to_idx].inc_length(refs_to_move);
873 ref_lists[from_idx].set_head(new_head); 891
892 // Remove the chain from the from list.
893 if (move_tail == new_head) {
894 // We found the end of the from list.
895 ref_lists[from_idx].set_head(NULL);
896 } else {
897 ref_lists[from_idx].set_head(new_head);
898 }
874 ref_lists[from_idx].dec_length(refs_to_move); 899 ref_lists[from_idx].dec_length(refs_to_move);
875 if (ref_lists[from_idx].length() == 0) { 900 if (ref_lists[from_idx].length() == 0) {
876 break; 901 break;
877 } 902 }
878 } else { 903 } else {
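The splice in balance_queues(), sketched standalone: when the destination list is empty, the moved tail becomes its new self-looped end, and move_tail == new_head after the split loop means the entire source list was consumed. This is also why the new assert demands refs_to_move > 0: a zero-length move would leave move_tail == new_head == move_head and corrupt both lists.

    #include <cassert>
    #include <cstddef>

    struct Ref { Ref* discovered = nullptr; }; // last element self-loops

    // Hypothetical sketch: move the first n refs (0 < n <= length of
    // `from`) onto the front of `to`, preserving the self-loop invariant.
    static void splice(Ref*& from, Ref*& to, size_t n) {
      Ref* move_head = from;
      Ref* move_tail = move_head;
      Ref* new_head  = move_head;
      for (size_t j = 0; j < n; ++j) {   // find the split point
        move_tail = new_head;
        new_head  = new_head->discovered;
      }

      // Attach the moved chain to the destination; an empty destination
      // means move_tail becomes the new self-looped end.
      move_tail->discovered = (to != nullptr) ? to : move_tail;
      to = move_head;

      // Detach from the source; move_tail == new_head means the whole
      // source list was consumed.
      from = (move_tail == new_head) ? nullptr : new_head;
    }

    int main() {
      Ref a, b, c;             // from: c -> b -> a(self)
      a.discovered = &a;
      b.discovered = &a;
      c.discovered = &b;
      Ref* from = &c;
      Ref* to = nullptr;

      splice(from, to, 2);     // move c and b
      assert(to == &c && b.discovered == &b); // b is the new end of `to`
      assert(from == &a && a.discovered == &a);
      return 0;
    }
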
1080 HeapWord* discovered_addr) { 1105 HeapWord* discovered_addr) {
1081 assert(_discovery_is_mt, "!_discovery_is_mt should have been handled by caller"); 1106 assert(_discovery_is_mt, "!_discovery_is_mt should have been handled by caller");
1082 // First we must make sure this object is only enqueued once. CAS in a non null 1107 // First we must make sure this object is only enqueued once. CAS in a non null
1083 // discovered_addr. 1108 // discovered_addr.
1084 oop current_head = refs_list.head(); 1109 oop current_head = refs_list.head();
1110 // The last ref must have its discovered field pointing to itself.
1111 oop next_discovered = (current_head != NULL) ? current_head : obj;
1085 1112
1086 // Note: In the case of G1, this specific pre-barrier is strictly 1113 // Note: In the case of G1, this specific pre-barrier is strictly
1087 // not necessary because the only case we are interested in 1114 // not necessary because the only case we are interested in
1088 // here is when *discovered_addr is NULL (see the CAS further below), 1115 // here is when *discovered_addr is NULL (see the CAS further below),
1089 // so this will expand to nothing. As a result, we have manually 1116 // so this will expand to nothing. As a result, we have manually
1090 // elided this out for G1, but left in the test for some future 1117 // elided this out for G1, but left in the test for some future
1091 // collector that might have need for a pre-barrier here. 1118 // collector that might have need for a pre-barrier here.
1092 if (_discovered_list_needs_barrier && !UseG1GC) { 1119 if (_discovered_list_needs_barrier && !UseG1GC) {
1093 if (UseCompressedOops) { 1120 if (UseCompressedOops) {
1094 _bs->write_ref_field_pre((narrowOop*)discovered_addr, current_head); 1121 _bs->write_ref_field_pre((narrowOop*)discovered_addr, next_discovered);
1095 } else { 1122 } else {
1096 _bs->write_ref_field_pre((oop*)discovered_addr, current_head); 1123 _bs->write_ref_field_pre((oop*)discovered_addr, next_discovered);
1097 } 1124 }
1098 guarantee(false, "Need to check non-G1 collector"); 1125 guarantee(false, "Need to check non-G1 collector");
1099 } 1126 }
1100 oop retest = oopDesc::atomic_compare_exchange_oop(current_head, discovered_addr, 1127 oop retest = oopDesc::atomic_compare_exchange_oop(next_discovered, discovered_addr,
1101 NULL); 1128 NULL);
1102 if (retest == NULL) { 1129 if (retest == NULL) {
1103 // This thread just won the right to enqueue the object. 1130 // This thread just won the right to enqueue the object.
1104 // We have separate lists for enqueueing so no synchronization 1131 // We have separate lists for enqueueing so no synchronization
1105 // is necessary. 1132 // is necessary.
1106 refs_list.set_head(obj); 1133 refs_list.set_head(obj);
1107 refs_list.inc_length(1); 1134 refs_list.inc_length(1);
1108 if (_discovered_list_needs_barrier) { 1135 if (_discovered_list_needs_barrier) {
1109 _bs->write_ref_field((void*)discovered_addr, current_head); 1136 _bs->write_ref_field((void*)discovered_addr, next_discovered);
1110 } 1137 }
1111 1138
1112 if (TraceReferenceGC) { 1139 if (TraceReferenceGC) {
1113 gclog_or_tty->print_cr("Enqueued reference (mt) (" INTPTR_FORMAT ": %s)", 1140 gclog_or_tty->print_cr("Enqueued reference (mt) (" INTPTR_FORMAT ": %s)",
1114 obj, obj->blueprint()->internal_name()); 1141 obj, obj->blueprint()->internal_name());
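
A sketch of the multi-threaded discovery step: each GC worker owns its own list, but several workers may race to discover the same object, so the object's discovered field is claimed with a CAS from NULL; because a chain must end in a self-loop, a worker whose list is still empty links the object to itself rather than to NULL. Here std::atomic stands in for oopDesc::atomic_compare_exchange_oop, and all barrier work is omitted:

    #include <atomic>
    #include <cassert>
    #include <cstddef>

    struct Ref { std::atomic<Ref*> discovered{nullptr}; };

    static bool discover(Ref*& list_head, size_t& list_len, Ref* obj) {
      Ref* current_head = list_head;
      // The last ref must have its discovered field pointing to itself,
      // so an empty list links the new element back to itself.
      Ref* next_discovered = (current_head != nullptr) ? current_head : obj;

      Ref* expected = nullptr;
      if (obj->discovered.compare_exchange_strong(expected, next_discovered)) {
        // Won the race to discover obj; each worker owns its own list, so
        // the head update needs no further synchronization.
        list_head = obj;
        list_len += 1;
        return true;
      }
      return false; // some worker (perhaps this one) already discovered obj
    }

    int main() {
      Ref r1, r2;
      Ref* head = nullptr;
      size_t len = 0;
      assert(discover(head, len, &r1));    // first element: r1 self-loops
      assert(r1.discovered.load() == &r1);
      assert(discover(head, len, &r2));    // r2 -> r1
      assert(!discover(head, len, &r2));   // CAS fails: already on a list
      assert(head == &r2 && len == 2);
      return 0;
    }
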
1260 // If "_discovered_list_needs_barrier", we do write barriers when 1287 // If "_discovered_list_needs_barrier", we do write barriers when
1261 // updating the discovered reference list. Otherwise, we do a raw store 1288 // updating the discovered reference list. Otherwise, we do a raw store
1262 // here: the field will be visited later when processing the discovered 1289 // here: the field will be visited later when processing the discovered
1263 // references. 1290 // references.
1264 oop current_head = list->head(); 1291 oop current_head = list->head();
1292 // The last ref must have its discovered field pointing to itself.
1293 oop next_discovered = (current_head != NULL) ? current_head : obj;
1294
1265 // As in the case further above, since we are over-writing a NULL 1295 // As in the case further above, since we are over-writing a NULL
1266 // pre-value, we can safely elide the pre-barrier here for the case of G1. 1296 // pre-value, we can safely elide the pre-barrier here for the case of G1.
1267 assert(discovered == NULL, "control point invariant"); 1297 assert(discovered == NULL, "control point invariant");
1268 if (_discovered_list_needs_barrier && !UseG1GC) { // safe to elide for G1 1298 if (_discovered_list_needs_barrier && !UseG1GC) { // safe to elide for G1
1269 if (UseCompressedOops) { 1299 if (UseCompressedOops) {
1270 _bs->write_ref_field_pre((narrowOop*)discovered_addr, current_head); 1300 _bs->write_ref_field_pre((narrowOop*)discovered_addr, next_discovered);
1271 } else { 1301 } else {
1272 _bs->write_ref_field_pre((oop*)discovered_addr, current_head); 1302 _bs->write_ref_field_pre((oop*)discovered_addr, next_discovered);
1273 } 1303 }
1274 guarantee(false, "Need to check non-G1 collector"); 1304 guarantee(false, "Need to check non-G1 collector");
1275 } 1305 }
1276 oop_store_raw(discovered_addr, current_head); 1306 oop_store_raw(discovered_addr, next_discovered);
1277 if (_discovered_list_needs_barrier) { 1307 if (_discovered_list_needs_barrier) {
1278 _bs->write_ref_field((void*)discovered_addr, current_head); 1308 _bs->write_ref_field((void*)discovered_addr, next_discovered);
1279 } 1309 }
1280 list->set_head(obj); 1310 list->set_head(obj);
1281 list->inc_length(1); 1311 list->inc_length(1);
1282 1312
1283 if (TraceReferenceGC) { 1313 if (TraceReferenceGC) {
1435 void ReferenceProcessor::verify_ok_to_handle_reflists() { 1465 void ReferenceProcessor::verify_ok_to_handle_reflists() {
1436 // empty for now 1466 // empty for now
1437 } 1467 }
1438 #endif 1468 #endif
1439 1469
1440 void ReferenceProcessor::verify() {
1441 guarantee(sentinel_ref() != NULL && sentinel_ref()->is_oop(), "Lost _sentinelRef");
1442 }
1443
1444 #ifndef PRODUCT 1470 #ifndef PRODUCT
1445 void ReferenceProcessor::clear_discovered_references() { 1471 void ReferenceProcessor::clear_discovered_references() {
1446 guarantee(!_discovering_refs, "Discovering refs?"); 1472 guarantee(!_discovering_refs, "Discovering refs?");
1447 for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) { 1473 for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
1448 oop obj = _discoveredSoftRefs[i].head(); 1474 clear_discovered_references(_discoveredSoftRefs[i]);
1449 while (obj != sentinel_ref()) { 1475 }
1450 oop next = java_lang_ref_Reference::discovered(obj); 1476 }
1451 java_lang_ref_Reference::set_discovered(obj, (oop) NULL); 1477
1452 obj = next;
1453 }
1454 _discoveredSoftRefs[i].set_head(sentinel_ref());
1455 _discoveredSoftRefs[i].set_length(0);
1456 }
1457 }
1458 #endif // PRODUCT 1478 #endif // PRODUCT