comparison src/share/vm/memory/referenceProcessor.cpp @ 14521:29ccc4cbabca

Merge
author Gilles Duboscq <duboscq@ssw.jku.at>
date Wed, 12 Mar 2014 13:30:08 +0100
parents d8041d695d19
children 4ca6dc0799b6
43 void referenceProcessor_init() { 43 void referenceProcessor_init() {
44 ReferenceProcessor::init_statics(); 44 ReferenceProcessor::init_statics();
45 } 45 }
46 46
47 void ReferenceProcessor::init_statics() { 47 void ReferenceProcessor::init_statics() {
48 // We need a monotonically non-deccreasing time in ms but 48 // We need a monotonically non-decreasing time in ms but
49 // os::javaTimeMillis() does not guarantee monotonicity. 49 // os::javaTimeMillis() does not guarantee monotonicity.
50 jlong now = os::javaTimeNanos() / NANOSECS_PER_MILLISEC; 50 jlong now = os::javaTimeNanos() / NANOSECS_PER_MILLISEC;
51 51
52 // Initialize the soft ref timestamp clock. 52 // Initialize the soft ref timestamp clock.
53 _soft_ref_timestamp_clock = now; 53 _soft_ref_timestamp_clock = now;
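[Note: The conversion above deliberately avoids os::javaTimeMillis(), which tracks wall-clock time and may jump backwards, and instead derives milliseconds from the monotonic nanosecond source. A minimal standalone sketch of the same idiom, using std::chrono::steady_clock as a stand-in for os::javaTimeNanos() (names here are illustrative, not HotSpot API):

    #include <chrono>
    #include <cstdint>

    // Monotonically non-decreasing milliseconds, derived from a nanosecond
    // monotonic clock; mirrors javaTimeNanos() / NANOSECS_PER_MILLISEC.
    static int64_t monotonic_millis() {
      using namespace std::chrono;
      // steady_clock is monotonic by contract, unlike a wall clock
      // (the analogue of os::javaTimeMillis()).
      return duration_cast<milliseconds>(
          steady_clock::now().time_since_epoch()).count();
    }
]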
60 if (_always_clear_soft_ref_policy == NULL || _default_soft_ref_policy == NULL) { 60 if (_always_clear_soft_ref_policy == NULL || _default_soft_ref_policy == NULL) {
61 vm_exit_during_initialization("Could not allocate reference policy object"); 61 vm_exit_during_initialization("Could not allocate reference policy object");
62 } 62 }
63 guarantee(RefDiscoveryPolicy == ReferenceBasedDiscovery || 63 guarantee(RefDiscoveryPolicy == ReferenceBasedDiscovery ||
64 RefDiscoveryPolicy == ReferentBasedDiscovery, 64 RefDiscoveryPolicy == ReferentBasedDiscovery,
65 "Unrecongnized RefDiscoveryPolicy"); 65 "Unrecognized RefDiscoveryPolicy");
66 _pending_list_uses_discovered_field = JDK_Version::current().pending_list_uses_discovered_field(); 66 _pending_list_uses_discovered_field = JDK_Version::current().pending_list_uses_discovered_field();
67 } 67 }
68 68
69 void ReferenceProcessor::enable_discovery(bool verify_disabled, bool check_no_refs) { 69 void ReferenceProcessor::enable_discovery(bool verify_disabled, bool check_no_refs) {
70 #ifdef ASSERT 70 #ifdef ASSERT
93 uint mt_processing_degree, 93 uint mt_processing_degree,
94 bool mt_discovery, 94 bool mt_discovery,
95 uint mt_discovery_degree, 95 uint mt_discovery_degree,
96 bool atomic_discovery, 96 bool atomic_discovery,
97 BoolObjectClosure* is_alive_non_header, 97 BoolObjectClosure* is_alive_non_header,
98 bool discovered_list_needs_barrier) : 98 bool discovered_list_needs_post_barrier) :
99 _discovering_refs(false), 99 _discovering_refs(false),
100 _enqueuing_is_done(false), 100 _enqueuing_is_done(false),
101 _is_alive_non_header(is_alive_non_header), 101 _is_alive_non_header(is_alive_non_header),
102 _discovered_list_needs_barrier(discovered_list_needs_barrier), 102 _discovered_list_needs_post_barrier(discovered_list_needs_post_barrier),
103 _bs(NULL),
104 _processing_is_mt(mt_processing), 103 _processing_is_mt(mt_processing),
105 _next_id(0) 104 _next_id(0)
106 { 105 {
107 _span = span; 106 _span = span;
108 _discovery_is_atomic = atomic_discovery; 107 _discovery_is_atomic = atomic_discovery;
124 for (uint i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) { 123 for (uint i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
125 _discovered_refs[i].set_head(NULL); 124 _discovered_refs[i].set_head(NULL);
126 _discovered_refs[i].set_length(0); 125 _discovered_refs[i].set_length(0);
127 } 126 }
128 127
129 // If we do barriers, cache a copy of the barrier set.
130 if (discovered_list_needs_barrier) {
131 _bs = Universe::heap()->barrier_set();
132 }
133 setup_policy(false /* default soft ref policy */); 128 setup_policy(false /* default soft ref policy */);
134 } 129 }
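[Note: This hunk renames discovered_list_needs_barrier to discovered_list_needs_post_barrier and drops the cached _bs field in favor of calling oopDesc::bs() directly. The rename makes explicit which half of the write barrier the discovered lists actually need. A hedged sketch of that split, with hypothetical types rather than the real BarrierSet interface:

    struct oopDesc; typedef oopDesc* oop;

    struct BarrierSketch {
      // Pre-barrier (e.g. G1 SATB): runs before the store and records the
      // value about to be overwritten so concurrent marking still sees it.
      void write_ref_field_pre(oop* field) { record_old_value(*field); }

      // Post-barrier (e.g. card marking / remembered sets): runs after the
      // store and records the updated location. This is the only half the
      // discovered lists need, hence "needs_post_barrier".
      void write_ref_field(oop* field)     { dirty_card_for(field); }

      void record_old_value(oop old_val)   { (void)old_val; }
      void dirty_card_for(oop* field)      { (void)field; }
    };
]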
135 130
136 #ifndef PRODUCT 131 #ifndef PRODUCT
137 void ReferenceProcessor::verify_no_references_recorded() { 132 void ReferenceProcessor::verify_no_references_recorded() {
155 150
156 void ReferenceProcessor::update_soft_ref_master_clock() { 151 void ReferenceProcessor::update_soft_ref_master_clock() {
157 // Update (advance) the soft ref master clock field. This must be done 152 // Update (advance) the soft ref master clock field. This must be done
158 // after processing the soft ref list. 153 // after processing the soft ref list.
159 154
160 // We need a monotonically non-deccreasing time in ms but 155 // We need a monotonically non-decreasing time in ms but
161 // os::javaTimeMillis() does not guarantee monotonicity. 156 // os::javaTimeMillis() does not guarantee monotonicity.
162 jlong now = os::javaTimeNanos() / NANOSECS_PER_MILLISEC; 157 jlong now = os::javaTimeNanos() / NANOSECS_PER_MILLISEC;
163 jlong soft_ref_clock = java_lang_ref_SoftReference::clock(); 158 jlong soft_ref_clock = java_lang_ref_SoftReference::clock();
164 assert(soft_ref_clock == _soft_ref_timestamp_clock, "soft ref clocks out of sync"); 159 assert(soft_ref_clock == _soft_ref_timestamp_clock, "soft ref clocks out of sync");
165 160
171 ) 166 )
172 // The values of now and _soft_ref_timestamp_clock are set using 167 // The values of now and _soft_ref_timestamp_clock are set using
173 // javaTimeNanos(), which is guaranteed to be monotonically 168 // javaTimeNanos(), which is guaranteed to be monotonically
174 // non-decreasing provided the underlying platform provides such 169 // non-decreasing provided the underlying platform provides such
175 // a time source (and it is bug free). 170 // a time source (and it is bug free).
176 // In product mode, however, protect ourselves from non-monotonicty. 171 // In product mode, however, protect ourselves from non-monotonicity.
177 if (now > _soft_ref_timestamp_clock) { 172 if (now > _soft_ref_timestamp_clock) {
178 _soft_ref_timestamp_clock = now; 173 _soft_ref_timestamp_clock = now;
179 java_lang_ref_SoftReference::set_clock(now); 174 java_lang_ref_SoftReference::set_clock(now);
180 } 175 }
181 // Else leave clock stalled at its old value until time progresses 176 // Else leave clock stalled at its old value until time progresses
315 T old_pending_list_value = *pending_list_addr; 310 T old_pending_list_value = *pending_list_addr;
316 311
317 // Enqueue references that are not made active again, and 312 // Enqueue references that are not made active again, and
318 // clear the decks for the next collection (cycle). 313 // clear the decks for the next collection (cycle).
319 ref->enqueue_discovered_reflists((HeapWord*)pending_list_addr, task_executor); 314 ref->enqueue_discovered_reflists((HeapWord*)pending_list_addr, task_executor);
320 // Do the oop-check on pending_list_addr missed in 315 // Do the post-barrier on pending_list_addr missed in
321 // enqueue_discovered_reflist. We should probably 316 // enqueue_discovered_reflist.
322 // do a raw oop_check so that future such idempotent 317 oopDesc::bs()->write_ref_field(pending_list_addr, oopDesc::load_decode_heap_oop(pending_list_addr));
323 // oop_stores relying on the oop-check side-effect
324 // may be elided automatically and safely without
325 // affecting correctness.
326 oop_store(pending_list_addr, oopDesc::load_decode_heap_oop(pending_list_addr));
327 318
328 // Stop treating discovered references specially. 319 // Stop treating discovered references specially.
329 ref->disable_discovery(); 320 ref->disable_discovery();
330 321
331 // Return true if new pending references were added 322 // Return true if new pending references were added
356 INTPTR_FORMAT, (address)refs_list.head()); 347 INTPTR_FORMAT, (address)refs_list.head());
357 } 348 }
358 349
359 oop obj = NULL; 350 oop obj = NULL;
360 oop next_d = refs_list.head(); 351 oop next_d = refs_list.head();
361 if (pending_list_uses_discovered_field()) { // New behaviour 352 if (pending_list_uses_discovered_field()) { // New behavior
362 // Walk down the list, self-looping the next field 353 // Walk down the list, self-looping the next field
363 // so that the References are not considered active. 354 // so that the References are not considered active.
364 while (obj != next_d) { 355 while (obj != next_d) {
365 obj = next_d; 356 obj = next_d;
366 assert(obj->is_instanceRef(), "should be reference object"); 357 assert(obj->is_instanceRef(), "should be reference object");
370 (void *)obj, (void *)next_d); 361 (void *)obj, (void *)next_d);
371 } 362 }
372 assert(java_lang_ref_Reference::next(obj) == NULL, 363 assert(java_lang_ref_Reference::next(obj) == NULL,
373 "Reference not active; should not be discovered"); 364 "Reference not active; should not be discovered");
374 // Self-loop next, so as to make Ref not active. 365 // Self-loop next, so as to make Ref not active.
375 java_lang_ref_Reference::set_next(obj, obj); 366 // Post-barrier not needed when looping to self.
367 java_lang_ref_Reference::set_next_raw(obj, obj);
376 if (next_d == obj) { // obj is last 368 if (next_d == obj) { // obj is last
377 // Swap refs_list into pendling_list_addr and 369 // Swap refs_list into pending_list_addr and
378 // set obj's discovered to what we read from pending_list_addr. 370 // set obj's discovered to what we read from pending_list_addr.
379 oop old = oopDesc::atomic_exchange_oop(refs_list.head(), pending_list_addr); 371 oop old = oopDesc::atomic_exchange_oop(refs_list.head(), pending_list_addr);
380 // Need oop_check on pending_list_addr above; 372 // Need post-barrier on pending_list_addr above;
381 // see special oop-check code at the end of 373 // see special post-barrier code at the end of
382 // enqueue_discovered_reflists() further below. 374 // enqueue_discovered_reflists() further below.
383 java_lang_ref_Reference::set_discovered(obj, old); // old may be NULL 375 java_lang_ref_Reference::set_discovered_raw(obj, old); // old may be NULL
376 oopDesc::bs()->write_ref_field(java_lang_ref_Reference::discovered_addr(obj), old);
384 } 377 }
385 } 378 }
386 } else { // Old behaviour 379 } else { // Old behavior
387 // Walk down the list, copying the discovered field into 380 // Walk down the list, copying the discovered field into
388 // the next field and clearing the discovered field. 381 // the next field and clearing the discovered field.
389 while (obj != next_d) { 382 while (obj != next_d) {
390 obj = next_d; 383 obj = next_d;
391 assert(obj->is_instanceRef(), "should be reference object"); 384 assert(obj->is_instanceRef(), "should be reference object");
395 (void *)obj, (void *)next_d); 388 (void *)obj, (void *)next_d);
396 } 389 }
397 assert(java_lang_ref_Reference::next(obj) == NULL, 390 assert(java_lang_ref_Reference::next(obj) == NULL,
398 "The reference should not be enqueued"); 391 "The reference should not be enqueued");
399 if (next_d == obj) { // obj is last 392 if (next_d == obj) { // obj is last
400 // Swap refs_list into pendling_list_addr and 393 // Swap refs_list into pending_list_addr and
401 // set obj's next to what we read from pending_list_addr. 394 // set obj's next to what we read from pending_list_addr.
402 oop old = oopDesc::atomic_exchange_oop(refs_list.head(), pending_list_addr); 395 oop old = oopDesc::atomic_exchange_oop(refs_list.head(), pending_list_addr);
403 // Need oop_check on pending_list_addr above; 396 // Need oop_check on pending_list_addr above;
404 // see special oop-check code at the end of 397 // see special oop-check code at the end of
405 // enqueue_discovered_reflists() further below. 398 // enqueue_discovered_reflists() further below.
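[Note: Both branches above end the same way: atomically swap the local list into the pending-list head and chain the old head onto the last element. A compact sketch of the new-behavior splice, using std::atomic in place of oopDesc::atomic_exchange_oop (the Ref type and field names are illustrative):

    #include <atomic>

    struct Ref {
      Ref* next;        // self-loop marks the Reference "inactive"
      Ref* discovered;  // links discovered/pending References
    };

    void splice_into_pending(Ref* head, Ref* last,
                             std::atomic<Ref*>& pending) {
      last->next = last;                  // set_next_raw(obj, obj): no
                                          // post-barrier needed for a self-loop
      Ref* old = pending.exchange(head);  // atomic_exchange_oop on the
                                          // pending-list head
      last->discovered = old;             // set_discovered_raw; old may be NULL
      // The real code follows the raw store with an explicit post-barrier
      // (write_ref_field) on the discovered field.
    }
]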
495 // and _prev will be NULL. 488 // and _prev will be NULL.
496 new_next = _prev; 489 new_next = _prev;
497 } else { 490 } else {
498 new_next = _next; 491 new_next = _next;
499 } 492 }
500 493 // Remove Reference object from discovered list. Note that G1 does not need a
501 if (UseCompressedOops) { 494 // pre-barrier here because we know the Reference has already been found/marked,
502 // Remove Reference object from list. 495 // that's how it ended up in the discovered list in the first place.
503 oopDesc::encode_store_heap_oop((narrowOop*)_prev_next, new_next); 496 oop_store_raw(_prev_next, new_next);
504 } else { 497 if (_discovered_list_needs_post_barrier && _prev_next != _refs_list.adr_head()) {
505 // Remove Reference object from list. 498 // Needs post-barrier and this is not the list head (which is not on the heap)
506 oopDesc::store_heap_oop((oop*)_prev_next, new_next); 499 oopDesc::bs()->write_ref_field(_prev_next, new_next);
507 } 500 }
508 NOT_PRODUCT(_removed++); 501 NOT_PRODUCT(_removed++);
509 _refs_list.dec_length(1); 502 _refs_list.dec_length(1);
510 } 503 }
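[Note: The removal path above replaces the UseCompressedOops branching with a raw store plus a conditional post-barrier, skipped when _prev_next is the list head, which does not live in the heap. A hedged sketch of the unlink step (illustrative types, not the real iterator):

    struct Ref { Ref* discovered; };

    void unlink(Ref** prev_next, Ref* new_next,
                bool needs_post_barrier, Ref** list_head_slot) {
      *prev_next = new_next;  // oop_store_raw: no pre-barrier, because the
                              // Reference was already found/marked when it
                              // was put on the discovered list
      if (needs_post_barrier && prev_next != list_head_slot) {
        // write_ref_field(prev_next, new_next), e.g. dirty the covering card
      }
    }
]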
511 504
514 // For G1 we don't want to use set_next - it 507 // For G1 we don't want to use set_next - it
515 // will dirty the card for the next field of 508 // will dirty the card for the next field of
516 // the reference object and will fail 509 // the reference object and will fail
517 // CT verification. 510 // CT verification.
518 if (UseG1GC) { 511 if (UseG1GC) {
519 BarrierSet* bs = oopDesc::bs();
520 HeapWord* next_addr = java_lang_ref_Reference::next_addr(_ref); 512 HeapWord* next_addr = java_lang_ref_Reference::next_addr(_ref);
521
522 if (UseCompressedOops) { 513 if (UseCompressedOops) {
523 bs->write_ref_field_pre((narrowOop*)next_addr, NULL); 514 oopDesc::bs()->write_ref_field_pre((narrowOop*)next_addr, NULL);
524 } else { 515 } else {
525 bs->write_ref_field_pre((oop*)next_addr, NULL); 516 oopDesc::bs()->write_ref_field_pre((oop*)next_addr, NULL);
526 } 517 }
527 java_lang_ref_Reference::set_next_raw(_ref, NULL); 518 java_lang_ref_Reference::set_next_raw(_ref, NULL);
528 } else { 519 } else {
529 java_lang_ref_Reference::set_next(_ref, NULL); 520 java_lang_ref_Reference::set_next(_ref, NULL);
530 } 521 }
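[Note: For G1 the code issues the pre-barrier by hand and then clears the field with a raw store, because set_next would also run the post-barrier, dirty the card for the Reference's next field, and fail card-table verification. A sketch of that ordering (pre-barrier, then barrier-free store), with a stubbed hypothetical helper:

    struct oopDesc; typedef oopDesc* oop;

    static void write_ref_field_pre(oop* field, oop new_val) {
      (void)field; (void)new_val;  // SATB sketch: enqueue *field for marking
    }

    static void clear_next_for_g1(oop* next_addr) {
      write_ref_field_pre(next_addr, nullptr);  // save old value for marking
      *next_addr = nullptr;                     // raw store: no card dirtied
    }
]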
551 ReferencePolicy* policy, 542 ReferencePolicy* policy,
552 BoolObjectClosure* is_alive, 543 BoolObjectClosure* is_alive,
553 OopClosure* keep_alive, 544 OopClosure* keep_alive,
554 VoidClosure* complete_gc) { 545 VoidClosure* complete_gc) {
555 assert(policy != NULL, "Must have a non-NULL policy"); 546 assert(policy != NULL, "Must have a non-NULL policy");
556 DiscoveredListIterator iter(refs_list, keep_alive, is_alive); 547 DiscoveredListIterator iter(refs_list, keep_alive, is_alive, _discovered_list_needs_post_barrier);
557 // Decide which softly reachable refs should be kept alive. 548 // Decide which softly reachable refs should be kept alive.
558 while (iter.has_next()) { 549 while (iter.has_next()) {
559 iter.load_ptrs(DEBUG_ONLY(!discovery_is_atomic() /* allow_null_referent */)); 550 iter.load_ptrs(DEBUG_ONLY(!discovery_is_atomic() /* allow_null_referent */));
560 bool referent_is_dead = (iter.referent() != NULL) && !iter.is_referent_alive(); 551 bool referent_is_dead = (iter.referent() != NULL) && !iter.is_referent_alive();
561 if (referent_is_dead && 552 if (referent_is_dead &&
591 void 582 void
592 ReferenceProcessor::pp2_work(DiscoveredList& refs_list, 583 ReferenceProcessor::pp2_work(DiscoveredList& refs_list,
593 BoolObjectClosure* is_alive, 584 BoolObjectClosure* is_alive,
594 OopClosure* keep_alive) { 585 OopClosure* keep_alive) {
595 assert(discovery_is_atomic(), "Error"); 586 assert(discovery_is_atomic(), "Error");
596 DiscoveredListIterator iter(refs_list, keep_alive, is_alive); 587 DiscoveredListIterator iter(refs_list, keep_alive, is_alive, _discovered_list_needs_post_barrier);
597 while (iter.has_next()) { 588 while (iter.has_next()) {
598 iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */)); 589 iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
599 DEBUG_ONLY(oop next = java_lang_ref_Reference::next(iter.obj());) 590 DEBUG_ONLY(oop next = java_lang_ref_Reference::next(iter.obj());)
600 assert(next == NULL, "Should not discover inactive Reference"); 591 assert(next == NULL, "Should not discover inactive Reference");
601 if (iter.is_referent_alive()) { 592 if (iter.is_referent_alive()) {
628 ReferenceProcessor::pp2_work_concurrent_discovery(DiscoveredList& refs_list, 619 ReferenceProcessor::pp2_work_concurrent_discovery(DiscoveredList& refs_list,
629 BoolObjectClosure* is_alive, 620 BoolObjectClosure* is_alive,
630 OopClosure* keep_alive, 621 OopClosure* keep_alive,
631 VoidClosure* complete_gc) { 622 VoidClosure* complete_gc) {
632 assert(!discovery_is_atomic(), "Error"); 623 assert(!discovery_is_atomic(), "Error");
633 DiscoveredListIterator iter(refs_list, keep_alive, is_alive); 624 DiscoveredListIterator iter(refs_list, keep_alive, is_alive, _discovered_list_needs_post_barrier);
634 while (iter.has_next()) { 625 while (iter.has_next()) {
635 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */)); 626 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
636 HeapWord* next_addr = java_lang_ref_Reference::next_addr(iter.obj()); 627 HeapWord* next_addr = java_lang_ref_Reference::next_addr(iter.obj());
637 oop next = java_lang_ref_Reference::next(iter.obj()); 628 oop next = java_lang_ref_Reference::next(iter.obj());
638 if ((iter.referent() == NULL || iter.is_referent_alive() || 629 if ((iter.referent() == NULL || iter.is_referent_alive() ||
671 bool clear_referent, 662 bool clear_referent,
672 BoolObjectClosure* is_alive, 663 BoolObjectClosure* is_alive,
673 OopClosure* keep_alive, 664 OopClosure* keep_alive,
674 VoidClosure* complete_gc) { 665 VoidClosure* complete_gc) {
675 ResourceMark rm; 666 ResourceMark rm;
676 DiscoveredListIterator iter(refs_list, keep_alive, is_alive); 667 DiscoveredListIterator iter(refs_list, keep_alive, is_alive, _discovered_list_needs_post_barrier);
677 while (iter.has_next()) { 668 while (iter.has_next()) {
678 iter.update_discovered(); 669 iter.update_discovered();
679 iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */)); 670 iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
680 if (clear_referent) { 671 if (clear_referent) {
681 // NULL out referent pointer 672 // NULL out referent pointer
788 private: 779 private:
789 bool _clear_referent; 780 bool _clear_referent;
790 }; 781 };
791 782
792 void ReferenceProcessor::set_discovered(oop ref, oop value) { 783 void ReferenceProcessor::set_discovered(oop ref, oop value) {
793 if (_discovered_list_needs_barrier) { 784 java_lang_ref_Reference::set_discovered_raw(ref, value);
794 java_lang_ref_Reference::set_discovered(ref, value); 785 if (_discovered_list_needs_post_barrier) {
795 } else { 786 oopDesc::bs()->write_ref_field(java_lang_ref_Reference::discovered_addr(ref), value);
796 java_lang_ref_Reference::set_discovered_raw(ref, value);
797 } 787 }
798 } 788 }
799 789
800 // Balances reference queues. 790 // Balances reference queues.
801 // Move entries from all queues[0, 1, ..., _max_num_q-1] to 791 // Move entries from all queues[0, 1, ..., _max_num_q-1] to
988 } 978 }
989 } 979 }
990 980
991 void ReferenceProcessor::clean_up_discovered_reflist(DiscoveredList& refs_list) { 981 void ReferenceProcessor::clean_up_discovered_reflist(DiscoveredList& refs_list) {
992 assert(!discovery_is_atomic(), "Else why call this method?"); 982 assert(!discovery_is_atomic(), "Else why call this method?");
993 DiscoveredListIterator iter(refs_list, NULL, NULL); 983 DiscoveredListIterator iter(refs_list, NULL, NULL, _discovered_list_needs_post_barrier);
994 while (iter.has_next()) { 984 while (iter.has_next()) {
995 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */)); 985 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
996 oop next = java_lang_ref_Reference::next(iter.obj()); 986 oop next = java_lang_ref_Reference::next(iter.obj());
997 assert(next->is_oop_or_null(), "bad next field"); 987 assert(next->is_oop_or_null(), "bad next field");
998 // If referent has been cleared or Reference is not active, 988 // If referent has been cleared or Reference is not active,
1083 // not necessary because the only case we are interested in 1073 // not necessary because the only case we are interested in
1084 // here is when *discovered_addr is NULL (see the CAS further below), 1074 // here is when *discovered_addr is NULL (see the CAS further below),
1085 // so this will expand to nothing. As a result, we have manually 1075 // so this will expand to nothing. As a result, we have manually
1086 // elided this out for G1, but left in the test for some future 1076 // elided this out for G1, but left in the test for some future
1087 // collector that might have need for a pre-barrier here, e.g.:- 1077 // collector that might have need for a pre-barrier here, e.g.:-
1088 // _bs->write_ref_field_pre((oop* or narrowOop*)discovered_addr, next_discovered); 1078 // oopDesc::bs()->write_ref_field_pre((oop* or narrowOop*)discovered_addr, next_discovered);
1089 assert(!_discovered_list_needs_barrier || UseG1GC, 1079 assert(!_discovered_list_needs_post_barrier || UseG1GC,
1090 "Need to check non-G1 collector: " 1080 "Need to check non-G1 collector: "
1091 "may need a pre-write-barrier for CAS from NULL below"); 1081 "may need a pre-write-barrier for CAS from NULL below");
1092 oop retest = oopDesc::atomic_compare_exchange_oop(next_discovered, discovered_addr, 1082 oop retest = oopDesc::atomic_compare_exchange_oop(next_discovered, discovered_addr,
1093 NULL); 1083 NULL);
1094 if (retest == NULL) { 1084 if (retest == NULL) {
1095 // This thread just won the right to enqueue the object. 1085 // This thread just won the right to enqueue the object.
1096 // We have separate lists for enqueueing, so no synchronization 1086 // We have separate lists for enqueueing, so no synchronization
1097 // is necessary. 1087 // is necessary.
1098 refs_list.set_head(obj); 1088 refs_list.set_head(obj);
1099 refs_list.inc_length(1); 1089 refs_list.inc_length(1);
1100 if (_discovered_list_needs_barrier) { 1090 if (_discovered_list_needs_post_barrier) {
1101 _bs->write_ref_field((void*)discovered_addr, next_discovered); 1091 oopDesc::bs()->write_ref_field((void*)discovered_addr, next_discovered);
1102 } 1092 }
1103 1093
1104 if (TraceReferenceGC) { 1094 if (TraceReferenceGC) {
1105 gclog_or_tty->print_cr("Discovered reference (mt) (" INTPTR_FORMAT ": %s)", 1095 gclog_or_tty->print_cr("Discovered reference (mt) (" INTPTR_FORMAT ": %s)",
1106 (void *)obj, obj->klass()->internal_name()); 1096 (void *)obj, obj->klass()->internal_name());
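[Note: Under MT discovery several GC threads may race to discover the same Reference; the CAS from NULL arbitrates, and only the winner links the object and runs the post-barrier. Because the overwritten value is known to be NULL, the pre-barrier can be elided, as the surrounding comment argues. A sketch with std::atomic standing in for atomic_compare_exchange_oop (illustrative types):

    #include <atomic>

    struct Ref { std::atomic<Ref*> discovered{nullptr}; };

    // Returns true if this thread won the race to discover 'obj'.
    bool discover_mt(Ref* obj, Ref* next_discovered, Ref*& list_head,
                     bool needs_post_barrier) {
      Ref* expected = nullptr;
      // Pre-barrier elided: the old value is NULL, so SATB marking has
      // nothing to record.
      if (!obj->discovered.compare_exchange_strong(expected, next_discovered)) {
        return false;  // another thread already discovered obj
      }
      list_head = obj;  // separate per-thread lists: no further sync needed
      if (needs_post_barrier) {
        // write_ref_field(discovered_addr, next_discovered)
      }
      return true;
    }
]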
1248 } 1238 }
1249 1239
1250 if (_discovery_is_mt) { 1240 if (_discovery_is_mt) {
1251 add_to_discovered_list_mt(*list, obj, discovered_addr); 1241 add_to_discovered_list_mt(*list, obj, discovered_addr);
1252 } else { 1242 } else {
1253 // If "_discovered_list_needs_barrier", we do write barriers when 1243 // If "_discovered_list_needs_post_barrier", we do write barriers when
1254 // updating the discovered reference list. Otherwise, we do a raw store 1244 // updating the discovered reference list. Otherwise, we do a raw store
1255 // here: the field will be visited later when processing the discovered 1245 // here: the field will be visited later when processing the discovered
1256 // references. 1246 // references.
1257 oop current_head = list->head(); 1247 oop current_head = list->head();
1258 // The last ref must have its discovered field pointing to itself. 1248 // The last ref must have its discovered field pointing to itself.
1259 oop next_discovered = (current_head != NULL) ? current_head : obj; 1249 oop next_discovered = (current_head != NULL) ? current_head : obj;
1260 1250
1261 // As in the case further above, since we are over-writing a NULL 1251 // As in the case further above, since we are over-writing a NULL
1262 // pre-value, we can safely elide the pre-barrier here for the case of G1. 1252 // pre-value, we can safely elide the pre-barrier here for the case of G1.
1263 // e.g.:- _bs->write_ref_field_pre((oop* or narrowOop*)discovered_addr, next_discovered); 1253 // e.g.:- oopDesc::bs()->write_ref_field_pre((oop* or narrowOop*)discovered_addr, next_discovered);
1264 assert(discovered == NULL, "control point invariant"); 1254 assert(discovered == NULL, "control point invariant");
1265 assert(!_discovered_list_needs_barrier || UseG1GC, 1255 assert(!_discovered_list_needs_post_barrier || UseG1GC,
1266 "For non-G1 collector, may need a pre-write-barrier for CAS from NULL below"); 1256 "For non-G1 collector, may need a pre-write-barrier for CAS from NULL below");
1267 oop_store_raw(discovered_addr, next_discovered); 1257 oop_store_raw(discovered_addr, next_discovered);
1268 if (_discovered_list_needs_barrier) { 1258 if (_discovered_list_needs_post_barrier) {
1269 _bs->write_ref_field((void*)discovered_addr, next_discovered); 1259 oopDesc::bs()->write_ref_field((void*)discovered_addr, next_discovered);
1270 } 1260 }
1271 list->set_head(obj); 1261 list->set_head(obj);
1272 list->inc_length(1); 1262 list->inc_length(1);
1273 1263
1274 if (TraceReferenceGC) { 1264 if (TraceReferenceGC) {
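[Note: The single-threaded path needs no CAS: it chains the current head through the discovered field with a raw store, keeping the last ref self-pointing, then runs the post-barrier if required. A sketch under the same illustrative types:

    struct Ref { Ref* discovered = nullptr; };

    void discover_st(Ref* obj, Ref*& list_head, bool needs_post_barrier) {
      // The last ref must have its discovered field pointing to itself.
      Ref* next_discovered = (list_head != nullptr) ? list_head : obj;
      obj->discovered = next_discovered;  // oop_store_raw; pre-barrier elided
                                          // because the old value is NULL
      if (needs_post_barrier) {
        // write_ref_field(discovered_addr, next_discovered)
      }
      list_head = obj;
    }
]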
1349 1339
1350 // Walk the given discovered ref list, and remove all reference objects 1340 // Walk the given discovered ref list, and remove all reference objects
1351 // whose referents are still alive, whose referents are NULL or which 1341 // whose referents are still alive, whose referents are NULL or which
1352 // are not active (have a non-NULL next field). NOTE: When we are 1342 // are not active (have a non-NULL next field). NOTE: When we are
1353 // thus precleaning the ref lists (which happens single-threaded today), 1343 // thus precleaning the ref lists (which happens single-threaded today),
1354 // we do not disable refs discovery to honour the correct semantics of 1344 // we do not disable refs discovery to honor the correct semantics of
1355 // java.lang.Reference. As a result, we need to be careful below 1345 // java.lang.Reference. As a result, we need to be careful below
1356 // that ref removal steps interleave safely with ref discovery steps 1346 // that ref removal steps interleave safely with ref discovery steps
1357 // (in this thread). 1347 // (in this thread).
1358 void 1348 void
1359 ReferenceProcessor::preclean_discovered_reflist(DiscoveredList& refs_list, 1349 ReferenceProcessor::preclean_discovered_reflist(DiscoveredList& refs_list,
1360 BoolObjectClosure* is_alive, 1350 BoolObjectClosure* is_alive,
1361 OopClosure* keep_alive, 1351 OopClosure* keep_alive,
1362 VoidClosure* complete_gc, 1352 VoidClosure* complete_gc,
1363 YieldClosure* yield) { 1353 YieldClosure* yield) {
1364 DiscoveredListIterator iter(refs_list, keep_alive, is_alive); 1354 DiscoveredListIterator iter(refs_list, keep_alive, is_alive, _discovered_list_needs_post_barrier);
1365 while (iter.has_next()) { 1355 while (iter.has_next()) {
1366 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */)); 1356 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
1367 oop obj = iter.obj(); 1357 oop obj = iter.obj();
1368 oop next = java_lang_ref_Reference::next(obj); 1358 oop next = java_lang_ref_Reference::next(obj);
1369 if (iter.referent() == NULL || iter.is_referent_alive() || 1359 if (iter.referent() == NULL || iter.is_referent_alive() ||