comparison src/share/vm/gc_implementation/g1/heapRegion.hpp @ 1886:72a161e62cc4

6991377: G1: race between concurrent refinement and humongous object allocation
Summary: There is a race between the concurrent refinement threads and humongous object allocation that can cause the concurrent refinement threads to corrupt the part of the BOT that is being initialized by the humongous object allocation operation. The solution is to do the humongous object allocation in careful steps so that the concurrent refinement threads always have a consistent view of the BOT, region contents, and top. The fix includes some very minor tidying up in sparsePRT.
Reviewed-by: jcoomes, johnc, ysr
author tonyp
date Sat, 16 Oct 2010 17:12:19 -0400
parents 5cbac8938c4c
children f95d63e2154a
diff from 1885:a5c514e74487 to 1886:72a161e62cc4
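The "careful steps" in the summary amount to a publication ordering: the BOT entries covering the new humongous object must be fully initialized before the new top is made visible, so a refinement thread that sees the new top is guaranteed to see consistent BOT entries. Below is a minimal standalone sketch of that pattern; it is not the HotSpot code (which uses OrderAccess rather than std::atomic), and Region, publish_humongous, and block_start are illustrative names.

    #include <atomic>
    #include <cstddef>
    #include <vector>

    // Stand-in for one region and its slice of the block offset table (BOT):
    // bot[i] holds the distance from word i back to its block start.
    struct Region {
        explicit Region(std::size_t words) : bot(words, 0) {}
        std::vector<std::size_t> bot;
        std::atomic<std::size_t> top{0};  // first unallocated word; 0 = empty
    };

    // Allocation path: fill in the BOT first, then publish the new top with a
    // release store. A reader that observes the new top therefore also
    // observes fully initialized BOT entries.
    void publish_humongous(Region& r, std::size_t obj_words) {
        for (std::size_t i = 0; i < obj_words; ++i) {
            r.bot[i] = i;                                   // step 1: BOT setup
        }
        r.top.store(obj_words, std::memory_order_release);  // step 2: publish top
    }

    // Refinement path: read top with an acquire load and never consult BOT
    // entries at or beyond it; entries below top are consistent.
    std::size_t block_start(const Region& r, std::size_t addr) {
        std::size_t t = r.top.load(std::memory_order_acquire);
        if (addr >= t) return t;     // not yet published; nothing to scan
        return addr - r.bot[addr];
    }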
@@ -393,17 +393,15 @@
     return _humongous_start_region;
   }
 
   // Causes the current region to represent a humongous object spanning "n"
   // regions.
-  virtual void set_startsHumongous();
+  void set_startsHumongous(HeapWord* new_end);
 
   // The regions that continue a humongous sequence should be added using
   // this method, in increasing address order.
   void set_continuesHumongous(HeapRegion* start);
-
-  void add_continuingHumongousRegion(HeapRegion* cont);
 
   // If the region has a remembered set, return a pointer to it.
   HeapRegionRemSet* rem_set() const {
     return _rem_set;
   }
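For orientation, a hedged sketch of how a caller might drive the revised interface: the start region is told where the object ends, and the continuing regions are linked to it in increasing address order, as the header comment requires. The real caller is not part of this comparison; the stub bodies and the helper mark_humongous are hypothetical.

    #include <cstddef>

    struct HeapWord;   // opaque in this sketch, as in HotSpot

    struct HeapRegion {
        void set_startsHumongous(HeapWord* new_end) { /* set end/top, init BOT */ }
        void set_continuesHumongous(HeapRegion* start) { /* link to start region */ }
    };

    void mark_humongous(HeapRegion* regions, std::size_t n, HeapWord* new_end) {
        // First region carries the object start and is told where it ends.
        regions[0].set_startsHumongous(new_end);
        // Remaining regions follow in increasing address order.
        for (std::size_t i = 1; i < n; ++i) {
            regions[i].set_continuesHumongous(&regions[0]);
        }
    }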
@@ -731,17 +729,10 @@
   HeapWord*
   oops_on_card_seq_iterate_careful(MemRegion mr,
                                    FilterOutOfRegionClosure* cl,
                                    bool filter_young);
 
-  // The region "mr" is entirely in "this", and starts and ends at block
-  // boundaries. The caller declares that all the contained blocks are
-  // coalesced into one.
-  void declare_filled_region_to_BOT(MemRegion mr) {
-    _offsets.single_block(mr.start(), mr.end());
-  }
-
   // A version of block start that is guaranteed to find *some* block
   // boundary at or before "p", but does not do object iteration, and may
   // therefore be used safely when the heap is unparseable.
   HeapWord* block_start_careful(const void* p) const {
     return _offsets.block_start_careful(p);
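What makes the careful lookup safe when the heap is unparseable is that it consults only the offset table and never touches object headers. A simplified model of such a walk, assuming a flat one-byte-per-card table rather than G1's multi-level encoding (entry and card are illustrative names):

    #include <cstddef>

    // entry[c] == 0 means a block starts exactly at card c; a non-zero value
    // says "step back that many cards and ask again". Only the table is read,
    // never an object header, so the walk is safe while the heap is in a
    // non-parseable state.
    std::size_t block_start_careful(const unsigned char* entry, std::size_t card) {
        while (entry[card] != 0) {
            card -= entry[card];
        }
        return card;   // a card holding a block boundary at or before the query
    }

This is exactly why the ordering fixed by this changeset matters: a refinement thread may run this walk concurrently with allocation, so the table it reads must never be half-initialized for the part of the region below the top it observed.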