src/share/vm/gc_implementation/parNew/parGCAllocBuffer.cpp @ 481:7d7a7c599c17

6578152: fill_region_with_object has usability and safety issues
Reviewed-by: apetrusenko, ysr

author    jcoomes
date      Thu, 11 Dec 2008 12:05:08 -0800
parents   387a62b4be60
children  0fbdb4381b99
comparing 480:d249b360e026 with 481:7d7a7c599c17
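Every hunk below makes the same substitution: SharedHeap::fill_region_with_object(MemRegion) becomes CollectedHeap::fill_with_object, which (judging from the call sites in this diff) accepts either a MemRegion or a bare (start, end) pair, so callers no longer have to build a temporary MemRegion just to format a gap as a dummy object. The fragment below is a toy model of that idea only, not HotSpot code: the HeapWord typedef, the cut-down MemRegion, the FillerMark value and the word layout are all invented for illustration.

// Toy model (not HotSpot code): a "heap" is an array of words, and a filler
// "object" is a fake header word followed by a length word.  The two overloads
// mirror the call shapes seen in this diff -- one takes a region, the other a
// (start, end) pair.
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstdio>

typedef uintptr_t HeapWord;                 // stand-in for HotSpot's HeapWord

struct MemRegion {                          // minimal stand-in for MemRegion
  HeapWord* _start;
  size_t    _word_size;
  MemRegion(HeapWord* start, HeapWord* end)
    : _start(start), _word_size(static_cast<size_t>(end - start)) {}
  HeapWord* start() const     { return _start; }
  HeapWord* end() const       { return _start + _word_size; }
  size_t    word_size() const { return _word_size; }
};

static const uintptr_t FillerMark = 0xF111F111u;  // arbitrary toy header value

// (start, end) overload: format the gap as one dummy "object".
static void fill_with_object(HeapWord* start, HeapWord* end) {
  assert(start <= end && "bad region");
  size_t words = static_cast<size_t>(end - start);
  if (words == 0) return;
  start[0] = FillerMark;                    // fake header
  if (words > 1) start[1] = words;          // fake length field
}

// MemRegion overload simply delegates to the (start, end) form.
static void fill_with_object(MemRegion mr) {
  fill_with_object(mr.start(), mr.end());
}

int main() {
  HeapWord heap[16] = {0};
  fill_with_object(&heap[4], &heap[10]);              // fill words [4, 10)
  fill_with_object(MemRegion(&heap[12], &heap[16]));  // same thing via a region
  printf("filler at 4 spans %zu words\n", static_cast<size_t>(heap[5]));
  return 0;
}

The convenience overload is what lets line 61 below drop its MemRegion temporary.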
@@ -49,18 +49,18 @@
 void ParGCAllocBuffer::retire(bool end_of_gc, bool retain) {
   assert(!retain || end_of_gc, "Can only retain at GC end.");
   if (_retained) {
     // If the buffer had been retained shorten the previous filler object.
     assert(_retained_filler.end() <= _top, "INVARIANT");
-    SharedHeap::fill_region_with_object(_retained_filler);
+    CollectedHeap::fill_with_object(_retained_filler);
     // Wasted space book-keeping, otherwise (normally) done in invalidate()
     _wasted += _retained_filler.word_size();
     _retained = false;
   }
   assert(!end_of_gc || !_retained, "At this point, end_of_gc ==> !_retained.");
   if (_top < _hard_end) {
-    SharedHeap::fill_region_with_object(MemRegion(_top, _hard_end));
+    CollectedHeap::fill_with_object(_top, _hard_end);
     if (!retain) {
       invalidate();
     } else {
       // Is there wasted space we'd like to retain for the next GC?
       if (pointer_delta(_end, _top) > FillerHeaderSize) {
@@ -153,11 +153,11 @@
 // right of the current allocation point, top), we use the "contig"
 // parameter below to directly manipulate the shared array without
 // modifying the _next_threshold state in the BOT.
 void ParGCAllocBufferWithBOT::fill_region_with_block(MemRegion mr,
                                                      bool contig) {
-  SharedHeap::fill_region_with_object(mr);
+  CollectedHeap::fill_with_object(mr);
   if (contig) {
     _bt.alloc_block(mr.start(), mr.end());
   } else {
     _bt.BlockOffsetArray::alloc_block(mr.start(), mr.end());
   }
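The comment at the top of the hunk above carries the key design point: a filler block lying to the right of the current allocation point must still be recorded in the block offset table, but the table's forward cursor (_next_threshold) must not be advanced over it, which is what the contig flag selects: _bt.alloc_block for the normal contiguous-space interface versus the base-class BlockOffsetArray::alloc_block, which writes the shared array without touching _next_threshold. The sketch below is a deliberately simplified stand-in (ToyBlockOffsetTable, plain int word indices, and both method names are invented), just to show the two update paths side by side.

// Toy model (not HotSpot's BlockOffsetArray): a table that remembers block
// starts, plus a "next threshold" cursor advanced only by the contiguous path.
#include <cstdio>
#include <map>

class ToyBlockOffsetTable {
  std::map<int, int> _block_starts;   // block start -> block end (word indices)
  int _next_threshold;                // cursor used by the contiguous path
public:
  ToyBlockOffsetTable() : _next_threshold(0) {}

  // Contiguous path: record the block and advance the cursor past it.
  void alloc_block_contig(int start, int end) {
    _block_starts[start] = end;
    if (end > _next_threshold) _next_threshold = end;
  }

  // Non-contiguous path: record the block but leave the cursor untouched, so
  // later contiguous allocations are not confused by a filler that sits to
  // the right of the current allocation point.
  void alloc_block_no_cursor(int start, int end) {
    _block_starts[start] = end;
  }

  size_t block_count() const    { return _block_starts.size(); }
  int    next_threshold() const { return _next_threshold; }
};

int main() {
  ToyBlockOffsetTable bot;
  bot.alloc_block_contig(0, 8);        // normal allocation at the left edge
  bot.alloc_block_no_cursor(24, 32);   // filler block beyond the current top
  printf("%zu blocks recorded, next threshold is still %d\n",
         bot.block_count(), bot.next_threshold());   // prints "2 ... 8"
  return 0;
}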
@@ -169,11 +169,11 @@
   assert((HeapWord*)align_size_down(intptr_t(_hard_end),
                                     ChunkSizeInBytes) == _hard_end,
          "or else _true_end should be equal to _hard_end");
   assert(_retained, "or else _true_end should be equal to _hard_end");
   assert(_retained_filler.end() <= _top, "INVARIANT");
-  SharedHeap::fill_region_with_object(_retained_filler);
+  CollectedHeap::fill_with_object(_retained_filler);
   if (_top < _hard_end) {
     fill_region_with_block(MemRegion(_top, _hard_end), true);
   }
   HeapWord* next_hard_end = MIN2(_true_end, _hard_end + ChunkSizeInWords);
   _retained_filler = MemRegion(_hard_end, FillerHeaderSize);
@@ -314,15 +314,13 @@
   if (_top <= chunk_boundary) {
     assert(_true_end == _hard_end, "Invariant.");
     while (_top <= chunk_boundary) {
       assert(pointer_delta(_hard_end, chunk_boundary) >= AlignmentReserve,
              "Consequence of last card handling above.");
-      MemRegion chunk_portion(chunk_boundary, _hard_end);
-      _bt.BlockOffsetArray::alloc_block(chunk_portion.start(),
-                                        chunk_portion.end());
-      SharedHeap::fill_region_with_object(chunk_portion);
-      _hard_end = chunk_portion.start();
+      _bt.BlockOffsetArray::alloc_block(chunk_boundary, _hard_end);
+      CollectedHeap::fill_with_object(chunk_boundary, _hard_end);
+      _hard_end = chunk_boundary;
       chunk_boundary -= ChunkSizeInWords;
     }
     _end = _hard_end - AlignmentReserve;
     assert(_top <= _end, "Invariant.");
     // Now reset the initial filler chunk so it doesn't overlap with
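The loop in the final hunk retires the trailing chunks of a retained buffer one at a time: starting from a chunk boundary below _hard_end (computed earlier in the file, not shown here), each region [chunk_boundary, _hard_end) is recorded in the BOT and filled with a dummy object, then _hard_end is pulled down to that boundary and the boundary retreats by ChunkSizeInWords until it drops below _top. The fragment below is a rough, self-contained walk-through of just that boundary arithmetic; the word indices, the starting-boundary computation, and all concrete numbers are invented for illustration.

// Toy walk-through of the backward chunk loop (invented values, not HotSpot).
#include <cstdio>

int main() {
  const int chunk_size_in_words = 64;   // stand-in for ChunkSizeInWords
  int top      = 100;                   // stand-in for _top (word index)
  int hard_end = 512;                   // stand-in for _hard_end (word index)

  // Assumed here: start from the highest chunk boundary strictly below hard_end.
  int chunk_boundary = (hard_end / chunk_size_in_words) * chunk_size_in_words;
  if (chunk_boundary == hard_end) chunk_boundary -= chunk_size_in_words;

  while (top <= chunk_boundary) {
    // In the real code this is where the block offset table entry is made and
    // the region [chunk_boundary, hard_end) is filled with a dummy object.
    printf("filler block [%d, %d)\n", chunk_boundary, hard_end);
    hard_end = chunk_boundary;
    chunk_boundary -= chunk_size_in_words;
  }
  printf("remaining buffer ends at %d\n", hard_end);
  return 0;
}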