comparison src/share/vm/gc_implementation/parallelScavenge/psOldGen.cpp @ 271:818a18cd69a8

6730514: assertion failure in mangling code when expanding by 0 bytes Summary: An expansion by 0 bytes was not anticipated when the assertion was composed. Reviewed-by: jjh, jcoomes, apetrusenko
author jmasa
date Wed, 30 Jul 2008 11:54:00 -0700
parents 12eea04c8b06
children 9ee9cf798b59 a4b729f5b611
comparison
equal deleted inserted replaced
269:850fdf70db2b 271:818a18cd69a8
213 } 213 }
214 return cas_allocate_noexpand(word_size); 214 return cas_allocate_noexpand(word_size);
215 } 215 }
216 216
217 void PSOldGen::expand(size_t bytes) { 217 void PSOldGen::expand(size_t bytes) {
218 if (bytes == 0) {
219 return;
220 }
218 MutexLocker x(ExpandHeap_lock); 221 MutexLocker x(ExpandHeap_lock);
219 const size_t alignment = virtual_space()->alignment(); 222 const size_t alignment = virtual_space()->alignment();
220 size_t aligned_bytes = align_size_up(bytes, alignment); 223 size_t aligned_bytes = align_size_up(bytes, alignment);
221 size_t aligned_expand_bytes = align_size_up(MinHeapDeltaBytes, alignment); 224 size_t aligned_expand_bytes = align_size_up(MinHeapDeltaBytes, alignment);
225 if (aligned_bytes == 0){
226 // The alignment caused the number of bytes to wrap. An expand_by(0) will
227 // return true with the implication that an expansion was done when it
228 // was not. A call to expand implies a best effort to expand by "bytes"
229 // but not a guarantee. Align down to give a best effort. This is likely
230 // the most that the generation can expand since it has some capacity to
231 // start with.
232 aligned_bytes = align_size_down(bytes, alignment);
233 }
222 234
223 bool success = false; 235 bool success = false;
224 if (aligned_expand_bytes > aligned_bytes) { 236 if (aligned_expand_bytes > aligned_bytes) {
225 success = expand_by(aligned_expand_bytes); 237 success = expand_by(aligned_expand_bytes);
226 } 238 }
229 } 241 }
230 if (!success) { 242 if (!success) {
231 success = expand_to_reserved(); 243 success = expand_to_reserved();
232 } 244 }
233 245
234 if (GC_locker::is_active()) { 246 if (PrintGC && Verbose) {
235 if (PrintGC && Verbose) { 247 if (success && GC_locker::is_active()) {
236 gclog_or_tty->print_cr("Garbage collection disabled, expanded heap instead"); 248 gclog_or_tty->print_cr("Garbage collection disabled, expanded heap instead");
237 } 249 }
238 } 250 }
239 } 251 }
240 252
241 bool PSOldGen::expand_by(size_t bytes) { 253 bool PSOldGen::expand_by(size_t bytes) {
242 assert_lock_strong(ExpandHeap_lock); 254 assert_lock_strong(ExpandHeap_lock);
243 assert_locked_or_safepoint(Heap_lock); 255 assert_locked_or_safepoint(Heap_lock);
256 if (bytes == 0) {
257 return true; // That's what virtual_space()->expand_by(0) would return
258 }
244 bool result = virtual_space()->expand_by(bytes); 259 bool result = virtual_space()->expand_by(bytes);
245 if (result) { 260 if (result) {
246 if (ZapUnusedHeapArea) { 261 if (ZapUnusedHeapArea) {
247 // We need to mangle the newly expanded area. The memregion spans 262 // We need to mangle the newly expanded area. The memregion spans
248 // end -> new_end, we assume that top -> end is already mangled. 263 // end -> new_end, we assume that top -> end is already mangled.