comparison src/share/vm/gc_implementation/parallelScavenge/psYoungGen.cpp @ 263:12eea04c8b06

6672698: mangle_unused_area() should not remangle the entire heap at each collection.
Summary: Maintain a high water mark for the allocations in a space and mangle only up to that high water mark.
Reviewed-by: ysr, apetrusenko
author jmasa
date Wed, 09 Jul 2008 15:08:55 -0700
parents 183f41cf8bfe
children 850fdf70db2b
comparing 225:286bee59f34b with 263:12eea04c8b06
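The changeset summary describes the core idea: rather than remangling all unused space in every space at every collection, each space remembers the highest address that has ever held allocations (a high water mark), and only the range between the current top and that mark can contain stale, unmangled data. The sketch below is a minimal illustration of that bookkeeping under assumed names (MangledSpace, high_water_mark_, mangle_unused_area); it is not the HotSpot SpaceMangler code, just the idea the patch implements. The full comparison, rendered as a unified diff, follows the sketch.

#include <algorithm>
#include <cstddef>
#include <cstring>

// Illustrative sketch only: hypothetical names, not the HotSpot API.
class MangledSpace {
  unsigned char* bottom_;
  unsigned char* top_;              // current allocation pointer
  unsigned char* end_;
  unsigned char* high_water_mark_;  // highest top_ seen since the last mangle

public:
  MangledSpace(unsigned char* bottom, unsigned char* end)
    : bottom_(bottom), top_(bottom), end_(end), high_water_mark_(bottom) {
    std::memset(bottom_, 0xAB, end_ - bottom_);  // freshly committed: mangle it all once
  }

  void allocate(std::size_t bytes) {
    top_ += bytes;                               // bump-pointer allocation dirties the space
    high_water_mark_ = std::max(high_water_mark_, top_);
  }

  void reset_after_gc(std::size_t live_bytes) {
    top_ = bottom_ + live_bytes;                 // survivors compacted to the bottom
  }

  // Old behaviour: remangle everything in [top_, end_) after each collection.
  // New behaviour: only [top_, high_water_mark_) can hold stale data; the area
  // above the high water mark is still mangled from before, so skip it.
  void mangle_unused_area() {
    unsigned char* limit = std::min(high_water_mark_, end_);
    if (top_ < limit) {
      std::memset(top_, 0xAB, limit - top_);
      high_water_mark_ = top_;                   // nothing above top_ is dirty any more
    }
  }
};

int main() {
  unsigned char heap[4096];
  MangledSpace eden(heap, heap + sizeof(heap));
  eden.allocate(1024);         // dirties [0, 1024)
  eden.reset_after_gc(128);    // GC leaves live data in [0, 128)
  eden.mangle_unused_area();   // remangles only [128, 1024), not the whole 4 KB
  return 0;
}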
--- a/src/share/vm/gc_implementation/parallelScavenge/psYoungGen.cpp
+++ b/src/share/vm/gc_implementation/parallelScavenge/psYoungGen.cpp
@@ -34,11 +34,11 @@
 {}
 
 void PSYoungGen::initialize_virtual_space(ReservedSpace rs, size_t alignment) {
   assert(_init_gen_size != 0, "Should have a finite size");
   _virtual_space = new PSVirtualSpace(rs, alignment);
-  if (!_virtual_space->expand_by(_init_gen_size)) {
+  if (!virtual_space()->expand_by(_init_gen_size)) {
     vm_exit_during_initialization("Could not reserve enough space for "
                                   "object heap");
   }
 }
 
@@ -47,16 +47,23 @@
   initialize_work();
 }
 
 void PSYoungGen::initialize_work() {
 
-  _reserved = MemRegion((HeapWord*)_virtual_space->low_boundary(),
-                        (HeapWord*)_virtual_space->high_boundary());
+  _reserved = MemRegion((HeapWord*)virtual_space()->low_boundary(),
+                        (HeapWord*)virtual_space()->high_boundary());
 
-  MemRegion cmr((HeapWord*)_virtual_space->low(),
-                (HeapWord*)_virtual_space->high());
+  MemRegion cmr((HeapWord*)virtual_space()->low(),
+                (HeapWord*)virtual_space()->high());
   Universe::heap()->barrier_set()->resize_covered_region(cmr);
+
+  if (ZapUnusedHeapArea) {
+    // Mangle newly committed space immediately because it
+    // can be done here more simply that after the new
+    // spaces have been computed.
+    SpaceMangler::mangle_region(cmr);
+  }
 
   if (UseNUMA) {
     _eden_space = new MutableNUMASpace();
   } else {
     _eden_space = new MutableSpace();
@@ -87,11 +94,11 @@
   _gen_counters = new PSGenerationCounters("new", 0, 3, _virtual_space);
 
   // Compute maximum space sizes for performance counters
   ParallelScavengeHeap* heap = (ParallelScavengeHeap*)Universe::heap();
   size_t alignment = heap->intra_heap_alignment();
-  size_t size = _virtual_space->reserved_size();
+  size_t size = virtual_space()->reserved_size();
 
   size_t max_survivor_size;
   size_t max_eden_size;
 
   if (UseAdaptiveSizePolicy) {
@@ -140,11 +147,11 @@
   ParallelScavengeHeap* heap = (ParallelScavengeHeap*)Universe::heap();
   assert(heap->kind() == CollectedHeap::ParallelScavengeHeap, "Sanity");
 
   // Compute sizes
   size_t alignment = heap->intra_heap_alignment();
-  size_t size = _virtual_space->committed_size();
+  size_t size = virtual_space()->committed_size();
 
   size_t survivor_size = size / InitialSurvivorRatio;
   survivor_size = align_size_down(survivor_size, alignment);
   // ... but never less than an alignment
   survivor_size = MAX2(survivor_size, alignment);
@@ -162,33 +169,33 @@
     _to_counters->update_capacity();
   }
 }
 
 void PSYoungGen::set_space_boundaries(size_t eden_size, size_t survivor_size) {
-  assert(eden_size < _virtual_space->committed_size(), "just checking");
+  assert(eden_size < virtual_space()->committed_size(), "just checking");
   assert(eden_size > 0 && survivor_size > 0, "just checking");
 
   // Initial layout is Eden, to, from. After swapping survivor spaces,
   // that leaves us with Eden, from, to, which is step one in our two
   // step resize-with-live-data procedure.
-  char *eden_start = _virtual_space->low();
+  char *eden_start = virtual_space()->low();
   char *to_start = eden_start + eden_size;
   char *from_start = to_start + survivor_size;
   char *from_end = from_start + survivor_size;
 
-  assert(from_end == _virtual_space->high(), "just checking");
+  assert(from_end == virtual_space()->high(), "just checking");
   assert(is_object_aligned((intptr_t)eden_start), "checking alignment");
   assert(is_object_aligned((intptr_t)to_start), "checking alignment");
   assert(is_object_aligned((intptr_t)from_start), "checking alignment");
 
   MemRegion eden_mr((HeapWord*)eden_start, (HeapWord*)to_start);
   MemRegion to_mr ((HeapWord*)to_start, (HeapWord*)from_start);
   MemRegion from_mr((HeapWord*)from_start, (HeapWord*)from_end);
 
-  eden_space()->initialize(eden_mr, true);
-  to_space()->initialize(to_mr , true);
-  from_space()->initialize(from_mr, true);
+  eden_space()->initialize(eden_mr, true, ZapUnusedHeapArea);
+  to_space()->initialize(to_mr , true, ZapUnusedHeapArea);
+  from_space()->initialize(from_mr, true, ZapUnusedHeapArea);
 }
 
 #ifndef PRODUCT
 void PSYoungGen::space_invariants() {
   ParallelScavengeHeap* heap = (ParallelScavengeHeap*)Universe::heap();
@@ -205,43 +212,43 @@
   char* from_start = (char*)from_space()->bottom();
   char* from_end = (char*)from_space()->end();
   char* to_start = (char*)to_space()->bottom();
   char* to_end = (char*)to_space()->end();
 
-  guarantee(eden_start >= _virtual_space->low(), "eden bottom");
+  guarantee(eden_start >= virtual_space()->low(), "eden bottom");
   guarantee(eden_start < eden_end, "eden space consistency");
   guarantee(from_start < from_end, "from space consistency");
   guarantee(to_start < to_end, "to space consistency");
 
   // Check whether from space is below to space
   if (from_start < to_start) {
     // Eden, from, to
     guarantee(eden_end <= from_start, "eden/from boundary");
     guarantee(from_end <= to_start, "from/to boundary");
-    guarantee(to_end <= _virtual_space->high(), "to end");
+    guarantee(to_end <= virtual_space()->high(), "to end");
   } else {
     // Eden, to, from
     guarantee(eden_end <= to_start, "eden/to boundary");
     guarantee(to_end <= from_start, "to/from boundary");
-    guarantee(from_end <= _virtual_space->high(), "from end");
+    guarantee(from_end <= virtual_space()->high(), "from end");
   }
 
   // More checks that the virtual space is consistent with the spaces
-  assert(_virtual_space->committed_size() >=
+  assert(virtual_space()->committed_size() >=
     (eden_space()->capacity_in_bytes() +
      to_space()->capacity_in_bytes() +
      from_space()->capacity_in_bytes()), "Committed size is inconsistent");
-  assert(_virtual_space->committed_size() <= _virtual_space->reserved_size(),
+  assert(virtual_space()->committed_size() <= virtual_space()->reserved_size(),
     "Space invariant");
   char* eden_top = (char*)eden_space()->top();
   char* from_top = (char*)from_space()->top();
   char* to_top = (char*)to_space()->top();
-  assert(eden_top <= _virtual_space->high(), "eden top");
-  assert(from_top <= _virtual_space->high(), "from top");
-  assert(to_top <= _virtual_space->high(), "to top");
+  assert(eden_top <= virtual_space()->high(), "eden top");
+  assert(from_top <= virtual_space()->high(), "from top");
+  assert(to_top <= virtual_space()->high(), "to top");
 
-  _virtual_space->verify();
+  virtual_space()->verify();
 }
 #endif
 
 void PSYoungGen::resize(size_t eden_size, size_t survivor_size) {
   // Resize the generation if needed. If the generation resize
@@ -263,12 +270,12 @@
   }
 }
 
 
 bool PSYoungGen::resize_generation(size_t eden_size, size_t survivor_size) {
-  const size_t alignment = _virtual_space->alignment();
-  size_t orig_size = _virtual_space->committed_size();
+  const size_t alignment = virtual_space()->alignment();
+  size_t orig_size = virtual_space()->committed_size();
   bool size_changed = false;
 
   // There used to be this guarantee there.
   // guarantee ((eden_size + 2*survivor_size) <= _max_gen_size, "incorrect input arguments");
   // Code below forces this requirement. In addition the desired eden
@@ -286,14 +293,22 @@
 
   if (desired_size > orig_size) {
     // Grow the generation
     size_t change = desired_size - orig_size;
     assert(change % alignment == 0, "just checking");
-    if (!_virtual_space->expand_by(change)) {
+    HeapWord* prev_high = (HeapWord*) virtual_space()->high();
+    if (!virtual_space()->expand_by(change)) {
       return false; // Error if we fail to resize!
     }
-
+    if (ZapUnusedHeapArea) {
+      // Mangle newly committed space immediately because it
+      // can be done here more simply that after the new
+      // spaces have been computed.
+      HeapWord* new_high = (HeapWord*) virtual_space()->high();
+      MemRegion mangle_region(prev_high, new_high);
+      SpaceMangler::mangle_region(mangle_region);
+    }
     size_changed = true;
   } else if (desired_size < orig_size) {
     size_t desired_change = orig_size - desired_size;
     assert(desired_change % alignment == 0, "just checking");
 
@@ -319,23 +334,99 @@
 
   if (size_changed) {
     post_resize();
 
     if (Verbose && PrintGC) {
-      size_t current_size = _virtual_space->committed_size();
+      size_t current_size = virtual_space()->committed_size();
       gclog_or_tty->print_cr("PSYoung generation size changed: "
                              SIZE_FORMAT "K->" SIZE_FORMAT "K",
                              orig_size/K, current_size/K);
     }
   }
 
-  guarantee(eden_plus_survivors <= _virtual_space->committed_size() ||
-            _virtual_space->committed_size() == max_size(), "Sanity");
+  guarantee(eden_plus_survivors <= virtual_space()->committed_size() ||
+            virtual_space()->committed_size() == max_size(), "Sanity");
 
   return true;
 }
 
+#ifndef PRODUCT
+// In the numa case eden is not mangled so a survivor space
+// moving into a region previously occupied by a survivor
+// may find an unmangled region. Also in the PS case eden
+// to-space and from-space may not touch (i.e., there may be
+// gaps between them due to movement while resizing the
+// spaces). Those gaps must be mangled.
+void PSYoungGen::mangle_survivors(MutableSpace* s1,
+                                  MemRegion s1MR,
+                                  MutableSpace* s2,
+                                  MemRegion s2MR) {
+  // Check eden and gap between eden and from-space, in deciding
+  // what to mangle in from-space. Check the gap between from-space
+  // and to-space when deciding what to mangle.
+  //
+  //      +--------+ +----+ +---+
+  //      | eden   | |s1  | |s2 |
+  //      +--------+ +----+ +---+
+  //                 +-------+ +-----+
+  //                 |s1MR   | |s2MR |
+  //                 +-------+ +-----+
+  // All of survivor-space is properly mangled so find the
+  // upper bound on the mangling for any portion above current s1.
+  HeapWord* delta_end = MIN2(s1->bottom(), s1MR.end());
+  MemRegion delta1_left;
+  if (s1MR.start() < delta_end) {
+    delta1_left = MemRegion(s1MR.start(), delta_end);
+    s1->mangle_region(delta1_left);
+  }
+  // Find any portion to the right of the current s1.
+  HeapWord* delta_start = MAX2(s1->end(), s1MR.start());
+  MemRegion delta1_right;
+  if (delta_start < s1MR.end()) {
+    delta1_right = MemRegion(delta_start, s1MR.end());
+    s1->mangle_region(delta1_right);
+  }
+
+  // Similarly for the second survivor space except that
+  // any of the new region that overlaps with the current
+  // region of the first survivor space has already been
+  // mangled.
+  delta_end = MIN2(s2->bottom(), s2MR.end());
+  delta_start = MAX2(s2MR.start(), s1->end());
+  MemRegion delta2_left;
+  if (s2MR.start() < delta_end) {
+    delta2_left = MemRegion(s2MR.start(), delta_end);
+    s2->mangle_region(delta2_left);
+  }
+  delta_start = MAX2(s2->end(), s2MR.start());
+  MemRegion delta2_right;
+  if (delta_start < s2MR.end()) {
+    s2->mangle_region(delta2_right);
+  }
+
+  if (TraceZapUnusedHeapArea) {
+    // s1
+    gclog_or_tty->print_cr("Current region: [" PTR_FORMAT ", " PTR_FORMAT ") "
+      "New region: [" PTR_FORMAT ", " PTR_FORMAT ")",
+      s1->bottom(), s1->end(), s1MR.start(), s1MR.end());
+    gclog_or_tty->print_cr(" Mangle before: [" PTR_FORMAT ", "
+      PTR_FORMAT ") Mangle after: [" PTR_FORMAT ", " PTR_FORMAT ")",
+      delta1_left.start(), delta1_left.end(), delta1_right.start(),
+      delta1_right.end());
+
+    // s2
+    gclog_or_tty->print_cr("Current region: [" PTR_FORMAT ", " PTR_FORMAT ") "
+      "New region: [" PTR_FORMAT ", " PTR_FORMAT ")",
+      s2->bottom(), s2->end(), s2MR.start(), s2MR.end());
+    gclog_or_tty->print_cr(" Mangle before: [" PTR_FORMAT ", "
+      PTR_FORMAT ") Mangle after: [" PTR_FORMAT ", " PTR_FORMAT ")",
+      delta2_left.start(), delta2_left.end(), delta2_right.start(),
+      delta2_right.end());
+  }
+
+}
+#endif // NOT PRODUCT
 
 void PSYoungGen::resize_spaces(size_t requested_eden_size,
                                size_t requested_survivor_size) {
   assert(UseAdaptiveSizePolicy, "sanity check");
   assert(requested_eden_size > 0 && requested_survivor_size > 0,
@@ -394,13 +485,15 @@
   ParallelScavengeHeap* heap = (ParallelScavengeHeap*)Universe::heap();
   const size_t alignment = heap->intra_heap_alignment();
   const bool maintain_minimum =
     (requested_eden_size + 2 * requested_survivor_size) <= min_gen_size();
 
+  bool eden_from_to_order = from_start < to_start;
   // Check whether from space is below to space
-  if (from_start < to_start) {
+  if (eden_from_to_order) {
     // Eden, from, to
+    eden_from_to_order = true;
     if (PrintAdaptiveSizePolicy && Verbose) {
       gclog_or_tty->print_cr(" Eden, from, to:");
     }
 
     // Set eden
@@ -433,11 +526,11 @@
     // To may resize into from space as long as it is clear of live data.
     // From space must remain page aligned, though, so we need to do some
     // extra calculations.
 
     // First calculate an optimal to-space
-    to_end = (char*)_virtual_space->high();
+    to_end = (char*)virtual_space()->high();
     to_start = (char*)pointer_delta(to_end, (char*)requested_survivor_size,
                                     sizeof(char));
 
     // Does the optimal to-space overlap from-space?
     if (to_start < (char*)from_space()->end()) {
@@ -489,11 +582,11 @@
 
     // To space gets priority over eden resizing. Note that we position
     // to space as if we were able to resize from space, even though from
    // space is not modified.
     // Giving eden priority was tried and gave poorer performance.
-    to_end = (char*)pointer_delta(_virtual_space->high(),
+    to_end = (char*)pointer_delta(virtual_space()->high(),
                                   (char*)requested_survivor_size,
                                   sizeof(char));
     to_end = MIN2(to_end, from_start);
     to_start = (char*)pointer_delta(to_end, (char*)requested_survivor_size,
                                     sizeof(char));
@@ -558,13 +651,49 @@
 
   // For PrintAdaptiveSizePolicy block below
   size_t old_from = from_space()->capacity_in_bytes();
   size_t old_to = to_space()->capacity_in_bytes();
 
-  eden_space()->initialize(edenMR, true);
-  to_space()->initialize(toMR , true);
-  from_space()->initialize(fromMR, false); // Note, not cleared!
+  if (ZapUnusedHeapArea) {
+    // NUMA is a special case because a numa space is not mangled
+    // in order to not prematurely bind its address to memory to
+    // the wrong memory (i.e., don't want the GC thread to first
+    // touch the memory). The survivor spaces are not numa
+    // spaces and are mangled.
+    if (UseNUMA) {
+      if (eden_from_to_order) {
+        mangle_survivors(from_space(), fromMR, to_space(), toMR);
+      } else {
+        mangle_survivors(to_space(), toMR, from_space(), fromMR);
+      }
+    }
+
+    // If not mangling the spaces, do some checking to verify that
+    // the spaces are already mangled.
+    // The spaces should be correctly mangled at this point so
+    // do some checking here. Note that they are not being mangled
+    // in the calls to initialize().
+    // Must check mangling before the spaces are reshaped. Otherwise,
+    // the bottom or end of one space may have moved into an area
+    // covered by another space and a failure of the check may
+    // not correctly indicate which space is not properly mangled.
+    HeapWord* limit = (HeapWord*) virtual_space()->high();
+    eden_space()->check_mangled_unused_area(limit);
+    from_space()->check_mangled_unused_area(limit);
+    to_space()->check_mangled_unused_area(limit);
+  }
+  // When an existing space is being initialized, it is not
+  // mangled because the space has been previously mangled.
+  eden_space()->initialize(edenMR,
+                           SpaceDecorator::Clear,
+                           SpaceDecorator::DontMangle);
+  to_space()->initialize(toMR,
+                         SpaceDecorator::Clear,
+                         SpaceDecorator::DontMangle);
+  from_space()->initialize(fromMR,
+                           SpaceDecorator::DontClear,
+                           SpaceDecorator::DontMangle);
 
   assert(from_space()->top() == old_from_top, "from top changed!");
 
   if (PrintAdaptiveSizePolicy) {
     ParallelScavengeHeap* heap = (ParallelScavengeHeap*)Universe::heap();
@@ -669,11 +798,11 @@
               capacity_in_bytes(), used_in_bytes());
   } else {
     st->print(" total " SIZE_FORMAT "K, used " SIZE_FORMAT "K",
               capacity_in_bytes()/K, used_in_bytes()/K);
   }
-  _virtual_space->print_space_boundaries_on(st);
+  virtual_space()->print_space_boundaries_on(st);
   st->print(" eden"); eden_space()->print_on(st);
   st->print(" from"); from_space()->print_on(st);
   st->print(" to "); to_space()->print_on(st);
 }
 
@@ -772,11 +901,13 @@
   HeapWord* new_end = (HeapWord*)virtual_space()->high();
   assert(new_end >= space_shrinking->bottom(), "Shrink was too large");
   // Was there a shrink of the survivor space?
   if (new_end < space_shrinking->end()) {
     MemRegion mr(space_shrinking->bottom(), new_end);
-    space_shrinking->initialize(mr, false /* clear */);
+    space_shrinking->initialize(mr,
+                                SpaceDecorator::DontClear,
+                                SpaceDecorator::Mangle);
   }
 }
 
 // This method currently does not expect to expand into eden (i.e.,
 // the virtual space boundaries is expected to be consistent
@@ -807,5 +938,14 @@
 void PSYoungGen::verify(bool allow_dirty) {
   eden_space()->verify(allow_dirty);
   from_space()->verify(allow_dirty);
   to_space()->verify(allow_dirty);
 }
+
+#ifndef PRODUCT
+void PSYoungGen::record_spaces_top() {
+  assert(ZapUnusedHeapArea, "Not mangling unused space");
+  eden_space()->set_top_for_allocations();
+  from_space()->set_top_for_allocations();
+  to_space()->set_top_for_allocations();
+}
+#endif
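A closing note on the subtlest piece of the patch, mangle_survivors(): when a survivor space is given a new footprint during a resize, only the parts of the new region that fall outside the space's current extent need remangling, one slice to the left of the current bottom and one to the right of the current end. The snippet below is a simplified, self-contained restatement of that interval arithmetic for a single space; Space, Region, and mangle() are illustrative stand-ins rather than HotSpot types, and it ignores the extra overlap handling the real code applies to the second survivor.

#include <algorithm>
#include <cassert>
#include <cstddef>
#include <cstring>
#include <vector>

// Illustrative stand-ins for MutableSpace / MemRegion / SpaceMangler.
struct Region { std::size_t start, end; };   // [start, end) in bytes
struct Space  { std::size_t bottom, end; };  // current footprint

static void mangle(std::vector<unsigned char>& heap, Region r) {
  if (r.start < r.end) std::memset(&heap[r.start], 0xAB, r.end - r.start);
}

// Remangle only the parts of the new footprint 'nr' that lie outside the
// space's current footprint: a left delta below bottom and a right delta
// above end, mirroring the delta1_left/delta1_right computation in the patch.
static void mangle_deltas(std::vector<unsigned char>& heap,
                          const Space& s, Region nr) {
  std::size_t left_end = std::min(s.bottom, nr.end);
  if (nr.start < left_end) mangle(heap, Region{nr.start, left_end});
  std::size_t right_start = std::max(s.end, nr.start);
  if (right_start < nr.end) mangle(heap, Region{right_start, nr.end});
}

int main() {
  std::vector<unsigned char> heap(100, 0);
  Space survivor{40, 50};                  // current survivor occupies [40, 50)
  Region moved{35, 60};                    // survivor's new footprint after resize
  mangle_deltas(heap, survivor, moved);    // mangles [35, 40) and [50, 60) only
  assert(heap[36] == 0xAB && heap[55] == 0xAB && heap[42] == 0);
  return 0;
}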