comparison src/share/vm/gc_implementation/shared/mutableNUMASpace.cpp @ 263:12eea04c8b06

6672698: mangle_unused_area() should not remangle the entire heap at each collection.
Summary: Maintain a high water mark for the allocations in a space and mangle only up to that high water mark.
Reviewed-by: ysr, apetrusenko
author jmasa
date Wed, 09 Jul 2008 15:08:55 -0700
parents d1635bf93939
children d6340ab4105b
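
The Summary above describes the mangling optimization in prose: a space remembers the highest address it has ever handed out and, after a collection lowers top, remangles only the gap between the new top and that mark rather than the whole unused area up to end. Below is a minimal, hypothetical C++ sketch of that bookkeeping; the class and member names (ToySpace, _top_for_allocations) and the 0xAB fill byte are illustrative stand-ins, not the HotSpot implementation in this changeset.

// Minimal, hypothetical sketch of the "high water mark" mangling described in
// the summary above. Names (ToySpace, _top_for_allocations) and the 0xAB fill
// byte are illustrative stand-ins, not the HotSpot code.
#include <cstddef>
#include <cstring>

class ToySpace {
  char* _bottom;
  char* _top;                  // current allocation pointer
  char* _end;
  char* _top_for_allocations;  // highest value _top has ever reached

public:
  ToySpace(char* bottom, char* end)
      : _bottom(bottom), _top(bottom), _end(end), _top_for_allocations(bottom) {
    std::memset(_bottom, 0xAB, _end - _bottom);   // start with everything mangled
  }

  char* allocate(std::size_t bytes) {
    if (_top + bytes > _end) return nullptr;
    char* res = _top;
    _top += bytes;
    if (_top > _top_for_allocations) {
      _top_for_allocations = _top;                // advance the high water mark
    }
    return res;
  }

  // A collection compacts live data and lowers the allocation pointer.
  void reset_top_after_gc(char* new_top) { _top = new_top; }

  // Only [_top, _top_for_allocations) can hold stale, unmangled data; the part
  // above the mark has not been touched since the last mangling, so it is left
  // alone instead of remangling the entire unused area up to _end.
  void mangle_unused_area() {
    if (_top < _top_for_allocations) {
      std::memset(_top, 0xAB, _top_for_allocations - _top);
      _top_for_allocations = _top;                // everything above _top is mangled again
    }
  }
};

int main() {
  static char backing[1024];
  ToySpace s(backing, backing + sizeof backing);
  s.allocate(256);             // high water mark rises to bottom + 256
  s.reset_top_after_gc(backing);
  s.mangle_unused_area();      // touches 256 bytes, not all 1024
  return 0;
}

The invariant the sketch maintains is that everything above _top_for_allocations is already mangled, so repeated collections pay only for the range that was actually used since the last mangling.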
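In the hunks below, call sites that used to pass a bare boolean, such as initialize(region(), true), now pass named constants such as SpaceDecorator::Clear and SpaceDecorator::DontMangle, so the intent (clear the space, never mangle a NUMA space) is readable at the call site. As an illustration of that pattern only, and not the actual SpaceDecorator definition in the HotSpot sources, such constants can be as simple as:

// Illustration of the named-boolean pattern only; this is not the actual
// SpaceDecorator definition from the HotSpot sources.
class SpaceDecorator {
 public:
  static const bool Clear      = true;
  static const bool DontClear  = false;
  static const bool Mangle     = true;
  static const bool DontMangle = false;
};

// A call such as initialize(region(), SpaceDecorator::Clear, SpaceDecorator::DontMangle)
// then reads as "clear the space, do not mangle it", where
// initialize(region(), true, false) would not.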
Comparison of 225:286bee59f34b (old, lines marked "-") with 263:12eea04c8b06 (new, lines marked "+"):
@@ -40,23 +40,35 @@
     delete lgrp_spaces()->at(i);
   }
   delete lgrp_spaces();
 }
 
-void MutableNUMASpace::mangle_unused_area() {
-  for (int i = 0; i < lgrp_spaces()->length(); i++) {
-    LGRPSpace *ls = lgrp_spaces()->at(i);
-    MutableSpace *s = ls->space();
-    if (!os::numa_has_static_binding()) {
-      HeapWord *top = MAX2((HeapWord*)round_down((intptr_t)s->top(), page_size()), s->bottom());
-      if (top < s->end()) {
-        ls->add_invalid_region(MemRegion(top, s->end()));
-      }
-    }
-    s->mangle_unused_area();
-  }
-}
+#ifndef PRODUCT
+void MutableNUMASpace::mangle_unused_area() {
+  // This method should do nothing.
+  // It can be called on a numa space during a full compaction.
+}
+void MutableNUMASpace::mangle_unused_area_complete() {
+  // This method should do nothing.
+  // It can be called on a numa space during a full compaction.
+}
+void MutableNUMASpace::mangle_region(MemRegion mr) {
+  // This method should do nothing because numa spaces are not mangled.
+}
+void MutableNUMASpace::set_top_for_allocations(HeapWord* v) {
+  assert(false, "Do not mangle MutableNUMASpace's");
+}
+void MutableNUMASpace::set_top_for_allocations() {
+  // This method should do nothing.
+}
+void MutableNUMASpace::check_mangled_unused_area(HeapWord* limit) {
+  // This method should do nothing.
+}
+void MutableNUMASpace::check_mangled_unused_area_complete() {
+  // This method should do nothing.
+}
+#endif // NOT_PRODUCT
 
 // There may be unallocated holes in the middle chunks
 // that should be filled with dead objects to ensure parseability.
 void MutableNUMASpace::ensure_parsability() {
   for (int i = 0; i < lgrp_spaces()->length(); i++) {
@@ -241,11 +253,14 @@
     for (int i = 0; i < lgrp_spaces()->length(); i++) {
       MutableSpace *s = lgrp_spaces()->at(i)->space();
       s->set_end(s->bottom());
       s->set_top(s->bottom());
     }
-    initialize(region(), true);
+    // A NUMA space is never mangled
+    initialize(region(),
+               SpaceDecorator::Clear,
+               SpaceDecorator::DontMangle);
   } else {
     bool should_initialize = false;
     if (!os::numa_has_static_binding()) {
       for (int i = 0; i < lgrp_spaces()->length(); i++) {
         if (!lgrp_spaces()->at(i)->invalid_region().is_empty()) {
@@ -255,11 +270,14 @@
       }
     }
 
     if (should_initialize ||
         (UseAdaptiveNUMAChunkSizing && adaptation_cycles() < samples_count())) {
-      initialize(region(), true);
+      // A NUMA space is never mangled
+      initialize(region(),
+                 SpaceDecorator::Clear,
+                 SpaceDecorator::DontMangle);
     }
   }
 
   if (NUMAStats) {
     for (int i = 0; i < lgrp_spaces()->length(); i++) {
@@ -446,18 +464,21 @@
     }
     *invalid_region = MemRegion(start, end);
   }
 }
 
-void MutableNUMASpace::initialize(MemRegion mr, bool clear_space) {
+void MutableNUMASpace::initialize(MemRegion mr,
+                                  bool clear_space,
+                                  bool mangle_space) {
   assert(clear_space, "Reallocation will destory data!");
   assert(lgrp_spaces()->length() > 0, "There should be at least one space");
 
   MemRegion old_region = region(), new_region;
   set_bottom(mr.start());
   set_end(mr.end());
-  MutableSpace::set_top(bottom());
+  // Must always clear the space
+  clear(SpaceDecorator::DontMangle);
 
   // Compute chunk sizes
   size_t prev_page_size = page_size();
   set_page_size(UseLargePages ? os::large_page_size() : os::vm_page_size());
   HeapWord* rounded_bottom = (HeapWord*)round_to((intptr_t) bottom(), page_size());
@@ -584,14 +605,12 @@
       // we reshape the heap.
       bias_region(bottom_region, ls->lgrp_id());
       bias_region(top_region, ls->lgrp_id());
     }
 
-    // If we clear the region, we would mangle it in debug. That would cause page
-    // allocation in a different place. Hence setting the top directly.
-    s->initialize(new_region, false);
-    s->set_top(s->bottom());
+    // Clear space (set top = bottom) but never mangle.
+    s->initialize(new_region, SpaceDecorator::Clear, SpaceDecorator::DontMangle);
 
     set_adaptation_cycles(samples_count());
   }
 }
 
@@ -639,14 +658,16 @@
     i++;
   }
   MutableSpace::set_top(value);
 }
 
-void MutableNUMASpace::clear() {
+void MutableNUMASpace::clear(bool mangle_space) {
   MutableSpace::set_top(bottom());
   for (int i = 0; i < lgrp_spaces()->length(); i++) {
-    lgrp_spaces()->at(i)->space()->clear();
+    // Never mangle NUMA spaces because the mangling will
+    // bind the memory to a possibly unwanted lgroup.
+    lgrp_spaces()->at(i)->space()->clear(SpaceDecorator::DontMangle);
   }
 }
 
 /*
   Linux supports static memory binding, therefore the most part of the