comparison src/share/vm/gc_implementation/g1/heapRegion.cpp @ 2153:377371490991

Merge
author johnc
date Thu, 20 Jan 2011 13:57:12 -0800
parents 0fa27f37d4d4
children d25d4ca69222 abdfc822206f
--- src/share/vm/gc_implementation/g1/heapRegion.cpp	2148:02b6913287da
+++ src/share/vm/gc_implementation/g1/heapRegion.cpp	2153:377371490991
@@ -21,11 +21,10 @@
  * questions.
  *
  */
 
 #include "precompiled.hpp"
-#include "gc_implementation/g1/concurrentZFThread.hpp"
 #include "gc_implementation/g1/g1BlockOffsetTable.inline.hpp"
 #include "gc_implementation/g1/g1CollectedHeap.inline.hpp"
 #include "gc_implementation/g1/g1OopClosures.inline.hpp"
 #include "gc_implementation/g1/heapRegion.inline.hpp"
 #include "gc_implementation/g1/heapRegionRemSet.hpp"
@@ -346,25 +345,23 @@
   return new HeapRegionDCTOC(G1CollectedHeap::heap(),
                              this, cl, precision, fk);
 }
 
 void HeapRegion::hr_clear(bool par, bool clear_space) {
-  _humongous_type = NotHumongous;
-  _humongous_start_region = NULL;
+  assert(_humongous_type == NotHumongous,
+         "we should have already filtered out humongous regions");
+  assert(_humongous_start_region == NULL,
+         "we should have already filtered out humongous regions");
+  assert(_end == _orig_end,
+         "we should have already filtered out humongous regions");
+
   _in_collection_set = false;
   _is_gc_alloc_region = false;
-
-  // Age stuff (if parallel, this will be done separately, since it needs
-  // to be sequential).
-  G1CollectedHeap* g1h = G1CollectedHeap::heap();
 
   set_young_index_in_cset(-1);
   uninstall_surv_rate_group();
   set_young_type(NotYoung);
-
-  // In case it had been the start of a humongous sequence, reset its end.
-  set_end(_orig_end);
 
   if (!par) {
     // If this is parallel, this will be done later.
     HeapRegionRemSet* hrrs = rem_set();
     if (hrrs != NULL) hrrs->clear();
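
Note on the hr_clear() hunk above: the new asserts replace in-place resets, so callers are now expected to tear down a region's humongous bookkeeping (via the new set_notHumongous(), further down) before hr_clear() runs. A minimal caller-side sketch under that assumption; the helper name reset_region_for_reuse is hypothetical and not part of this file:

    // Hypothetical helper (not from this changeset): demote a humongous region
    // before the general-purpose clearing path is allowed to run, so that the
    // asserts in hr_clear() hold.
    static void reset_region_for_reuse(HeapRegion* hr) {
      if (hr->isHumongous()) {
        hr->set_notHumongous();  // drops type, start region, stretched end
      }
      hr->hr_clear(false /* par */, true /* clear_space */);
    }
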
@@ -385,10 +382,11 @@
     g1h->predict_region_elapsed_time_ms(this, false);
 }
 // </PREDICTION>
 
 void HeapRegion::set_startsHumongous(HeapWord* new_top, HeapWord* new_end) {
+  assert(!isHumongous(), "sanity / pre-condition");
   assert(end() == _orig_end,
          "Should be normal before the humongous object allocation");
   assert(top() == bottom(), "should be empty");
   assert(bottom() <= new_top && new_top <= new_end, "pre-condition");
 
@@ -398,17 +396,38 @@
   set_end(new_end);
   _offsets.set_for_starts_humongous(new_top);
 }
 
 void HeapRegion::set_continuesHumongous(HeapRegion* first_hr) {
+  assert(!isHumongous(), "sanity / pre-condition");
   assert(end() == _orig_end,
          "Should be normal before the humongous object allocation");
   assert(top() == bottom(), "should be empty");
   assert(first_hr->startsHumongous(), "pre-condition");
 
   _humongous_type = ContinuesHumongous;
   _humongous_start_region = first_hr;
+}
+
+void HeapRegion::set_notHumongous() {
+  assert(isHumongous(), "pre-condition");
+
+  if (startsHumongous()) {
+    assert(top() <= end(), "pre-condition");
+    set_end(_orig_end);
+    if (top() > end()) {
+      // at least one "continues humongous" region after it
+      set_top(end());
+    }
+  } else {
+    // continues humongous
+    assert(end() == _orig_end, "sanity");
+  }
+
+  assert(capacity() == (size_t) HeapRegion::GrainBytes, "pre-condition");
+  _humongous_type = NotHumongous;
+  _humongous_start_region = NULL;
 }
 
 bool HeapRegion::claimHeapRegion(jint claimValue) {
   jint current = _claimed;
   if (current != claimValue) {
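
The three setters above form the humongous bookkeeping API: the first region of a sequence is stretched over the whole object with set_startsHumongous() (new_top is the object's top, new_end the end of the last region in the sequence), every follower points back at it with set_continuesHumongous(), and the new set_notHumongous() undoes either state. An illustrative sketch; the helper and its parameters are assumptions, not code from this file:

    // Hypothetical helper: mark a humongous sequence of n fresh, empty regions.
    // regions[0] holds the object's start; obj_top is the object's top;
    // last_end is the end of regions[n-1].
    static void mark_humongous_sequence(HeapRegion** regions, int n,
                                        HeapWord* obj_top, HeapWord* last_end) {
      regions[0]->set_startsHumongous(obj_top, last_end);
      for (int i = 1; i < n; i++) {
        regions[i]->set_continuesHumongous(regions[0]);
      }
      // Later, when the object dies, each region is demoted again with
      // regions[i]->set_notHumongous() before it can be cleared and reused.
    }
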
@@ -441,19 +460,10 @@
   }
   assert(low == high && low >= addr, "Didn't work.");
   return low;
 }
 
-void HeapRegion::set_next_on_unclean_list(HeapRegion* r) {
-  assert(r == NULL || r->is_on_unclean_list(), "Malformed unclean list.");
-  _next_in_special_set = r;
-}
-
-void HeapRegion::set_on_unclean_list(bool b) {
-  _is_on_unclean_list = b;
-}
-
 void HeapRegion::initialize(MemRegion mr, bool clear_space, bool mangle_space) {
   G1OffsetTableContigSpace::initialize(mr, false, mangle_space);
   hr_clear(false/*par*/, clear_space);
 }
 #ifdef _MSC_VER // the use of 'this' below gets a warning, make it go away
@@ -467,19 +477,20 @@
   : G1OffsetTableContigSpace(sharedOffsetArray, mr, is_zeroed),
     _next_fk(HeapRegionDCTOC::NoFilterKind),
     _hrs_index(-1),
     _humongous_type(NotHumongous), _humongous_start_region(NULL),
     _in_collection_set(false), _is_gc_alloc_region(false),
-    _is_on_free_list(false), _is_on_unclean_list(false),
     _next_in_special_set(NULL), _orig_end(NULL),
     _claimed(InitialClaimValue), _evacuation_failed(false),
     _prev_marked_bytes(0), _next_marked_bytes(0), _sort_index(-1),
     _young_type(NotYoung), _next_young_region(NULL),
-    _next_dirty_cards_region(NULL),
-    _young_index_in_cset(-1), _surv_rate_group(NULL), _age_index(-1),
-    _rem_set(NULL), _zfs(NotZeroFilled),
-    _recorded_rs_length(0), _predicted_elapsed_time_ms(0),
+    _next_dirty_cards_region(NULL), _next(NULL), _pending_removal(false),
+#ifdef ASSERT
+    _containing_set(NULL),
+#endif // ASSERT
+    _young_index_in_cset(-1), _surv_rate_group(NULL), _age_index(-1),
+    _rem_set(NULL), _recorded_rs_length(0), _predicted_elapsed_time_ms(0),
     _predicted_bytes_to_copy(0)
 {
   _orig_end = mr.end();
   // Note that initialize() will set the start of the unmarked area of the
   // region.
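
In the constructor hunk above, the retired free/unclean flags and the zero-fill state (_zfs) give way to _next, _pending_removal, and a debug-only _containing_set back-pointer, i.e. regions are now tracked by region sets rather than by per-region flags. A sketch of how such a debug-only back-pointer is typically maintained; the set type name (HeapRegionSetBase) and the setter shown are assumptions for illustration, not code from this file:

    #ifdef ASSERT
    // Illustrative only: a region may claim membership in at most one set at a
    // time, and the check compiles away entirely in product builds.
    void HeapRegion::set_containing_set(HeapRegionSetBase* containing_set) {
      assert(containing_set == NULL || _containing_set == NULL,
             "region should not already belong to a set");
      _containing_set = containing_set;
    }
    #endif // ASSERT
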
@@ -548,90 +559,10 @@
 SPECIALIZED_SINCE_SAVE_MARKS_CLOSURES(HeapRegion_OOP_SINCE_SAVE_MARKS_DEFN)
 
 
 void HeapRegion::oop_before_save_marks_iterate(OopClosure* cl) {
   oops_in_mr_iterate(MemRegion(bottom(), saved_mark_word()), cl);
-}
-
-#ifdef DEBUG
-HeapWord* HeapRegion::allocate(size_t size) {
-  jint state = zero_fill_state();
-  assert(!G1CollectedHeap::heap()->allocs_are_zero_filled() ||
-         zero_fill_is_allocated(),
-         "When ZF is on, only alloc in ZF'd regions");
-  return G1OffsetTableContigSpace::allocate(size);
-}
-#endif
-
-void HeapRegion::set_zero_fill_state_work(ZeroFillState zfs) {
-  assert(ZF_mon->owned_by_self() ||
-         Universe::heap()->is_gc_active(),
-         "Must hold the lock or be a full GC to modify.");
-#ifdef ASSERT
-  if (top() != bottom() && zfs != Allocated) {
-    ResourceMark rm;
-    stringStream region_str;
-    print_on(&region_str);
-    assert(top() == bottom() || zfs == Allocated,
-           err_msg("Region must be empty, or we must be setting it to allocated. "
-                   "_zfs=%d, zfs=%d, region: %s", _zfs, zfs, region_str.as_string()));
-  }
-#endif
-  _zfs = zfs;
-}
-
-void HeapRegion::set_zero_fill_complete() {
-  set_zero_fill_state_work(ZeroFilled);
-  if (ZF_mon->owned_by_self()) {
-    ZF_mon->notify_all();
-  }
-}
-
-
-void HeapRegion::ensure_zero_filled() {
-  MutexLockerEx x(ZF_mon, Mutex::_no_safepoint_check_flag);
-  ensure_zero_filled_locked();
-}
-
-void HeapRegion::ensure_zero_filled_locked() {
-  assert(ZF_mon->owned_by_self(), "Precondition");
-  bool should_ignore_zf = SafepointSynchronize::is_at_safepoint();
-  assert(should_ignore_zf || Heap_lock->is_locked(),
-         "Either we're in a GC or we're allocating a region.");
-  switch (zero_fill_state()) {
-  case HeapRegion::NotZeroFilled:
-    set_zero_fill_in_progress(Thread::current());
-    {
-      ZF_mon->unlock();
-      Copy::fill_to_words(bottom(), capacity()/HeapWordSize);
-      ZF_mon->lock_without_safepoint_check();
-    }
-    // A trap.
-    guarantee(zero_fill_state() == HeapRegion::ZeroFilling
-              && zero_filler() == Thread::current(),
-              "AHA! Tell Dave D if you see this...");
-    set_zero_fill_complete();
-    // gclog_or_tty->print_cr("Did sync ZF.");
-    ConcurrentZFThread::note_sync_zfs();
-    break;
-  case HeapRegion::ZeroFilling:
-    if (should_ignore_zf) {
-      // We can "break" the lock and take over the work.
-      Copy::fill_to_words(bottom(), capacity()/HeapWordSize);
-      set_zero_fill_complete();
-      ConcurrentZFThread::note_sync_zfs();
-      break;
-    } else {
-      ConcurrentZFThread::wait_for_ZF_completed(this);
-    }
-  case HeapRegion::ZeroFilled:
-    // Nothing to do.
-    break;
-  case HeapRegion::Allocated:
-    guarantee(false, "Should not call on allocated regions.");
-  }
-  assert(zero_fill_state() == HeapRegion::ZeroFilled, "Post");
 }
 
 HeapWord*
 HeapRegion::object_iterate_mem_careful(MemRegion mr,
                                        ObjectClosure* cl) {
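
The hunk above removes the whole per-region zero-fill state machine: the DEBUG allocate() check, set_zero_fill_state_work(), set_zero_fill_complete(), and the ensure_zero_filled() handshake on ZF_mon with the ConcurrentZFThread (whose include also disappears at the top of the file). A region no longer carries a zero-fill state at all; any remaining caller that needs zeroed memory can simply fill at hand-out time. A hedged sketch of that idea, reusing the same Copy::fill_to_words() call the removed code used; the helper itself is hypothetical:

    // Hypothetical helper (not from this changeset): zero a region eagerly,
    // without a background thread, a monitor, or per-region ZF state.
    static void zero_region_eagerly(HeapRegion* hr) {
      Copy::fill_to_words(hr->bottom(), hr->capacity() / HeapWordSize);
    }
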
@@ -1008,69 +939,5 @@
     _gc_time_stamp(0)
 {
   _offsets.set_space(this);
   initialize(mr, !is_zeroed, SpaceDecorator::Mangle);
 }
-
-size_t RegionList::length() {
-  size_t len = 0;
-  HeapRegion* cur = hd();
-  DEBUG_ONLY(HeapRegion* last = NULL);
-  while (cur != NULL) {
-    len++;
-    DEBUG_ONLY(last = cur);
-    cur = get_next(cur);
-  }
-  assert(last == tl(), "Invariant");
-  return len;
-}
-
-void RegionList::insert_before_head(HeapRegion* r) {
-  assert(well_formed(), "Inv");
-  set_next(r, hd());
-  _hd = r;
-  _sz++;
-  if (tl() == NULL) _tl = r;
-  assert(well_formed(), "Inv");
-}
-
-void RegionList::prepend_list(RegionList* new_list) {
-  assert(well_formed(), "Precondition");
-  assert(new_list->well_formed(), "Precondition");
-  HeapRegion* new_tl = new_list->tl();
-  if (new_tl != NULL) {
-    set_next(new_tl, hd());
-    _hd = new_list->hd();
-    _sz += new_list->sz();
-    if (tl() == NULL) _tl = new_list->tl();
-  } else {
-    assert(new_list->hd() == NULL && new_list->sz() == 0, "Inv");
-  }
-  assert(well_formed(), "Inv");
-}
-
-void RegionList::delete_after(HeapRegion* r) {
-  assert(well_formed(), "Precondition");
-  HeapRegion* next = get_next(r);
-  assert(r != NULL, "Precondition");
-  HeapRegion* next_tl = get_next(next);
-  set_next(r, next_tl);
-  dec_sz();
-  if (next == tl()) {
-    assert(next_tl == NULL, "Inv");
-    _tl = r;
-  }
-  assert(well_formed(), "Inv");
-}
-
-HeapRegion* RegionList::pop() {
-  assert(well_formed(), "Inv");
-  HeapRegion* res = hd();
-  if (res != NULL) {
-    _hd = get_next(res);
-    _sz--;
-    set_next(res, NULL);
-    if (sz() == 0) _tl = NULL;
-  }
-  assert(well_formed(), "Inv");
-  return res;
-}