comparison src/share/vm/gc_implementation/g1/g1CollectedHeap.cpp @ 20335:eec72fa4b108

8040722: G1: Clean up usages of heap_region_containing Reviewed-by: tschatzl, jmasa
author brutisso
date Tue, 19 Aug 2014 12:39:06 +0200
parents ff3169f25621
children 6701abbc4441
comparison
equal deleted inserted replaced
20334:ff3169f25621 20335:eec72fa4b108
// be moved during a partial collection. Though it can be
// inaccurate, it is sufficient for G1 because the conservative
// implementation of is_scavengable() for G1 will indicate that
// all nmethods must be scanned during a partial collection.
444 bool G1CollectedHeap::is_in_partial_collection(const void* p) { 444 bool G1CollectedHeap::is_in_partial_collection(const void* p) {
445 HeapRegion* hr = heap_region_containing(p); 445 if (p == NULL) {
446 return hr != NULL && hr->in_collection_set(); 446 return false;
447 }
448 return heap_region_containing(p)->in_collection_set();
447 } 449 }
448 #endif 450 #endif
449 451
450 // Returns true if the reference points to an object that 452 // Returns true if the reference points to an object that
451 // can move in an incremental collection. 453 // can move in an incremental collection.
452 bool G1CollectedHeap::is_scavengable(const void* p) { 454 bool G1CollectedHeap::is_scavengable(const void* p) {
453 G1CollectedHeap* g1h = G1CollectedHeap::heap();
454 G1CollectorPolicy* g1p = g1h->g1_policy();
455 HeapRegion* hr = heap_region_containing(p); 455 HeapRegion* hr = heap_region_containing(p);
456 if (hr == NULL) { 456 return !hr->isHumongous();
457 // null
458 assert(p == NULL, err_msg("Not NULL " PTR_FORMAT ,p));
459 return false;
460 } else {
461 return !hr->isHumongous();
462 }
463 } 457 }
464 458
465 void G1CollectedHeap::check_ct_logs_at_safepoint() { 459 void G1CollectedHeap::check_ct_logs_at_safepoint() {
466 DirtyCardQueueSet& dcqs = JavaThread::dirty_card_queue_set(); 460 DirtyCardQueueSet& dcqs = JavaThread::dirty_card_queue_set();
467 CardTableModRefBS* ct_bs = g1_barrier_set(); 461 CardTableModRefBS* ct_bs = g1_barrier_set();
2982 } 2976 }
2983 return NULL; 2977 return NULL;
2984 } 2978 }
2985 2979
2986 Space* G1CollectedHeap::space_containing(const void* addr) const { 2980 Space* G1CollectedHeap::space_containing(const void* addr) const {
2987 Space* res = heap_region_containing(addr); 2981 return heap_region_containing(addr);
2988 return res;
2989 } 2982 }
2990 2983
2991 HeapWord* G1CollectedHeap::block_start(const void* addr) const { 2984 HeapWord* G1CollectedHeap::block_start(const void* addr) const {
2992 Space* sp = space_containing(addr); 2985 Space* sp = space_containing(addr);
2993 if (sp != NULL) { 2986 return sp->block_start(addr);
2994 return sp->block_start(addr);
2995 }
2996 return NULL;
2997 } 2987 }
2998 2988
2999 size_t G1CollectedHeap::block_size(const HeapWord* addr) const { 2989 size_t G1CollectedHeap::block_size(const HeapWord* addr) const {
3000 Space* sp = space_containing(addr); 2990 Space* sp = space_containing(addr);
3001 assert(sp != NULL, "block_size of address outside of heap");
3002 return sp->block_size(addr); 2991 return sp->block_size(addr);
3003 } 2992 }
3004 2993
3005 bool G1CollectedHeap::block_is_obj(const HeapWord* addr) const { 2994 bool G1CollectedHeap::block_is_obj(const HeapWord* addr) const {
3006 Space* sp = space_containing(addr); 2995 Space* sp = space_containing(addr);
4650 4639
4651 G1ParGCAllocBuffer::G1ParGCAllocBuffer(size_t gclab_word_size) : 4640 G1ParGCAllocBuffer::G1ParGCAllocBuffer(size_t gclab_word_size) :
4652 ParGCAllocBuffer(gclab_word_size), _retired(true) { } 4641 ParGCAllocBuffer(gclab_word_size), _retired(true) { }
4653 4642
4654 void G1ParCopyHelper::mark_object(oop obj) { 4643 void G1ParCopyHelper::mark_object(oop obj) {
4655 #ifdef ASSERT 4644 assert(!_g1->heap_region_containing(obj)->in_collection_set(), "should not mark objects in the CSet");
4656 HeapRegion* hr = _g1->heap_region_containing(obj);
4657 assert(hr != NULL, "sanity");
4658 assert(!hr->in_collection_set(), "should not mark objects in the CSet");
4659 #endif // ASSERT
4660 4645
4661 // We know that the object is not moving so it's safe to read its size. 4646 // We know that the object is not moving so it's safe to read its size.
4662 _cm->grayRoot(obj, (size_t) obj->size(), _worker_id); 4647 _cm->grayRoot(obj, (size_t) obj->size(), _worker_id);
4663 } 4648 }
4664 4649
4665 void G1ParCopyHelper::mark_forwarded_object(oop from_obj, oop to_obj) { 4650 void G1ParCopyHelper::mark_forwarded_object(oop from_obj, oop to_obj) {
4666 #ifdef ASSERT
4667 assert(from_obj->is_forwarded(), "from obj should be forwarded"); 4651 assert(from_obj->is_forwarded(), "from obj should be forwarded");
4668 assert(from_obj->forwardee() == to_obj, "to obj should be the forwardee"); 4652 assert(from_obj->forwardee() == to_obj, "to obj should be the forwardee");
4669 assert(from_obj != to_obj, "should not be self-forwarded"); 4653 assert(from_obj != to_obj, "should not be self-forwarded");
4670 4654
4671 HeapRegion* from_hr = _g1->heap_region_containing(from_obj); 4655 assert(_g1->heap_region_containing(from_obj)->in_collection_set(), "from obj should be in the CSet");
4672 assert(from_hr != NULL, "sanity"); 4656 assert(!_g1->heap_region_containing(to_obj)->in_collection_set(), "should not mark objects in the CSet");
4673 assert(from_hr->in_collection_set(), "from obj should be in the CSet");
4674
4675 HeapRegion* to_hr = _g1->heap_region_containing(to_obj);
4676 assert(to_hr != NULL, "sanity");
4677 assert(!to_hr->in_collection_set(), "should not mark objects in the CSet");
4678 #endif // ASSERT
4679 4657
4680 // The object might be in the process of being copied by another 4658 // The object might be in the process of being copied by another
4681 // worker so we cannot trust that its to-space image is 4659 // worker so we cannot trust that its to-space image is
4682 // well-formed. So we have to read its size from its from-space 4660 // well-formed. So we have to read its size from its from-space
4683 // image which we know should not be changing. 4661 // image which we know should not be changing.
6910 _refine_cte_cl->set_concurrent(concurrent); 6888 _refine_cte_cl->set_concurrent(concurrent);
6911 } 6889 }
6912 6890
6913 bool G1CollectedHeap::is_in_closed_subset(const void* p) const { 6891 bool G1CollectedHeap::is_in_closed_subset(const void* p) const {
6914 HeapRegion* hr = heap_region_containing(p); 6892 HeapRegion* hr = heap_region_containing(p);
6915 if (hr == NULL) { 6893 return hr->is_in(p);
6916 return false;
6917 } else {
6918 return hr->is_in(p);
6919 }
6920 } 6894 }
6921 6895
6922 // Methods for the mutator alloc region 6896 // Methods for the mutator alloc region
6923 6897
6924 HeapRegion* G1CollectedHeap::new_mutator_alloc_region(size_t word_size, 6898 HeapRegion* G1CollectedHeap::new_mutator_alloc_region(size_t word_size,