comparison src/share/vm/gc_interface/collectedHeap.cpp @ 6725:da91efe96a93

6964458: Reimplement class meta-data storage to use native memory
Summary: Remove PermGen, allocate meta-data in metaspace linked to class loaders, rewrite GC walking, rewrite and rename metadata to be C++ classes
Reviewed-by: jmasa, stefank, never, coleenp, kvn, brutisso, mgerdin, dholmes, jrose, twisti, roland
Contributed-by: jmasa <jon.masamitsu@oracle.com>, stefank <stefan.karlsson@oracle.com>, mgerdin <mikael.gerdin@oracle.com>, never <tom.rodriguez@oracle.com>
author coleenp
date Sat, 01 Sep 2012 13:25:18 -0400
parents 9a9bb0010c91
children aed758eda82a
comparison: 6724:36d1d483d5d6 vs 6725:da91efe96a93
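
For orientation: the first hunk below adds CollectedHeap::satisfy_failed_metadata_allocation(), the hook a failed metaspace allocation can fall back to now that class meta-data lives in native memory owned by a class loader. A minimal sketch of such a retry path, assuming a hypothetical try_allocate_metadata() first-chance allocator (the real metaspace allocation code is outside this file):

// Illustrative only: hypothetical retry path for a failed metadata allocation.
// try_allocate_metadata() stands in for the per-class-loader first-chance
// allocation; it is not part of this changeset.
MetaWord* allocate_metadata_with_retry(ClassLoaderData* loader_data,
                                       size_t word_size,
                                       Metaspace::MetadataType mdtype) {
  MetaWord* result = try_allocate_metadata(loader_data, word_size, mdtype);
  if (result == NULL) {
    // Fall back to the new CollectedHeap hook, which can collect and retry
    // the allocation on behalf of this class loader's metaspace.
    result = Universe::heap()->satisfy_failed_metadata_allocation(loader_data,
                                                                  word_size,
                                                                  mdtype);
  }
  // A NULL result here would be reported as an OutOfMemoryError by the caller.
  return result;
}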
@@ -112,10 +112,42 @@
   } else {
     _gc_heap_log = NULL;
   }
 }
 
+// This interface assumes that it's being called by the
+// vm thread. It collects the heap assuming that the
+// heap lock is already held and that we are executing in
+// the context of the vm thread.
+void CollectedHeap::collect_as_vm_thread(GCCause::Cause cause) {
+  assert(Thread::current()->is_VM_thread(), "Precondition#1");
+  assert(Heap_lock->is_locked(), "Precondition#2");
+  GCCauseSetter gcs(this, cause);
+  switch (cause) {
+    case GCCause::_heap_inspection:
+    case GCCause::_heap_dump:
+    case GCCause::_metadata_GC_threshold : {
+      HandleMark hm;
+      do_full_collection(false);        // don't clear all soft refs
+      break;
+    }
+    case GCCause::_last_ditch_collection: {
+      HandleMark hm;
+      do_full_collection(true);         // do clear all soft refs
+      break;
+    }
+    default:
+      ShouldNotReachHere(); // Unexpected use of this function
+  }
+}
+MetaWord* CollectedHeap::satisfy_failed_metadata_allocation(
+                                            ClassLoaderData* loader_data,
+                                            size_t size, Metaspace::MetadataType mdtype) {
+  return collector_policy()->satisfy_failed_metadata_allocation(loader_data, size, mdtype);
+}
+
+
 void CollectedHeap::pre_initialize() {
   // Used for ReduceInitialCardMarks (when COMPILER2 is used);
   // otherwise remains unused.
 #ifdef COMPILER2
   _defer_initial_card_mark = ReduceInitialCardMarks && can_elide_tlab_store_barriers()
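
The two entry points added above serve GC requests that originate inside the VM. A hedged sketch of how a VM operation could satisfy the two asserted preconditions, modeled on the way existing VM_GC_Operation subclasses work (the class name is hypothetical; VMOp_HeapDumper is assumed to be an existing VMOp_Type value):

// Illustrative only: a VM operation that requests a full collection for a
// heap dump via the new interface.
class VM_SketchHeapDump : public VM_Operation {
 public:
  VMOp_Type type() const { return VMOp_HeapDumper; }

  // Runs in the requesting thread before the safepoint; holding the
  // Heap_lock across the operation is what makes Precondition#2 hold.
  bool doit_prologue() {
    Heap_lock->lock();
    return true;
  }

  // Runs in the VM thread at a safepoint, satisfying Precondition#1.
  void doit() {
    Universe::heap()->collect_as_vm_thread(GCCause::_heap_dump);
  }

  void doit_epilogue() {
    Heap_lock->unlock();
  }
};

A mutator thread would then hand the operation to the VM thread with VMThread::execute(&op).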
@@ -213,13 +245,11 @@
     // Verify that the storage points to a parsable object in heap
     DEBUG_ONLY(oop old_obj = oop(deferred.start());)
     assert(is_in(old_obj), "Not in allocated heap");
     assert(!can_elide_initializing_store_barrier(old_obj),
            "Else should have been filtered in new_store_pre_barrier()");
-    assert(!is_in_permanent(old_obj), "Sanity: not expected");
     assert(old_obj->is_oop(true), "Not an oop");
-    assert(old_obj->is_parsable(), "Will not be concurrently parsable");
     assert(deferred.word_size() == (size_t)(old_obj->size()),
            "Mismatch: multiple objects?");
   }
   BarrierSet* bs = barrier_set();
   assert(bs->has_write_region_opt(), "No write_region() on BarrierSet");
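
The two asserts dropped above sit in the deferred store-barrier flush path; the is_in_permanent() check is meaningless once there is no permanent generation. For context, a hedged sketch of what that path does, using a free-standing helper name that is not in the file:

// Illustrative only: shape of the deferred card-mark flush guarded by the
// asserts above.  With ReduceInitialCardMarks, the initial card marks for a
// newly allocated object can be elided and applied later for the whole
// object region in a single write_region() call.
void flush_deferred_card_mark_sketch(CollectedHeap* heap, MemRegion deferred) {
  if (!deferred.is_empty()) {
    BarrierSet* bs = heap->barrier_set();
    assert(bs->has_write_region_opt(), "No write_region() on BarrierSet");
    bs->write_region(deferred);   // dirty all cards covering the object
  }
}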
@@ -468,19 +498,15 @@
 oop CollectedHeap::Class_obj_allocate(KlassHandle klass, int size, KlassHandle real_klass, TRAPS) {
   debug_only(check_for_valid_allocation_state());
   assert(!Universe::heap()->is_gc_active(), "Allocation during gc not allowed");
   assert(size >= 0, "int won't convert to size_t");
   HeapWord* obj;
-  if (JavaObjectsInPerm) {
-    obj = common_permanent_mem_allocate_init(size, CHECK_NULL);
-  } else {
   assert(ScavengeRootsInCode > 0, "must be");
   obj = common_mem_allocate_init(size, CHECK_NULL);
-  }
   post_allocation_setup_common(klass, obj);
   assert(Universe::is_bootstrapping() ||
-         !((oop)obj)->blueprint()->oop_is_array(), "must not be an array");
+         !((oop)obj)->is_array(), "must not be an array");
   NOT_PRODUCT(Universe::heap()->check_for_bad_heap_word_value(obj, size));
   oop mirror = (oop)obj;
 
   java_lang_Class::set_oop_size(mirror, size);
 
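
The last hunk drops the JavaObjectsInPerm branch (Class mirrors are now always ordinary Java-heap objects) and stops reaching the klass through blueprint(), since the klass is no longer a klassOop. A small hedged restatement of the condition the reworked assert enforces, using a hypothetical helper name:

// Illustrative only: with metadata as native C++ Klass objects, per-kind
// predicates are asked of the oop directly instead of via blueprint().
static bool mirror_shape_is_valid(oop obj) {
  // A java.lang.Class instance must never be an array object.
  return Universe::is_bootstrapping() || !obj->is_array();
}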