Mercurial > hg > truffle
comparison src/share/vm/gc_implementation/g1/g1CollectedHeap.cpp @ 12080:5888334c9c24
7145569: G1: optimize nmethods scanning
Summary: Add a list of nmethods to the RSet for a region that contain references into the region. Skip scanning the code cache during root scanning and scan the nmethod lists during RSet scanning instead.
Reviewed-by: tschatzl, brutisso, mgerdin, twisti, kvn
author | johnc |
---|---|
date | Thu, 15 Aug 2013 10:52:18 +0200 |
parents | 71180a6e5080 |
children | 9720d338b1d5 |
comparison
equal
deleted
inserted
replaced
12033:bd902affe102 | 12080:5888334c9c24 |
---|---|
21 * questions. | 21 * questions. |
22 * | 22 * |
23 */ | 23 */ |
24 | 24 |
25 #include "precompiled.hpp" | 25 #include "precompiled.hpp" |
26 #include "code/codeCache.hpp" | |
26 #include "code/icBuffer.hpp" | 27 #include "code/icBuffer.hpp" |
27 #include "gc_implementation/g1/bufferingOopClosure.hpp" | 28 #include "gc_implementation/g1/bufferingOopClosure.hpp" |
28 #include "gc_implementation/g1/concurrentG1Refine.hpp" | 29 #include "gc_implementation/g1/concurrentG1Refine.hpp" |
29 #include "gc_implementation/g1/concurrentG1RefineThread.hpp" | 30 #include "gc_implementation/g1/concurrentG1RefineThread.hpp" |
30 #include "gc_implementation/g1/concurrentMarkThread.inline.hpp" | 31 #include "gc_implementation/g1/concurrentMarkThread.inline.hpp" |
1174 class PostMCRemSetClearClosure: public HeapRegionClosure { | 1175 class PostMCRemSetClearClosure: public HeapRegionClosure { |
1175 G1CollectedHeap* _g1h; | 1176 G1CollectedHeap* _g1h; |
1176 ModRefBarrierSet* _mr_bs; | 1177 ModRefBarrierSet* _mr_bs; |
1177 public: | 1178 public: |
1178 PostMCRemSetClearClosure(G1CollectedHeap* g1h, ModRefBarrierSet* mr_bs) : | 1179 PostMCRemSetClearClosure(G1CollectedHeap* g1h, ModRefBarrierSet* mr_bs) : |
1179 _g1h(g1h), _mr_bs(mr_bs) { } | 1180 _g1h(g1h), _mr_bs(mr_bs) {} |
1181 | |
1180 bool doHeapRegion(HeapRegion* r) { | 1182 bool doHeapRegion(HeapRegion* r) { |
1183 HeapRegionRemSet* hrrs = r->rem_set(); | |
1184 | |
1181 if (r->continuesHumongous()) { | 1185 if (r->continuesHumongous()) { |
1186 // We'll assert that the strong code root list and RSet is empty | |
1187 assert(hrrs->strong_code_roots_list_length() == 0, "sanity"); | |
1188 assert(hrrs->occupied() == 0, "RSet should be empty"); | |
1182 return false; | 1189 return false; |
1183 } | 1190 } |
1191 | |
1184 _g1h->reset_gc_time_stamps(r); | 1192 _g1h->reset_gc_time_stamps(r); |
1185 HeapRegionRemSet* hrrs = r->rem_set(); | 1193 hrrs->clear(); |
1186 if (hrrs != NULL) hrrs->clear(); | |
1187 // You might think here that we could clear just the cards | 1194 // You might think here that we could clear just the cards |
1188 // corresponding to the used region. But no: if we leave a dirty card | 1195 // corresponding to the used region. But no: if we leave a dirty card |
1189 // in a region we might allocate into, then it would prevent that card | 1196 // in a region we might allocate into, then it would prevent that card |
1190 // from being enqueued, and cause it to be missed. | 1197 // from being enqueued, and cause it to be missed. |
1191 // Re: the performance cost: we shouldn't be doing full GC anyway! | 1198 // Re: the performance cost: we shouldn't be doing full GC anyway! |
1192 _mr_bs->clear(MemRegion(r->bottom(), r->end())); | 1199 _mr_bs->clear(MemRegion(r->bottom(), r->end())); |
1200 | |
1193 return false; | 1201 return false; |
1194 } | 1202 } |
1195 }; | 1203 }; |
1196 | 1204 |
1197 void G1CollectedHeap::clear_rsets_post_compaction() { | 1205 void G1CollectedHeap::clear_rsets_post_compaction() { |
1267 void G1CollectedHeap::print_hrs_post_compaction() { | 1275 void G1CollectedHeap::print_hrs_post_compaction() { |
1268 PostCompactionPrinterClosure cl(hr_printer()); | 1276 PostCompactionPrinterClosure cl(hr_printer()); |
1269 heap_region_iterate(&cl); | 1277 heap_region_iterate(&cl); |
1270 } | 1278 } |
1271 | 1279 |
1272 double G1CollectedHeap::verify(bool guard, const char* msg) { | |
1273 double verify_time_ms = 0.0; | |
1274 | |
1275 if (guard && total_collections() >= VerifyGCStartAt) { | |
1276 double verify_start = os::elapsedTime(); | |
1277 HandleMark hm; // Discard invalid handles created during verification | |
1278 prepare_for_verify(); | |
1279 Universe::verify(VerifyOption_G1UsePrevMarking, msg); | |
1280 verify_time_ms = (os::elapsedTime() - verify_start) * 1000; | |
1281 } | |
1282 | |
1283 return verify_time_ms; | |
1284 } | |
1285 | |
1286 void G1CollectedHeap::verify_before_gc() { | |
1287 double verify_time_ms = verify(VerifyBeforeGC, " VerifyBeforeGC:"); | |
1288 g1_policy()->phase_times()->record_verify_before_time_ms(verify_time_ms); | |
1289 } | |
1290 | |
1291 void G1CollectedHeap::verify_after_gc() { | |
1292 double verify_time_ms = verify(VerifyAfterGC, " VerifyAfterGC:"); | |
1293 g1_policy()->phase_times()->record_verify_after_time_ms(verify_time_ms); | |
1294 } | |
1295 | |
1296 bool G1CollectedHeap::do_collection(bool explicit_gc, | 1280 bool G1CollectedHeap::do_collection(bool explicit_gc, |
1297 bool clear_all_soft_refs, | 1281 bool clear_all_soft_refs, |
1298 size_t word_size) { | 1282 size_t word_size) { |
1299 assert_at_safepoint(true /* should_be_vm_thread */); | 1283 assert_at_safepoint(true /* should_be_vm_thread */); |
1300 | 1284 |
1431 assert(!ref_processor_stw()->discovery_enabled(), "Postcondition"); | 1415 assert(!ref_processor_stw()->discovery_enabled(), "Postcondition"); |
1432 ref_processor_stw()->verify_no_references_recorded(); | 1416 ref_processor_stw()->verify_no_references_recorded(); |
1433 | 1417 |
1434 // Delete metaspaces for unloaded class loaders and clean up loader_data graph | 1418 // Delete metaspaces for unloaded class loaders and clean up loader_data graph |
1435 ClassLoaderDataGraph::purge(); | 1419 ClassLoaderDataGraph::purge(); |
1436 MetaspaceAux::verify_metrics(); | 1420 MetaspaceAux::verify_metrics(); |
1437 | 1421 |
1438 // Note: since we've just done a full GC, concurrent | 1422 // Note: since we've just done a full GC, concurrent |
1439 // marking is no longer active. Therefore we need not | 1423 // marking is no longer active. Therefore we need not |
1440 // re-enable reference discovery for the CM ref processor. | 1424 // re-enable reference discovery for the CM ref processor. |
1441 // That will be done at the start of the next marking cycle. | 1425 // That will be done at the start of the next marking cycle. |
1501 reset_heap_region_claim_values(); | 1485 reset_heap_region_claim_values(); |
1502 } else { | 1486 } else { |
1503 RebuildRSOutOfRegionClosure rebuild_rs(this); | 1487 RebuildRSOutOfRegionClosure rebuild_rs(this); |
1504 heap_region_iterate(&rebuild_rs); | 1488 heap_region_iterate(&rebuild_rs); |
1505 } | 1489 } |
1490 | |
1491 // Rebuild the strong code root lists for each region | |
1492 rebuild_strong_code_roots(); | |
1506 | 1493 |
1507 if (true) { // FIXME | 1494 if (true) { // FIXME |
1508 MetaspaceGC::compute_new_size(); | 1495 MetaspaceGC::compute_new_size(); |
1509 } | 1496 } |
1510 | 1497 |
3107 default: ShouldNotReachHere(); | 3094 default: ShouldNotReachHere(); |
3108 } | 3095 } |
3109 return NULL; // keep some compilers happy | 3096 return NULL; // keep some compilers happy |
3110 } | 3097 } |
3111 | 3098 |
3099 // TODO: VerifyRootsClosure extends OopsInGenClosure so that we can | |
3100 // pass it as the perm_blk to SharedHeap::process_strong_roots. | |
3101 // When process_strong_roots stop calling perm_blk->younger_refs_iterate | |
3102 // we can change this closure to extend the simpler OopClosure. | |
3103 class VerifyRootsClosure: public OopsInGenClosure { | |
3104 private: | |
3105 G1CollectedHeap* _g1h; | |
3106 VerifyOption _vo; | |
3107 bool _failures; | |
3108 public: | |
3109 // _vo == UsePrevMarking -> use "prev" marking information, | |
3110 // _vo == UseNextMarking -> use "next" marking information, | |
3111 // _vo == UseMarkWord -> use mark word from object header. | |
3112 VerifyRootsClosure(VerifyOption vo) : | |
3113 _g1h(G1CollectedHeap::heap()), | |
3114 _vo(vo), | |
3115 _failures(false) { } | |
3116 | |
3117 bool failures() { return _failures; } | |
3118 | |
3119 template <class T> void do_oop_nv(T* p) { | |
3120 T heap_oop = oopDesc::load_heap_oop(p); | |
3121 if (!oopDesc::is_null(heap_oop)) { | |
3122 oop obj = oopDesc::decode_heap_oop_not_null(heap_oop); | |
3123 if (_g1h->is_obj_dead_cond(obj, _vo)) { | |
3124 gclog_or_tty->print_cr("Root location "PTR_FORMAT" " | |
3125 "points to dead obj "PTR_FORMAT, p, (void*) obj); | |
3126 if (_vo == VerifyOption_G1UseMarkWord) { | |
3127 gclog_or_tty->print_cr(" Mark word: "PTR_FORMAT, (void*)(obj->mark())); | |
3128 } | |
3129 obj->print_on(gclog_or_tty); | |
3130 _failures = true; | |
3131 } | |
3132 } | |
3133 } | |
3134 | |
3135 void do_oop(oop* p) { do_oop_nv(p); } | |
3136 void do_oop(narrowOop* p) { do_oop_nv(p); } | |
3137 }; | |
3138 | |
3139 class G1VerifyCodeRootOopClosure: public OopsInGenClosure { | |
3140 G1CollectedHeap* _g1h; | |
3141 OopClosure* _root_cl; | |
3142 nmethod* _nm; | |
3143 VerifyOption _vo; | |
3144 bool _failures; | |
3145 | |
3146 template <class T> void do_oop_work(T* p) { | |
3147 // First verify that this root is live | |
3148 _root_cl->do_oop(p); | |
3149 | |
3150 if (!G1VerifyHeapRegionCodeRoots) { | |
3151 // We're not verifying the code roots attached to heap region. | |
3152 return; | |
3153 } | |
3154 | |
3155 // Don't check the code roots during marking verification in a full GC | |
3156 if (_vo == VerifyOption_G1UseMarkWord) { | |
3157 return; | |
3158 } | |
3159 | |
3160 // Now verify that the current nmethod (which contains p) is | |
3161 // in the code root list of the heap region containing the | |
3162 // object referenced by p. | |
3163 | |
3164 T heap_oop = oopDesc::load_heap_oop(p); | |
3165 if (!oopDesc::is_null(heap_oop)) { | |
3166 oop obj = oopDesc::decode_heap_oop_not_null(heap_oop); | |
3167 | |
3168 // Now fetch the region containing the object | |
3169 HeapRegion* hr = _g1h->heap_region_containing(obj); | |
3170 HeapRegionRemSet* hrrs = hr->rem_set(); | |
3171 // Verify that the strong code root list for this region | |
3172 // contains the nmethod | |
3173 if (!hrrs->strong_code_roots_list_contains(_nm)) { | |
3174 gclog_or_tty->print_cr("Code root location "PTR_FORMAT" " | |
3175 "from nmethod "PTR_FORMAT" not in strong " | |
3176 "code roots for region ["PTR_FORMAT","PTR_FORMAT")", | |
3177 p, _nm, hr->bottom(), hr->end()); | |
3178 _failures = true; | |
3179 } | |
3180 } | |
3181 } | |
3182 | |
3183 public: | |
3184 G1VerifyCodeRootOopClosure(G1CollectedHeap* g1h, OopClosure* root_cl, VerifyOption vo): | |
3185 _g1h(g1h), _root_cl(root_cl), _vo(vo), _nm(NULL), _failures(false) {} | |
3186 | |
3187 void do_oop(oop* p) { do_oop_work(p); } | |
3188 void do_oop(narrowOop* p) { do_oop_work(p); } | |
3189 | |
3190 void set_nmethod(nmethod* nm) { _nm = nm; } | |
3191 bool failures() { return _failures; } | |
3192 }; | |
3193 | |
3194 class G1VerifyCodeRootBlobClosure: public CodeBlobClosure { | |
3195 G1VerifyCodeRootOopClosure* _oop_cl; | |
3196 | |
3197 public: | |
3198 G1VerifyCodeRootBlobClosure(G1VerifyCodeRootOopClosure* oop_cl): | |
3199 _oop_cl(oop_cl) {} | |
3200 | |
3201 void do_code_blob(CodeBlob* cb) { | |
3202 nmethod* nm = cb->as_nmethod_or_null(); | |
3203 if (nm != NULL) { | |
3204 _oop_cl->set_nmethod(nm); | |
3205 nm->oops_do(_oop_cl); | |
3206 } | |
3207 } | |
3208 }; | |
3209 | |
3210 class YoungRefCounterClosure : public OopClosure { | |
3211 G1CollectedHeap* _g1h; | |
3212 int _count; | |
3213 public: | |
3214 YoungRefCounterClosure(G1CollectedHeap* g1h) : _g1h(g1h), _count(0) {} | |
3215 void do_oop(oop* p) { if (_g1h->is_in_young(*p)) { _count++; } } | |
3216 void do_oop(narrowOop* p) { ShouldNotReachHere(); } | |
3217 | |
3218 int count() { return _count; } | |
3219 void reset_count() { _count = 0; }; | |
3220 }; | |
3221 | |
3222 class VerifyKlassClosure: public KlassClosure { | |
3223 YoungRefCounterClosure _young_ref_counter_closure; | |
3224 OopClosure *_oop_closure; | |
3225 public: | |
3226 VerifyKlassClosure(G1CollectedHeap* g1h, OopClosure* cl) : _young_ref_counter_closure(g1h), _oop_closure(cl) {} | |
3227 void do_klass(Klass* k) { | |
3228 k->oops_do(_oop_closure); | |
3229 | |
3230 _young_ref_counter_closure.reset_count(); | |
3231 k->oops_do(&_young_ref_counter_closure); | |
3232 if (_young_ref_counter_closure.count() > 0) { | |
3233 guarantee(k->has_modified_oops(), err_msg("Klass %p, has young refs but is not dirty.", k)); | |
3234 } | |
3235 } | |
3236 }; | |
3237 | |
3112 class VerifyLivenessOopClosure: public OopClosure { | 3238 class VerifyLivenessOopClosure: public OopClosure { |
3113 G1CollectedHeap* _g1h; | 3239 G1CollectedHeap* _g1h; |
3114 VerifyOption _vo; | 3240 VerifyOption _vo; |
3115 public: | 3241 public: |
3116 VerifyLivenessOopClosure(G1CollectedHeap* g1h, VerifyOption vo): | 3242 VerifyLivenessOopClosure(G1CollectedHeap* g1h, VerifyOption vo): |
3240 } | 3366 } |
3241 return false; // stop the region iteration if we hit a failure | 3367 return false; // stop the region iteration if we hit a failure |
3242 } | 3368 } |
3243 }; | 3369 }; |
3244 | 3370 |
3245 class YoungRefCounterClosure : public OopClosure { | 3371 // This is the task used for parallel verification of the heap regions |
3246 G1CollectedHeap* _g1h; | |
3247 int _count; | |
3248 public: | |
3249 YoungRefCounterClosure(G1CollectedHeap* g1h) : _g1h(g1h), _count(0) {} | |
3250 void do_oop(oop* p) { if (_g1h->is_in_young(*p)) { _count++; } } | |
3251 void do_oop(narrowOop* p) { ShouldNotReachHere(); } | |
3252 | |
3253 int count() { return _count; } | |
3254 void reset_count() { _count = 0; }; | |
3255 }; | |
3256 | |
3257 class VerifyKlassClosure: public KlassClosure { | |
3258 YoungRefCounterClosure _young_ref_counter_closure; | |
3259 OopClosure *_oop_closure; | |
3260 public: | |
3261 VerifyKlassClosure(G1CollectedHeap* g1h, OopClosure* cl) : _young_ref_counter_closure(g1h), _oop_closure(cl) {} | |
3262 void do_klass(Klass* k) { | |
3263 k->oops_do(_oop_closure); | |
3264 | |
3265 _young_ref_counter_closure.reset_count(); | |
3266 k->oops_do(&_young_ref_counter_closure); | |
3267 if (_young_ref_counter_closure.count() > 0) { | |
3268 guarantee(k->has_modified_oops(), err_msg("Klass %p, has young refs but is not dirty.", k)); | |
3269 } | |
3270 } | |
3271 }; | |
3272 | |
3273 // TODO: VerifyRootsClosure extends OopsInGenClosure so that we can | |
3274 // pass it as the perm_blk to SharedHeap::process_strong_roots. | |
3275 // When process_strong_roots stop calling perm_blk->younger_refs_iterate | |
3276 // we can change this closure to extend the simpler OopClosure. | |
3277 class VerifyRootsClosure: public OopsInGenClosure { | |
3278 private: | |
3279 G1CollectedHeap* _g1h; | |
3280 VerifyOption _vo; | |
3281 bool _failures; | |
3282 public: | |
3283 // _vo == UsePrevMarking -> use "prev" marking information, | |
3284 // _vo == UseNextMarking -> use "next" marking information, | |
3285 // _vo == UseMarkWord -> use mark word from object header. | |
3286 VerifyRootsClosure(VerifyOption vo) : | |
3287 _g1h(G1CollectedHeap::heap()), | |
3288 _vo(vo), | |
3289 _failures(false) { } | |
3290 | |
3291 bool failures() { return _failures; } | |
3292 | |
3293 template <class T> void do_oop_nv(T* p) { | |
3294 T heap_oop = oopDesc::load_heap_oop(p); | |
3295 if (!oopDesc::is_null(heap_oop)) { | |
3296 oop obj = oopDesc::decode_heap_oop_not_null(heap_oop); | |
3297 if (_g1h->is_obj_dead_cond(obj, _vo)) { | |
3298 gclog_or_tty->print_cr("Root location "PTR_FORMAT" " | |
3299 "points to dead obj "PTR_FORMAT, p, (void*) obj); | |
3300 if (_vo == VerifyOption_G1UseMarkWord) { | |
3301 gclog_or_tty->print_cr(" Mark word: "PTR_FORMAT, (void*)(obj->mark())); | |
3302 } | |
3303 obj->print_on(gclog_or_tty); | |
3304 _failures = true; | |
3305 } | |
3306 } | |
3307 } | |
3308 | |
3309 void do_oop(oop* p) { do_oop_nv(p); } | |
3310 void do_oop(narrowOop* p) { do_oop_nv(p); } | |
3311 }; | |
3312 | |
3313 // This is the task used for parallel heap verification. | |
3314 | 3372 |
3315 class G1ParVerifyTask: public AbstractGangTask { | 3373 class G1ParVerifyTask: public AbstractGangTask { |
3316 private: | 3374 private: |
3317 G1CollectedHeap* _g1h; | 3375 G1CollectedHeap* _g1h; |
3318 VerifyOption _vo; | 3376 VerifyOption _vo; |
3342 _failures = true; | 3400 _failures = true; |
3343 } | 3401 } |
3344 } | 3402 } |
3345 }; | 3403 }; |
3346 | 3404 |
3347 void G1CollectedHeap::verify(bool silent) { | 3405 void G1CollectedHeap::verify(bool silent, VerifyOption vo) { |
3348 verify(silent, VerifyOption_G1UsePrevMarking); | |
3349 } | |
3350 | |
3351 void G1CollectedHeap::verify(bool silent, | |
3352 VerifyOption vo) { | |
3353 if (SafepointSynchronize::is_at_safepoint()) { | 3406 if (SafepointSynchronize::is_at_safepoint()) { |
3407 assert(Thread::current()->is_VM_thread(), | |
3408 "Expected to be executed serially by the VM thread at this point"); | |
3409 | |
3354 if (!silent) { gclog_or_tty->print("Roots "); } | 3410 if (!silent) { gclog_or_tty->print("Roots "); } |
3355 VerifyRootsClosure rootsCl(vo); | 3411 VerifyRootsClosure rootsCl(vo); |
3356 | 3412 G1VerifyCodeRootOopClosure codeRootsCl(this, &rootsCl, vo); |
3357 assert(Thread::current()->is_VM_thread(), | 3413 G1VerifyCodeRootBlobClosure blobsCl(&codeRootsCl); |
3358 "Expected to be executed serially by the VM thread at this point"); | |
3359 | |
3360 CodeBlobToOopClosure blobsCl(&rootsCl, /*do_marking=*/ false); | |
3361 VerifyKlassClosure klassCl(this, &rootsCl); | 3414 VerifyKlassClosure klassCl(this, &rootsCl); |
3362 | 3415 |
3363 // We apply the relevant closures to all the oops in the | 3416 // We apply the relevant closures to all the oops in the |
3364 // system dictionary, the string table and the code cache. | 3417 // system dictionary, the string table and the code cache. |
3365 const int so = SO_AllClasses | SO_Strings | SO_CodeCache; | 3418 const int so = SO_AllClasses | SO_Strings | SO_CodeCache; |
3374 &rootsCl, | 3427 &rootsCl, |
3375 &blobsCl, | 3428 &blobsCl, |
3376 &klassCl | 3429 &klassCl |
3377 ); | 3430 ); |
3378 | 3431 |
3379 bool failures = rootsCl.failures(); | 3432 bool failures = rootsCl.failures() || codeRootsCl.failures(); |
3380 | 3433 |
3381 if (vo != VerifyOption_G1UseMarkWord) { | 3434 if (vo != VerifyOption_G1UseMarkWord) { |
3382 // If we're verifying during a full GC then the region sets | 3435 // If we're verifying during a full GC then the region sets |
3383 // will have been torn down at the start of the GC. Therefore | 3436 // will have been torn down at the start of the GC. Therefore |
3384 // verifying the region sets will fail. So we only verify | 3437 // verifying the region sets will fail. So we only verify |
3441 guarantee(!failures, "there should not have been any failures"); | 3494 guarantee(!failures, "there should not have been any failures"); |
3442 } else { | 3495 } else { |
3443 if (!silent) | 3496 if (!silent) |
3444 gclog_or_tty->print("(SKIPPING roots, heapRegionSets, heapRegions, remset) "); | 3497 gclog_or_tty->print("(SKIPPING roots, heapRegionSets, heapRegions, remset) "); |
3445 } | 3498 } |
3499 } | |
3500 | |
3501 void G1CollectedHeap::verify(bool silent) { | |
3502 verify(silent, VerifyOption_G1UsePrevMarking); | |
3503 } | |
3504 | |
3505 double G1CollectedHeap::verify(bool guard, const char* msg) { | |
3506 double verify_time_ms = 0.0; | |
3507 | |
3508 if (guard && total_collections() >= VerifyGCStartAt) { | |
3509 double verify_start = os::elapsedTime(); | |
3510 HandleMark hm; // Discard invalid handles created during verification | |
3511 prepare_for_verify(); | |
3512 Universe::verify(VerifyOption_G1UsePrevMarking, msg); | |
3513 verify_time_ms = (os::elapsedTime() - verify_start) * 1000; | |
3514 } | |
3515 | |
3516 return verify_time_ms; | |
3517 } | |
3518 | |
3519 void G1CollectedHeap::verify_before_gc() { | |
3520 double verify_time_ms = verify(VerifyBeforeGC, " VerifyBeforeGC:"); | |
3521 g1_policy()->phase_times()->record_verify_before_time_ms(verify_time_ms); | |
3522 } | |
3523 | |
3524 void G1CollectedHeap::verify_after_gc() { | |
3525 double verify_time_ms = verify(VerifyAfterGC, " VerifyAfterGC:"); | |
3526 g1_policy()->phase_times()->record_verify_after_time_ms(verify_time_ms); | |
3446 } | 3527 } |
3447 | 3528 |
3448 class PrintRegionClosure: public HeapRegionClosure { | 3529 class PrintRegionClosure: public HeapRegionClosure { |
3449 outputStream* _st; | 3530 outputStream* _st; |
3450 public: | 3531 public: |
3864 // get entries from the secondary_free_list. | 3945 // get entries from the secondary_free_list. |
3865 if (!G1StressConcRegionFreeing) { | 3946 if (!G1StressConcRegionFreeing) { |
3866 append_secondary_free_list_if_not_empty_with_lock(); | 3947 append_secondary_free_list_if_not_empty_with_lock(); |
3867 } | 3948 } |
3868 | 3949 |
3869 assert(check_young_list_well_formed(), | 3950 assert(check_young_list_well_formed(), "young list should be well formed"); |
3870 "young list should be well formed"); | 3951 assert(check_heap_region_claim_values(HeapRegion::InitialClaimValue), |
3952 "sanity check"); | |
3871 | 3953 |
3872 // Don't dynamically change the number of GC threads this early. A value of | 3954 // Don't dynamically change the number of GC threads this early. A value of |
3873 // 0 is used to indicate serial work. When parallel work is done, | 3955 // 0 is used to indicate serial work. When parallel work is done, |
3874 // it will be set. | 3956 // it will be set. |
3875 | 3957 |
4985 scan_klasses_cl = &scan_mark_klasses_cl_s; | 5067 scan_klasses_cl = &scan_mark_klasses_cl_s; |
4986 } | 5068 } |
4987 | 5069 |
4988 G1ParPushHeapRSClosure push_heap_rs_cl(_g1h, &pss); | 5070 G1ParPushHeapRSClosure push_heap_rs_cl(_g1h, &pss); |
4989 | 5071 |
4990 int so = SharedHeap::SO_AllClasses | SharedHeap::SO_Strings | SharedHeap::SO_CodeCache; | 5072 // Don't scan the scavengable methods in the code cache as part |
5073 // of strong root scanning. The code roots that point into a | |
5074 // region in the collection set are scanned when we scan the | |
5075 // region's RSet. | |
5076 int so = SharedHeap::SO_AllClasses | SharedHeap::SO_Strings; | |
4991 | 5077 |
4992 pss.start_strong_roots(); | 5078 pss.start_strong_roots(); |
4993 _g1h->g1_process_strong_roots(/* is scavenging */ true, | 5079 _g1h->g1_process_strong_roots(/* is scavenging */ true, |
4994 SharedHeap::ScanningOption(so), | 5080 SharedHeap::ScanningOption(so), |
4995 scan_root_cl, | 5081 scan_root_cl, |
5027 } | 5113 } |
5028 }; | 5114 }; |
5029 | 5115 |
5030 // *** Common G1 Evacuation Stuff | 5116 // *** Common G1 Evacuation Stuff |
5031 | 5117 |
5032 // Closures that support the filtering of CodeBlobs scanned during | |
5033 // external root scanning. | |
5034 | |
5035 // Closure applied to reference fields in code blobs (specifically nmethods) | |
5036 // to determine whether an nmethod contains references that point into | |
5037 // the collection set. Used as a predicate when walking code roots so | |
5038 // that only nmethods that point into the collection set are added to the | |
5039 // 'marked' list. | |
5040 | |
5041 class G1FilteredCodeBlobToOopClosure : public CodeBlobToOopClosure { | |
5042 | |
5043 class G1PointsIntoCSOopClosure : public OopClosure { | |
5044 G1CollectedHeap* _g1; | |
5045 bool _points_into_cs; | |
5046 public: | |
5047 G1PointsIntoCSOopClosure(G1CollectedHeap* g1) : | |
5048 _g1(g1), _points_into_cs(false) { } | |
5049 | |
5050 bool points_into_cs() const { return _points_into_cs; } | |
5051 | |
5052 template <class T> | |
5053 void do_oop_nv(T* p) { | |
5054 if (!_points_into_cs) { | |
5055 T heap_oop = oopDesc::load_heap_oop(p); | |
5056 if (!oopDesc::is_null(heap_oop) && | |
5057 _g1->in_cset_fast_test(oopDesc::decode_heap_oop_not_null(heap_oop))) { | |
5058 _points_into_cs = true; | |
5059 } | |
5060 } | |
5061 } | |
5062 | |
5063 virtual void do_oop(oop* p) { do_oop_nv(p); } | |
5064 virtual void do_oop(narrowOop* p) { do_oop_nv(p); } | |
5065 }; | |
5066 | |
5067 G1CollectedHeap* _g1; | |
5068 | |
5069 public: | |
5070 G1FilteredCodeBlobToOopClosure(G1CollectedHeap* g1, OopClosure* cl) : | |
5071 CodeBlobToOopClosure(cl, true), _g1(g1) { } | |
5072 | |
5073 virtual void do_code_blob(CodeBlob* cb) { | |
5074 nmethod* nm = cb->as_nmethod_or_null(); | |
5075 if (nm != NULL && !(nm->test_oops_do_mark())) { | |
5076 G1PointsIntoCSOopClosure predicate_cl(_g1); | |
5077 nm->oops_do(&predicate_cl); | |
5078 | |
5079 if (predicate_cl.points_into_cs()) { | |
5080 // At least one of the reference fields or the oop relocations | |
5081 // in the nmethod points into the collection set. We have to | |
5082 // 'mark' this nmethod. | |
5083 // Note: Revisit the following if CodeBlobToOopClosure::do_code_blob() | |
5084 // or MarkingCodeBlobClosure::do_code_blob() change. | |
5085 if (!nm->test_set_oops_do_mark()) { | |
5086 do_newly_marked_nmethod(nm); | |
5087 } | |
5088 } | |
5089 } | |
5090 } | |
5091 }; | |
5092 | |
5093 // This method is run in a GC worker. | 5118 // This method is run in a GC worker. |
5094 | 5119 |
5095 void | 5120 void |
5096 G1CollectedHeap:: | 5121 G1CollectedHeap:: |
5097 g1_process_strong_roots(bool is_scavenging, | 5122 g1_process_strong_roots(bool is_scavenging, |
5105 double ext_roots_start = os::elapsedTime(); | 5130 double ext_roots_start = os::elapsedTime(); |
5106 double closure_app_time_sec = 0.0; | 5131 double closure_app_time_sec = 0.0; |
5107 | 5132 |
5108 BufferingOopClosure buf_scan_non_heap_roots(scan_non_heap_roots); | 5133 BufferingOopClosure buf_scan_non_heap_roots(scan_non_heap_roots); |
5109 | 5134 |
5110 // Walk the code cache w/o buffering, because StarTask cannot handle | 5135 assert(so & SO_CodeCache || scan_rs != NULL, "must scan code roots somehow"); |
5111 // unaligned oop locations. | 5136 // Walk the code cache/strong code roots w/o buffering, because StarTask |
5112 G1FilteredCodeBlobToOopClosure eager_scan_code_roots(this, scan_non_heap_roots); | 5137 // cannot handle unaligned oop locations. |
5138 CodeBlobToOopClosure eager_scan_code_roots(scan_non_heap_roots, true /* do_marking */); | |
5113 | 5139 |
5114 process_strong_roots(false, // no scoping; this is parallel code | 5140 process_strong_roots(false, // no scoping; this is parallel code |
5115 is_scavenging, so, | 5141 is_scavenging, so, |
5116 &buf_scan_non_heap_roots, | 5142 &buf_scan_non_heap_roots, |
5117 &eager_scan_code_roots, | 5143 &eager_scan_code_roots, |
5152 satb_filtering_ms = (os::elapsedTime() - satb_filter_start) * 1000.0; | 5178 satb_filtering_ms = (os::elapsedTime() - satb_filter_start) * 1000.0; |
5153 } | 5179 } |
5154 } | 5180 } |
5155 g1_policy()->phase_times()->record_satb_filtering_time(worker_i, satb_filtering_ms); | 5181 g1_policy()->phase_times()->record_satb_filtering_time(worker_i, satb_filtering_ms); |
5156 | 5182 |
5183 // If this is an initial mark pause, and we're not scanning | |
5184 // the entire code cache, we need to mark the oops in the | |
5185 // strong code root lists for the regions that are not in | |
5186 // the collection set. | |
5187 // Note all threads participate in this set of root tasks. | |
5188 double mark_strong_code_roots_ms = 0.0; | |
5189 if (g1_policy()->during_initial_mark_pause() && !(so & SO_CodeCache)) { | |
5190 double mark_strong_roots_start = os::elapsedTime(); | |
5191 mark_strong_code_roots(worker_i); | |
5192 mark_strong_code_roots_ms = (os::elapsedTime() - mark_strong_roots_start) * 1000.0; | |
5193 } | |
5194 g1_policy()->phase_times()->record_strong_code_root_mark_time(worker_i, mark_strong_code_roots_ms); | |
5195 | |
5157 // Now scan the complement of the collection set. | 5196 // Now scan the complement of the collection set. |
5158 if (scan_rs != NULL) { | 5197 if (scan_rs != NULL) { |
5159 g1_rem_set()->oops_into_collection_set_do(scan_rs, worker_i); | 5198 g1_rem_set()->oops_into_collection_set_do(scan_rs, &eager_scan_code_roots, worker_i); |
5160 } | 5199 } |
5161 _process_strong_tasks->all_tasks_completed(); | 5200 _process_strong_tasks->all_tasks_completed(); |
5162 } | 5201 } |
5163 | 5202 |
5164 void | 5203 void |
5772 // objects (and their reachable sub-graphs) that were | 5811 // objects (and their reachable sub-graphs) that were |
5773 // not copied during the pause. | 5812 // not copied during the pause. |
5774 process_discovered_references(n_workers); | 5813 process_discovered_references(n_workers); |
5775 | 5814 |
5776 // Weak root processing. | 5815 // Weak root processing. |
5777 // Note: when JSR 292 is enabled and code blobs can contain | |
5778 // non-perm oops then we will need to process the code blobs | |
5779 // here too. | |
5780 { | 5816 { |
5781 G1STWIsAliveClosure is_alive(this); | 5817 G1STWIsAliveClosure is_alive(this); |
5782 G1KeepAliveClosure keep_alive(this); | 5818 G1KeepAliveClosure keep_alive(this); |
5783 JNIHandles::weak_oops_do(&is_alive, &keep_alive); | 5819 JNIHandles::weak_oops_do(&is_alive, &keep_alive); |
5784 } | 5820 } |
5789 // Reset and re-enable the hot card cache. | 5825 // Reset and re-enable the hot card cache. |
5790 // Note the counts for the cards in the regions in the | 5826 // Note the counts for the cards in the regions in the |
5791 // collection set are reset when the collection set is freed. | 5827 // collection set are reset when the collection set is freed. |
5792 hot_card_cache->reset_hot_cache(); | 5828 hot_card_cache->reset_hot_cache(); |
5793 hot_card_cache->set_use_cache(true); | 5829 hot_card_cache->set_use_cache(true); |
5830 | |
5831 // Migrate the strong code roots attached to each region in | |
5832 // the collection set. Ideally we would like to do this | |
5833 // after we have finished the scanning/evacuation of the | |
5834 // strong code roots for a particular heap region. | |
5835 migrate_strong_code_roots(); | |
5836 | |
5837 if (g1_policy()->during_initial_mark_pause()) { | |
5838 // Reset the claim values set during marking the strong code roots | |
5839 reset_heap_region_claim_values(); | |
5840 } | |
5794 | 5841 |
5795 finalize_for_evac_failure(); | 5842 finalize_for_evac_failure(); |
5796 | 5843 |
5797 if (evacuation_failed()) { | 5844 if (evacuation_failed()) { |
5798 remove_self_forwarding_pointers(); | 5845 remove_self_forwarding_pointers(); |
6586 | 6633 |
6587 _old_set.verify_end(); | 6634 _old_set.verify_end(); |
6588 _humongous_set.verify_end(); | 6635 _humongous_set.verify_end(); |
6589 _free_list.verify_end(); | 6636 _free_list.verify_end(); |
6590 } | 6637 } |
6638 | |
6639 // Optimized nmethod scanning | |
6640 | |
6641 class RegisterNMethodOopClosure: public OopClosure { | |
6642 G1CollectedHeap* _g1h; | |
6643 nmethod* _nm; | |
6644 | |
6645 template <class T> void do_oop_work(T* p) { | |
6646 T heap_oop = oopDesc::load_heap_oop(p); | |
6647 if (!oopDesc::is_null(heap_oop)) { | |
6648 oop obj = oopDesc::decode_heap_oop_not_null(heap_oop); | |
6649 HeapRegion* hr = _g1h->heap_region_containing(obj); | |
6650 assert(!hr->isHumongous(), "code root in humongous region?"); | |
6651 | |
6652 // HeapRegion::add_strong_code_root() avoids adding duplicate | |
6653 // entries but having duplicates is OK since we "mark" nmethods | |
6654 // as visited when we scan the strong code root lists during the GC. | |
6655 hr->add_strong_code_root(_nm); | |
6656 assert(hr->rem_set()->strong_code_roots_list_contains(_nm), "add failed?"); | |
6657 } | |
6658 } | |
6659 | |
6660 public: | |
6661 RegisterNMethodOopClosure(G1CollectedHeap* g1h, nmethod* nm) : | |
6662 _g1h(g1h), _nm(nm) {} | |
6663 | |
6664 void do_oop(oop* p) { do_oop_work(p); } | |
6665 void do_oop(narrowOop* p) { do_oop_work(p); } | |
6666 }; | |
6667 | |
6668 class UnregisterNMethodOopClosure: public OopClosure { | |
6669 G1CollectedHeap* _g1h; | |
6670 nmethod* _nm; | |
6671 | |
6672 template <class T> void do_oop_work(T* p) { | |
6673 T heap_oop = oopDesc::load_heap_oop(p); | |
6674 if (!oopDesc::is_null(heap_oop)) { | |
6675 oop obj = oopDesc::decode_heap_oop_not_null(heap_oop); | |
6676 HeapRegion* hr = _g1h->heap_region_containing(obj); | |
6677 assert(!hr->isHumongous(), "code root in humongous region?"); | |
6678 hr->remove_strong_code_root(_nm); | |
6679 assert(!hr->rem_set()->strong_code_roots_list_contains(_nm), "remove failed?"); | |
6680 } | |
6681 } | |
6682 | |
6683 public: | |
6684 UnregisterNMethodOopClosure(G1CollectedHeap* g1h, nmethod* nm) : | |
6685 _g1h(g1h), _nm(nm) {} | |
6686 | |
6687 void do_oop(oop* p) { do_oop_work(p); } | |
6688 void do_oop(narrowOop* p) { do_oop_work(p); } | |
6689 }; | |
6690 | |
6691 void G1CollectedHeap::register_nmethod(nmethod* nm) { | |
6692 CollectedHeap::register_nmethod(nm); | |
6693 | |
6694 guarantee(nm != NULL, "sanity"); | |
6695 RegisterNMethodOopClosure reg_cl(this, nm); | |
6696 nm->oops_do(®_cl); | |
6697 } | |
6698 | |
6699 void G1CollectedHeap::unregister_nmethod(nmethod* nm) { | |
6700 CollectedHeap::unregister_nmethod(nm); | |
6701 | |
6702 guarantee(nm != NULL, "sanity"); | |
6703 UnregisterNMethodOopClosure reg_cl(this, nm); | |
6704 nm->oops_do(®_cl, true); | |
6705 } | |
6706 | |
6707 class MigrateCodeRootsHeapRegionClosure: public HeapRegionClosure { | |
6708 public: | |
6709 bool doHeapRegion(HeapRegion *hr) { | |
6710 assert(!hr->isHumongous(), "humongous region in collection set?"); | |
6711 hr->migrate_strong_code_roots(); | |
6712 return false; | |
6713 } | |
6714 }; | |
6715 | |
6716 void G1CollectedHeap::migrate_strong_code_roots() { | |
6717 MigrateCodeRootsHeapRegionClosure cl; | |
6718 double migrate_start = os::elapsedTime(); | |
6719 collection_set_iterate(&cl); | |
6720 double migration_time_ms = (os::elapsedTime() - migrate_start) * 1000.0; | |
6721 g1_policy()->phase_times()->record_strong_code_root_migration_time(migration_time_ms); | |
6722 } | |
6723 | |
// Mark all the code roots that point into regions *not* in the
// collection set.
//
// Note we do not want to use a "marking" CodeBlobToOopClosure while
// walking the code roots lists of regions not in the collection
// set. Suppose we have an nmethod (M) that points to objects in two
// separate regions - one in the collection set (R1) and one not (R2).
// Using a "marking" CodeBlobToOopClosure here would result in "marking"
// nmethod M when walking the code roots for R1. When we come to scan
// the code roots for R2, we would see that M is already marked and it
// would be skipped and the objects in R2 that are referenced from M
// would not be evacuated.
6736 | |
6737 class MarkStrongCodeRootCodeBlobClosure: public CodeBlobClosure { | |
6738 | |
6739 class MarkStrongCodeRootOopClosure: public OopClosure { | |
6740 ConcurrentMark* _cm; | |
6741 HeapRegion* _hr; | |
6742 uint _worker_id; | |
6743 | |
6744 template <class T> void do_oop_work(T* p) { | |
6745 T heap_oop = oopDesc::load_heap_oop(p); | |
6746 if (!oopDesc::is_null(heap_oop)) { | |
6747 oop obj = oopDesc::decode_heap_oop_not_null(heap_oop); | |
6748 // Only mark objects in the region (which is assumed | |
6749 // to be not in the collection set). | |
6750 if (_hr->is_in(obj)) { | |
6751 _cm->grayRoot(obj, (size_t) obj->size(), _worker_id); | |
6752 } | |
6753 } | |
6754 } | |
6755 | |
6756 public: | |
6757 MarkStrongCodeRootOopClosure(ConcurrentMark* cm, HeapRegion* hr, uint worker_id) : | |
6758 _cm(cm), _hr(hr), _worker_id(worker_id) { | |
6759 assert(!_hr->in_collection_set(), "sanity"); | |
6760 } | |
6761 | |
6762 void do_oop(narrowOop* p) { do_oop_work(p); } | |
6763 void do_oop(oop* p) { do_oop_work(p); } | |
6764 }; | |
6765 | |
6766 MarkStrongCodeRootOopClosure _oop_cl; | |
6767 | |
6768 public: | |
6769 MarkStrongCodeRootCodeBlobClosure(ConcurrentMark* cm, HeapRegion* hr, uint worker_id): | |
6770 _oop_cl(cm, hr, worker_id) {} | |
6771 | |
6772 void do_code_blob(CodeBlob* cb) { | |
6773 nmethod* nm = (cb == NULL) ? NULL : cb->as_nmethod_or_null(); | |
6774 if (nm != NULL) { | |
6775 nm->oops_do(&_oop_cl); | |
6776 } | |
6777 } | |
6778 }; | |
6779 | |
6780 class MarkStrongCodeRootsHRClosure: public HeapRegionClosure { | |
6781 G1CollectedHeap* _g1h; | |
6782 uint _worker_id; | |
6783 | |
6784 public: | |
6785 MarkStrongCodeRootsHRClosure(G1CollectedHeap* g1h, uint worker_id) : | |
6786 _g1h(g1h), _worker_id(worker_id) {} | |
6787 | |
6788 bool doHeapRegion(HeapRegion *hr) { | |
6789 HeapRegionRemSet* hrrs = hr->rem_set(); | |
6790 if (hr->isHumongous()) { | |
6791 // Code roots should never be attached to a humongous region | |
6792 assert(hrrs->strong_code_roots_list_length() == 0, "sanity"); | |
6793 return false; | |
6794 } | |
6795 | |
6796 if (hr->in_collection_set()) { | |
6797 // Don't mark code roots into regions in the collection set here. | |
6798 // They will be marked when we scan them. | |
6799 return false; | |
6800 } | |
6801 | |
6802 MarkStrongCodeRootCodeBlobClosure cb_cl(_g1h->concurrent_mark(), hr, _worker_id); | |
6803 hr->strong_code_roots_do(&cb_cl); | |
6804 return false; | |
6805 } | |
6806 }; | |
6807 | |
6808 void G1CollectedHeap::mark_strong_code_roots(uint worker_id) { | |
6809 MarkStrongCodeRootsHRClosure cl(this, worker_id); | |
6810 heap_region_par_iterate_chunked(&cl, | |
6811 worker_id, | |
6812 workers()->active_workers(), | |
6813 HeapRegion::ParMarkRootClaimValue); | |
6814 } | |
6815 | |
6816 class RebuildStrongCodeRootClosure: public CodeBlobClosure { | |
6817 G1CollectedHeap* _g1h; | |
6818 | |
6819 public: | |
6820 RebuildStrongCodeRootClosure(G1CollectedHeap* g1h) : | |
6821 _g1h(g1h) {} | |
6822 | |
6823 void do_code_blob(CodeBlob* cb) { | |
6824 nmethod* nm = (cb != NULL) ? cb->as_nmethod_or_null() : NULL; | |
6825 if (nm == NULL) { | |
6826 return; | |
6827 } | |
6828 | |
6829 if (ScavengeRootsInCode && nm->detect_scavenge_root_oops()) { | |
6830 _g1h->register_nmethod(nm); | |
6831 } | |
6832 } | |
6833 }; | |
6834 | |
6835 void G1CollectedHeap::rebuild_strong_code_roots() { | |
6836 RebuildStrongCodeRootClosure blob_cl(this); | |
6837 CodeCache::blobs_do(&blob_cl); | |
6838 } |