comparison src/share/vm/gc_implementation/g1/g1MarkSweep.cpp @ 20804:7848fc12602b

Merge with jdk8u40-b25
author Gilles Duboscq <gilles.m.duboscq@oracle.com>
date Tue, 07 Apr 2015 14:58:49 +0200
parents 52b4284cb496 d35872270666
children d86b226e331a
comparing 20184:84105dcdb05b with 20804:7848fc12602b
 }
 
 void G1MarkSweep::mark_sweep_phase1(bool& marked_for_unloading,
                                     bool clear_all_softrefs) {
   // Recursively traverse all live objects and mark them
-  GCTraceTime tm("phase 1", G1Log::fine() && Verbose, true, gc_timer());
+  GCTraceTime tm("phase 1", G1Log::fine() && Verbose, true, gc_timer(), gc_tracer()->gc_id());
   GenMarkSweep::trace(" 1");
 
   SharedHeap* sh = SharedHeap::heap();
 
-  // Need cleared claim bits for the strong roots processing
+  // Need cleared claim bits for the roots processing
   ClassLoaderDataGraph::clear_claimed_marks();
 
-  sh->process_strong_roots(true,  // activate StrongRootsScope
-                           false, // not scavenging.
-                           SharedHeap::SO_SystemClasses,
-                           &GenMarkSweep::follow_root_closure,
-                           &GenMarkSweep::follow_code_root_closure,
-                           &GenMarkSweep::follow_klass_closure);
+  MarkingCodeBlobClosure follow_code_closure(&GenMarkSweep::follow_root_closure, !CodeBlobToOopClosure::FixRelocations);
+  sh->process_strong_roots(true, // activate StrongRootsScope
+                           SharedHeap::SO_None,
+                           &GenMarkSweep::follow_root_closure,
+                           &GenMarkSweep::follow_cld_closure,
+                           &follow_code_closure);
 
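Note: the new marking entry point drops the separate "not scavenging" flag and the Klass-based closure; class-loader data is walked through follow_cld_closure, and compiled code is walked through a MarkingCodeBlobClosure built over the same root closure, with FixRelocations negated since nothing has moved yet during marking. A minimal standalone sketch of that adapter shape, using invented stand-in types rather than the real VM classes:

#include <cstddef>
#include <cstdio>
#include <vector>

// Visits individual object pointers ("oops").
struct OopClosure {
  virtual void do_oop(void** p) = 0;
  virtual ~OopClosure() {}
};

// A compiled-code blob with object pointers embedded in it.
struct CodeBlob {
  std::vector<void*> embedded_oops;
};

// Adapter: lets any OopClosure also walk code blobs. The marking pass would
// construct it with fix_relocations = false, the adjust pass with true.
struct CodeBlobToOopClosure {
  OopClosure* _cl;
  bool _fix_relocations;
  CodeBlobToOopClosure(OopClosure* cl, bool fix) : _cl(cl), _fix_relocations(fix) {}
  void do_code_blob(CodeBlob* cb) {
    for (std::size_t i = 0; i < cb->embedded_oops.size(); i++) {
      _cl->do_oop(&cb->embedded_oops[i]);  // hand each embedded pointer on
    }
    // A real implementation would also patch relocation records here
    // when _fix_relocations is set.
  }
};

struct CountingClosure : OopClosure {
  int count = 0;
  void do_oop(void** p) override { if (*p != nullptr) count++; }
};

int main() {
  int dummy;
  CodeBlob cb;
  cb.embedded_oops.push_back(&dummy);
  CountingClosure cc;
  CodeBlobToOopClosure adapter(&cc, /*fix=*/false);
  adapter.do_code_blob(&cb);
  std::printf("visited %d non-null oops\n", cc.count);
  return 0;
}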
141 // Process reference objects found during marking 141 // Process reference objects found during marking
142 ReferenceProcessor* rp = GenMarkSweep::ref_processor(); 142 ReferenceProcessor* rp = GenMarkSweep::ref_processor();
143 assert(rp == G1CollectedHeap::heap()->ref_processor_stw(), "Sanity"); 143 assert(rp == G1CollectedHeap::heap()->ref_processor_stw(), "Sanity");
144 144
146 const ReferenceProcessorStats& stats = 146 const ReferenceProcessorStats& stats =
147 rp->process_discovered_references(&GenMarkSweep::is_alive, 147 rp->process_discovered_references(&GenMarkSweep::is_alive,
148 &GenMarkSweep::keep_alive, 148 &GenMarkSweep::keep_alive,
149 &GenMarkSweep::follow_stack_closure, 149 &GenMarkSweep::follow_stack_closure,
150 NULL, 150 NULL,
151 gc_timer()); 151 gc_timer(),
152 gc_tracer()->gc_id());
152 gc_tracer()->report_gc_reference_stats(stats); 153 gc_tracer()->report_gc_reference_stats(stats);
153 154
154 155
155 // This is the point where the entire marking should have completed. 156 // This is the point where the entire marking should have completed.
156 assert(GenMarkSweep::_marking_stack.is_empty(), "Marking should have completed"); 157 assert(GenMarkSweep::_marking_stack.is_empty(), "Marking should have completed");
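Note: the recurring theme of this merge is threading a GC id into GCTraceTime and process_discovered_references so that timing output and tracing events can be correlated per collection. A rough, self-contained illustration of the idea, with hypothetical names:

#include <chrono>
#include <cstdio>

// Scoped timer that stamps its output with the id of the owning GC,
// loosely modelled on what GCTraceTime does with gc_tracer()->gc_id().
class ScopedPhaseTimer {
  const char* _title;
  unsigned _gc_id;
  std::chrono::steady_clock::time_point _start;
public:
  ScopedPhaseTimer(const char* title, unsigned gc_id)
    : _title(title), _gc_id(gc_id), _start(std::chrono::steady_clock::now()) {}
  ~ScopedPhaseTimer() {
    long long ms = std::chrono::duration_cast<std::chrono::milliseconds>(
        std::chrono::steady_clock::now() - _start).count();
    std::printf("[GC#%u] %s, %lld ms\n", _gc_id, _title, ms);
  }
};

int main() {
  unsigned gc_id = 7;  // one id per collection, shared by all its phases
  { ScopedPhaseTimer t("phase 1", gc_id); /* mark live objects */ }
  { ScopedPhaseTimer t("phase 2", gc_id); /* compute new addresses */ }
  return 0;
}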
[...]
   }
 
   gc_tracer()->report_object_count_after_gc(&GenMarkSweep::is_alive);
 }
 
-class G1PrepareCompactClosure: public HeapRegionClosure {
-  G1CollectedHeap* _g1h;
-  ModRefBarrierSet* _mrbs;
-  CompactPoint _cp;
-  HeapRegionSetCount _humongous_regions_removed;
-
-  void free_humongous_region(HeapRegion* hr) {
-    HeapWord* end = hr->end();
-    FreeRegionList dummy_free_list("Dummy Free List for G1MarkSweep");
-
-    assert(hr->startsHumongous(),
-           "Only the start of a humongous region should be freed.");
-
-    hr->set_containing_set(NULL);
-    _humongous_regions_removed.increment(1u, hr->capacity());
-
-    _g1h->free_humongous_region(hr, &dummy_free_list, false /* par */);
-    hr->prepare_for_compaction(&_cp);
-    // Also clear the part of the card table that will be unused after
-    // compaction.
-    _mrbs->clear(MemRegion(hr->compaction_top(), end));
-    dummy_free_list.remove_all();
-  }
-
-public:
-  G1PrepareCompactClosure(CompactibleSpace* cs)
-  : _g1h(G1CollectedHeap::heap()),
-    _mrbs(_g1h->g1_barrier_set()),
-    _cp(NULL, cs, cs->initialize_threshold()),
-    _humongous_regions_removed() { }
-
-  void update_sets() {
-    // We'll recalculate total used bytes and recreate the free list
-    // at the end of the GC, so no point in updating those values here.
-    HeapRegionSetCount empty_set;
-    _g1h->remove_from_old_sets(empty_set, _humongous_regions_removed);
-  }
-
-  bool doHeapRegion(HeapRegion* hr) {
-    if (hr->isHumongous()) {
-      if (hr->startsHumongous()) {
-        oop obj = oop(hr->bottom());
-        if (obj->is_gc_marked()) {
-          obj->forward_to(obj);
-        } else {
-          free_humongous_region(hr);
-        }
-      } else {
-        assert(hr->continuesHumongous(), "Invalid humongous.");
-      }
-    } else {
-      hr->prepare_for_compaction(&_cp);
-      // Also clear the part of the card table that will be unused after
-      // compaction.
-      _mrbs->clear(MemRegion(hr->compaction_top(), hr->end()));
-    }
-    return false;
-  }
-};
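Note: the entire closure class is deleted from this file; judging by the definitions appended at the bottom of this comparison, its declaration moved to a header and the member functions became out-of-line. For readers new to the pattern, a toy version of the region-closure protocol it implements (invented types, not the VM's):

#include <cstddef>
#include <cstdio>
#include <vector>

struct HeapRegion { bool humongous = false; };

// Visitor over heap regions; returning true aborts the iteration early.
struct HeapRegionClosure {
  virtual bool doHeapRegion(HeapRegion* r) = 0;
  virtual ~HeapRegionClosure() {}
};

struct Heap {
  std::vector<HeapRegion> regions;
  void heap_region_iterate(HeapRegionClosure* cl) {
    for (std::size_t i = 0; i < regions.size(); i++) {
      if (cl->doHeapRegion(&regions[i])) return;  // closure asked to stop
    }
  }
};

struct CountHumongous : HeapRegionClosure {
  int n = 0;
  bool doHeapRegion(HeapRegion* r) override {
    if (r->humongous) n++;
    return false;  // never abort: visit every region
  }
};

int main() {
  Heap h;
  h.regions.resize(4);
  h.regions[2].humongous = true;
  CountHumongous c;
  h.heap_region_iterate(&c);
  std::printf("humongous regions: %d\n", c.n);
  return 0;
}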
 
 void G1MarkSweep::mark_sweep_phase2() {
   // Now all live objects are marked, compute the new object addresses.
 
   // It is not required that we traverse spaces in the same order in
   // phase2, phase3 and phase4, but the ValidateMarkSweep live oops
   // tracking expects us to do so. See comment under phase4.
 
-  G1CollectedHeap* g1h = G1CollectedHeap::heap();
-
-  GCTraceTime tm("phase 2", G1Log::fine() && Verbose, true, gc_timer());
+  GCTraceTime tm("phase 2", G1Log::fine() && Verbose, true, gc_timer(), gc_tracer()->gc_id());
   GenMarkSweep::trace("2");
 
-  // find the first region
-  HeapRegion* r = g1h->region_at(0);
-  CompactibleSpace* sp = r;
-  if (r->isHumongous() && oop(r->bottom())->is_gc_marked()) {
-    sp = r->next_compaction_space();
-  }
-
-  G1PrepareCompactClosure blk(sp);
-  g1h->heap_region_iterate(&blk);
-  blk.update_sets();
+  prepare_compaction();
 }
 
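Note: the deleted "find the first region" preamble existed only to seed the compaction point before iterating. The replacement, prepare_compaction(), defers that: as the new prepare_for_compaction() further down shows, the CompactPoint is bound lazily to the first compactible region the closure visits. A toy rendering of that switch, with made-up types:

#include <cstdio>

struct Region { bool live_humongous; };

// Destination cursor for sliding compaction; unbound until first use.
struct CompactPoint {
  Region* space = nullptr;
  bool initialized() const { return space != nullptr; }
};

// Lazily bind the compact point instead of special-casing region 0 up front.
void prepare_region(CompactPoint& cp, Region* r) {
  if (!cp.initialized()) {
    cp.space = r;  // first compactible region seen becomes the target
  }
  // ...forwarding addresses for objects in *r would be computed here...
}

int main() {
  Region rs[3] = {{true}, {false}, {false}};
  CompactPoint cp;
  for (Region& r : rs) {
    if (r.live_humongous) continue;  // a live humongous region stays put
    prepare_region(cp, &r);
  }
  std::printf("compaction starts at region %d\n", (int)(cp.space - rs));
  return 0;
}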
 class G1AdjustPointersClosure: public HeapRegionClosure {
 public:
   bool doHeapRegion(HeapRegion* r) {
[...]
 
 void G1MarkSweep::mark_sweep_phase3() {
   G1CollectedHeap* g1h = G1CollectedHeap::heap();
 
   // Adjust the pointers to reflect the new locations
-  GCTraceTime tm("phase 3", G1Log::fine() && Verbose, true, gc_timer());
+  GCTraceTime tm("phase 3", G1Log::fine() && Verbose, true, gc_timer(), gc_tracer()->gc_id());
   GenMarkSweep::trace("3");
 
   SharedHeap* sh = SharedHeap::heap();
 
-  // Need cleared claim bits for the strong roots processing
+  // Need cleared claim bits for the roots processing
   ClassLoaderDataGraph::clear_claimed_marks();
 
-  sh->process_strong_roots(true,  // activate StrongRootsScope
-                           false, // not scavenging.
-                           SharedHeap::SO_AllClasses,
-                           &GenMarkSweep::adjust_pointer_closure,
-                           NULL,  // do not touch code cache here
-                           &GenMarkSweep::adjust_klass_closure);
+  CodeBlobToOopClosure adjust_code_closure(&GenMarkSweep::adjust_pointer_closure, CodeBlobToOopClosure::FixRelocations);
+  sh->process_all_roots(true, // activate StrongRootsScope
+                        SharedHeap::SO_AllCodeCache,
+                        &GenMarkSweep::adjust_pointer_closure,
+                        &GenMarkSweep::adjust_cld_closure,
+                        &adjust_code_closure);
 
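Note: phase 1 passes SO_None because marking only needs the strong roots that process_strong_roots always covers, whereas phase 3 calls process_all_roots with SO_AllCodeCache: every surviving reference, including oops embedded in compiled code, must be rewritten, and relocations are fixed here because objects are about to move. The scan options appear to act as a bitmask of optional root groups; a guessed-at sketch, not the actual SharedHeap definition:

#include <cstdio>

// Hypothetical shape of the scan-option bitmask: each bit opts one extra
// root group into the walk; strong roots are always included regardless.
enum ScanOption {
  SO_None         = 0,
  SO_AllClasses   = 1 << 0,
  SO_Strings      = 1 << 1,
  SO_AllCodeCache = 1 << 2
};

int main() {
  int phase1 = SO_None;          // marking: strong roots suffice
  int phase3 = SO_AllCodeCache;  // adjusting: embedded code oops too
  std::printf("walk code cache? phase1=%d phase3=%d\n",
              (phase1 & SO_AllCodeCache) != 0,
              (phase3 & SO_AllCodeCache) != 0);
  return 0;
}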
   assert(GenMarkSweep::ref_processor() == g1h->ref_processor_stw(), "Sanity");
   g1h->ref_processor_stw()->weak_oops_do(&GenMarkSweep::adjust_pointer_closure);
 
   // Now adjust pointers in remaining weak roots. (All of which should
   // have been cleared if they pointed to non-surviving objects.)
-  g1h->g1_process_weak_roots(&GenMarkSweep::adjust_pointer_closure);
+  sh->process_weak_roots(&GenMarkSweep::adjust_pointer_closure);
 
   if (G1StringDedup::is_enabled()) {
     G1StringDedup::oops_do(&GenMarkSweep::adjust_pointer_closure);
   }
 
[...]
   // in the same order in phase2, phase3 and phase4. We don't quite do that
   // here (code and comment not fixed for perm removal), so we tell the validate code
   // to use a higher index (saved from phase2) when verifying perm_gen.
   G1CollectedHeap* g1h = G1CollectedHeap::heap();
 
-  GCTraceTime tm("phase 4", G1Log::fine() && Verbose, true, gc_timer());
+  GCTraceTime tm("phase 4", G1Log::fine() && Verbose, true, gc_timer(), gc_tracer()->gc_id());
   GenMarkSweep::trace("4");
 
   G1SpaceCompactClosure blk;
   g1h->heap_region_iterate(&blk);
 
 }
+
+void G1MarkSweep::prepare_compaction_work(G1PrepareCompactClosure* blk) {
+  G1CollectedHeap* g1h = G1CollectedHeap::heap();
+  g1h->heap_region_iterate(blk);
+  blk->update_sets();
+}
+
+void G1PrepareCompactClosure::free_humongous_region(HeapRegion* hr) {
+  HeapWord* end = hr->end();
+  FreeRegionList dummy_free_list("Dummy Free List for G1MarkSweep");
+
+  assert(hr->startsHumongous(),
+         "Only the start of a humongous region should be freed.");
+
+  hr->set_containing_set(NULL);
+  _humongous_regions_removed.increment(1u, hr->capacity());
+
+  _g1h->free_humongous_region(hr, &dummy_free_list, false /* par */);
+  prepare_for_compaction(hr, end);
+  dummy_free_list.remove_all();
+}
+
+void G1PrepareCompactClosure::prepare_for_compaction(HeapRegion* hr, HeapWord* end) {
+  // If this is the first live region that we came across which we can compact,
+  // initialize the CompactPoint.
+  if (!is_cp_initialized()) {
+    _cp.space = hr;
+    _cp.threshold = hr->initialize_threshold();
+  }
+  prepare_for_compaction_work(&_cp, hr, end);
+}
+
+void G1PrepareCompactClosure::prepare_for_compaction_work(CompactPoint* cp,
+                                                          HeapRegion* hr,
+                                                          HeapWord* end) {
+  hr->prepare_for_compaction(cp);
+  // Also clear the part of the card table that will be unused after
+  // compaction.
+  _mrbs->clear(MemRegion(hr->compaction_top(), end));
+}
+
+void G1PrepareCompactClosure::update_sets() {
+  // We'll recalculate total used bytes and recreate the free list
+  // at the end of the GC, so no point in updating those values here.
+  HeapRegionSetCount empty_set;
+  _g1h->remove_from_old_sets(empty_set, _humongous_regions_removed);
+}
+
+bool G1PrepareCompactClosure::doHeapRegion(HeapRegion* hr) {
+  if (hr->isHumongous()) {
+    if (hr->startsHumongous()) {
+      oop obj = oop(hr->bottom());
+      if (obj->is_gc_marked()) {
+        obj->forward_to(obj);
+      } else {
+        free_humongous_region(hr);
+      }
+    } else {
+      assert(hr->continuesHumongous(), "Invalid humongous.");
+    }
+  } else {
+    prepare_for_compaction(hr, hr->end());
+  }
+  return false;
+}
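Note: splitting the old monolithic doHeapRegion() into prepare_compaction_work(), prepare_for_compaction() and prepare_for_compaction_work() reads like a template-method refactoring: a subclass can now override a single step without duplicating the region walk. A generic sketch of that shape, all names invented:

#include <cstdio>

// Template-method shape: the base class fixes the control flow, subclasses
// override only the step they need to change.
class PrepareCompact {
public:
  void run_region(int region) {  // fixed skeleton
    plan_forwarding(region);
    clear_stale_metadata(region);
  }
  virtual ~PrepareCompact() {}
protected:
  virtual void plan_forwarding(int region) {
    std::printf("plan forwarding in region %d\n", region);
  }
  virtual void clear_stale_metadata(int region) {
    std::printf("clear card table tail of region %d\n", region);
  }
};

class NoClearPrepareCompact : public PrepareCompact {
protected:
  void clear_stale_metadata(int) override { /* subclass opts out */ }
};

int main() {
  NoClearPrepareCompact p;
  p.run_region(3);  // runs the shared skeleton with the overridden step
  return 0;
}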