comparison src/share/vm/gc_implementation/concurrentMarkSweep/concurrentMarkSweepGeneration.cpp @ 10405:f2110083203d

8005849: JEP 167: Event-Based JVM Tracing Reviewed-by: acorn, coleenp, sla Contributed-by: Karen Kinnear <karen.kinnear@oracle.com>, Bengt Rutisson <bengt.rutisson@oracle.com>, Calvin Cheung <calvin.cheung@oracle.com>, Erik Gahlin <erik.gahlin@oracle.com>, Erik Helin <erik.helin@oracle.com>, Jesper Wilhelmsson <jesper.wilhelmsson@oracle.com>, Keith McGuigan <keith.mcguigan@oracle.com>, Mattias Tobiasson <mattias.tobiasson@oracle.com>, Markus Gronlund <markus.gronlund@oracle.com>, Mikael Auno <mikael.auno@oracle.com>, Nils Eliasson <nils.eliasson@oracle.com>, Nils Loodin <nils.loodin@oracle.com>, Rickard Backman <rickard.backman@oracle.com>, Staffan Larsen <staffan.larsen@oracle.com>, Stefan Karlsson <stefan.karlsson@oracle.com>, Yekaterina Kantserova <yekaterina.kantserova@oracle.com>
author sla
date Mon, 10 Jun 2013 11:30:51 +0200
parents 7c5a1b62f53d
children 71180a6e5080
comparison
equal deleted inserted replaced
10404:d0add7016434 10405:f2110083203d
35 #include "gc_implementation/concurrentMarkSweep/concurrentMarkSweepGeneration.inline.hpp" 35 #include "gc_implementation/concurrentMarkSweep/concurrentMarkSweepGeneration.inline.hpp"
36 #include "gc_implementation/concurrentMarkSweep/concurrentMarkSweepThread.hpp" 36 #include "gc_implementation/concurrentMarkSweep/concurrentMarkSweepThread.hpp"
37 #include "gc_implementation/concurrentMarkSweep/vmCMSOperations.hpp" 37 #include "gc_implementation/concurrentMarkSweep/vmCMSOperations.hpp"
38 #include "gc_implementation/parNew/parNewGeneration.hpp" 38 #include "gc_implementation/parNew/parNewGeneration.hpp"
39 #include "gc_implementation/shared/collectorCounters.hpp" 39 #include "gc_implementation/shared/collectorCounters.hpp"
40 #include "gc_implementation/shared/gcTimer.hpp"
41 #include "gc_implementation/shared/gcTrace.hpp"
42 #include "gc_implementation/shared/gcTraceTime.hpp"
40 #include "gc_implementation/shared/isGCActiveMark.hpp" 43 #include "gc_implementation/shared/isGCActiveMark.hpp"
41 #include "gc_interface/collectedHeap.inline.hpp" 44 #include "gc_interface/collectedHeap.inline.hpp"
45 #include "memory/allocation.hpp"
42 #include "memory/cardTableRS.hpp" 46 #include "memory/cardTableRS.hpp"
43 #include "memory/collectorPolicy.hpp" 47 #include "memory/collectorPolicy.hpp"
44 #include "memory/gcLocker.inline.hpp" 48 #include "memory/gcLocker.inline.hpp"
45 #include "memory/genCollectedHeap.hpp" 49 #include "memory/genCollectedHeap.hpp"
46 #include "memory/genMarkSweep.hpp" 50 #include "memory/genMarkSweep.hpp"
58 #include "services/memoryService.hpp" 62 #include "services/memoryService.hpp"
59 #include "services/runtimeService.hpp" 63 #include "services/runtimeService.hpp"
60 64
61 // statics 65 // statics
62 CMSCollector* ConcurrentMarkSweepGeneration::_collector = NULL; 66 CMSCollector* ConcurrentMarkSweepGeneration::_collector = NULL;
63 bool CMSCollector::_full_gc_requested = false; 67 bool CMSCollector::_full_gc_requested = false;
68 GCCause::Cause CMSCollector::_full_gc_cause = GCCause::_no_gc;
64 69
65 ////////////////////////////////////////////////////////////////// 70 //////////////////////////////////////////////////////////////////
66 // In support of CMS/VM thread synchronization 71 // In support of CMS/VM thread synchronization
67 ////////////////////////////////////////////////////////////////// 72 //////////////////////////////////////////////////////////////////
68 // We split use of the CGC_lock into 2 "levels". 73 // We split use of the CGC_lock into 2 "levels".
589 _collector_policy(cp), 594 _collector_policy(cp),
590 _should_unload_classes(false), 595 _should_unload_classes(false),
591 _concurrent_cycles_since_last_unload(0), 596 _concurrent_cycles_since_last_unload(0),
592 _roots_scanning_options(0), 597 _roots_scanning_options(0),
593 _inter_sweep_estimate(CMS_SweepWeight, CMS_SweepPadding), 598 _inter_sweep_estimate(CMS_SweepWeight, CMS_SweepPadding),
594 _intra_sweep_estimate(CMS_SweepWeight, CMS_SweepPadding) 599 _intra_sweep_estimate(CMS_SweepWeight, CMS_SweepPadding),
600 _gc_tracer_cm(new (ResourceObj::C_HEAP, mtGC) CMSTracer()),
601 _gc_timer_cm(new (ResourceObj::C_HEAP, mtGC) ConcurrentGCTimer()),
602 _cms_start_registered(false)
595 { 603 {
596 if (ExplicitGCInvokesConcurrentAndUnloadsClasses) { 604 if (ExplicitGCInvokesConcurrentAndUnloadsClasses) {
597 ExplicitGCInvokesConcurrent = true; 605 ExplicitGCInvokesConcurrent = true;
598 } 606 }
599 // Now expand the span and allocate the collection support structures 607 // Now expand the span and allocate the collection support structures
1674 } 1682 }
1675 acquire_control_and_collect(full, clear_all_soft_refs); 1683 acquire_control_and_collect(full, clear_all_soft_refs);
1676 _full_gcs_since_conc_gc++; 1684 _full_gcs_since_conc_gc++;
1677 } 1685 }
1678 1686
1679 void CMSCollector::request_full_gc(unsigned int full_gc_count) { 1687 void CMSCollector::request_full_gc(unsigned int full_gc_count, GCCause::Cause cause) {
1680 GenCollectedHeap* gch = GenCollectedHeap::heap(); 1688 GenCollectedHeap* gch = GenCollectedHeap::heap();
1681 unsigned int gc_count = gch->total_full_collections(); 1689 unsigned int gc_count = gch->total_full_collections();
1682 if (gc_count == full_gc_count) { 1690 if (gc_count == full_gc_count) {
1683 MutexLockerEx y(CGC_lock, Mutex::_no_safepoint_check_flag); 1691 MutexLockerEx y(CGC_lock, Mutex::_no_safepoint_check_flag);
1684 _full_gc_requested = true; 1692 _full_gc_requested = true;
1693 _full_gc_cause = cause;
1685 CGC_lock->notify(); // nudge CMS thread 1694 CGC_lock->notify(); // nudge CMS thread
1686 } else { 1695 } else {
1687 assert(gc_count > full_gc_count, "Error: causal loop"); 1696 assert(gc_count > full_gc_count, "Error: causal loop");
1697 }
1698 }
1699
1700 bool CMSCollector::is_external_interruption() {
1701 GCCause::Cause cause = GenCollectedHeap::heap()->gc_cause();
1702 return GCCause::is_user_requested_gc(cause) ||
1703 GCCause::is_serviceability_requested_gc(cause);
1704 }
1705
1706 void CMSCollector::report_concurrent_mode_interruption() {
1707 if (is_external_interruption()) {
1708 if (PrintGCDetails) {
1709 gclog_or_tty->print(" (concurrent mode interrupted)");
1710 }
1711 } else {
1712 if (PrintGCDetails) {
1713 gclog_or_tty->print(" (concurrent mode failure)");
1714 }
1715 _gc_tracer_cm->report_concurrent_mode_failure();
1688 } 1716 }
1689 } 1717 }
1690 1718
1691 1719
1692 // The foreground and background collectors need to coordinate in order 1720 // The foreground and background collectors need to coordinate in order
1843 should_compact = false; 1871 should_compact = false;
1844 } 1872 }
1845 } 1873 }
1846 ) 1874 )
1847 1875
1848 if (PrintGCDetails && first_state > Idling) { 1876 if (first_state > Idling) {
1849 GCCause::Cause cause = GenCollectedHeap::heap()->gc_cause(); 1877 report_concurrent_mode_interruption();
1850 if (GCCause::is_user_requested_gc(cause) ||
1851 GCCause::is_serviceability_requested_gc(cause)) {
1852 gclog_or_tty->print(" (concurrent mode interrupted)");
1853 } else {
1854 gclog_or_tty->print(" (concurrent mode failure)");
1855 }
1856 } 1878 }
1857 1879
1858 set_did_compact(should_compact); 1880 set_did_compact(should_compact);
1859 if (should_compact) { 1881 if (should_compact) {
1860 // If the collection is being acquired from the background 1882 // If the collection is being acquired from the background
1865 // by the mutator). 1887 // by the mutator).
1866 // Scrub the list of those references because Mark-Sweep-Compact 1888 // Scrub the list of those references because Mark-Sweep-Compact
1867 // code assumes referents are not NULL and that all discovered 1889 // code assumes referents are not NULL and that all discovered
1868 // Reference objects are active. 1890 // Reference objects are active.
1869 ref_processor()->clean_up_discovered_references(); 1891 ref_processor()->clean_up_discovered_references();
1892
1893 if (first_state > Idling) {
1894 save_heap_summary();
1895 }
1870 1896
1871 do_compaction_work(clear_all_soft_refs); 1897 do_compaction_work(clear_all_soft_refs);
1872 1898
1873 // Has the GC time limit been exceeded? 1899 // Has the GC time limit been exceeded?
1874 DefNewGeneration* young_gen = _young_gen->as_DefNewGeneration(); 1900 DefNewGeneration* young_gen = _young_gen->as_DefNewGeneration();
1969 1995
1970 // A work method used by the foreground collector to do 1996 // A work method used by the foreground collector to do
1971 // a mark-sweep-compact. 1997 // a mark-sweep-compact.
1972 void CMSCollector::do_compaction_work(bool clear_all_soft_refs) { 1998 void CMSCollector::do_compaction_work(bool clear_all_soft_refs) {
1973 GenCollectedHeap* gch = GenCollectedHeap::heap(); 1999 GenCollectedHeap* gch = GenCollectedHeap::heap();
1974 TraceTime t("CMS:MSC ", PrintGCDetails && Verbose, true, gclog_or_tty); 2000
2001 STWGCTimer* gc_timer = GenMarkSweep::gc_timer();
2002 gc_timer->register_gc_start(os::elapsed_counter());
2003
2004 SerialOldTracer* gc_tracer = GenMarkSweep::gc_tracer();
2005 gc_tracer->report_gc_start(gch->gc_cause(), gc_timer->gc_start());
2006
2007 GCTraceTime t("CMS:MSC ", PrintGCDetails && Verbose, true, NULL);
1975 if (PrintGC && Verbose && !(GCCause::is_user_requested_gc(gch->gc_cause()))) { 2008 if (PrintGC && Verbose && !(GCCause::is_user_requested_gc(gch->gc_cause()))) {
1976 gclog_or_tty->print_cr("Compact ConcurrentMarkSweepGeneration after %d " 2009 gclog_or_tty->print_cr("Compact ConcurrentMarkSweepGeneration after %d "
1977 "collections passed to foreground collector", _full_gcs_since_conc_gc); 2010 "collections passed to foreground collector", _full_gcs_since_conc_gc);
1978 } 2011 }
1979 2012
2060 // Sample collection pause time and reset for collection interval. 2093 // Sample collection pause time and reset for collection interval.
2061 if (UseAdaptiveSizePolicy) { 2094 if (UseAdaptiveSizePolicy) {
2062 size_policy()->msc_collection_end(gch->gc_cause()); 2095 size_policy()->msc_collection_end(gch->gc_cause());
2063 } 2096 }
2064 2097
2098 gc_timer->register_gc_end(os::elapsed_counter());
2099
2100 gc_tracer->report_gc_end(gc_timer->gc_end(), gc_timer->time_partitions());
2101
2065 // For a mark-sweep-compact, compute_new_size() will be called 2102 // For a mark-sweep-compact, compute_new_size() will be called
2066 // in the heap's do_collection() method. 2103 // in the heap's do_collection() method.
2067 } 2104 }
2068 2105
2069 // A work method used by the foreground collector to do 2106 // A work method used by the foreground collector to do
2091 // In the foreground case don't do the precleaning since 2128 // In the foreground case don't do the precleaning since
2092 // it is not done concurrently and there is extra work 2129 // it is not done concurrently and there is extra work
2093 // required. 2130 // required.
2094 _collectorState = FinalMarking; 2131 _collectorState = FinalMarking;
2095 } 2132 }
2096 collect_in_foreground(clear_all_soft_refs); 2133 collect_in_foreground(clear_all_soft_refs, GenCollectedHeap::heap()->gc_cause());
2097 2134
2098 // For a mark-sweep, compute_new_size() will be called 2135 // For a mark-sweep, compute_new_size() will be called
2099 // in the heap's do_collection() method. 2136 // in the heap's do_collection() method.
2100 } 2137 }
2101 2138
2151 // the different locking requirements of the background collector and the 2188 // the different locking requirements of the background collector and the
2152 // foreground collector. There was originally an attempt to share 2189 // foreground collector. There was originally an attempt to share
2153 // one "collect" method between the background collector and the foreground 2190 // one "collect" method between the background collector and the foreground
2154 // collector but the if-then-else required made it cleaner to have 2191 // collector but the if-then-else required made it cleaner to have
2155 // separate methods. 2192 // separate methods.
2156 void CMSCollector::collect_in_background(bool clear_all_soft_refs) { 2193 void CMSCollector::collect_in_background(bool clear_all_soft_refs, GCCause::Cause cause) {
2157 assert(Thread::current()->is_ConcurrentGC_thread(), 2194 assert(Thread::current()->is_ConcurrentGC_thread(),
2158 "A CMS asynchronous collection is only allowed on a CMS thread."); 2195 "A CMS asynchronous collection is only allowed on a CMS thread.");
2159 2196
2160 GenCollectedHeap* gch = GenCollectedHeap::heap(); 2197 GenCollectedHeap* gch = GenCollectedHeap::heap();
2161 { 2198 {
2170 assert(!_foregroundGCShouldWait, "Should be clear"); 2207 assert(!_foregroundGCShouldWait, "Should be clear");
2171 return; 2208 return;
2172 } else { 2209 } else {
2173 assert(_collectorState == Idling, "Should be idling before start."); 2210 assert(_collectorState == Idling, "Should be idling before start.");
2174 _collectorState = InitialMarking; 2211 _collectorState = InitialMarking;
2212 register_gc_start(cause);
2175 // Reset the expansion cause, now that we are about to begin 2213 // Reset the expansion cause, now that we are about to begin
2176 // a new cycle. 2214 // a new cycle.
2177 clear_expansion_cause(); 2215 clear_expansion_cause();
2178 2216
2179 // Clear the MetaspaceGC flag since a concurrent collection 2217 // Clear the MetaspaceGC flag since a concurrent collection
2182 } 2220 }
2183 // Decide if we want to enable class unloading as part of the 2221 // Decide if we want to enable class unloading as part of the
2184 // ensuing concurrent GC cycle. 2222 // ensuing concurrent GC cycle.
2185 update_should_unload_classes(); 2223 update_should_unload_classes();
2186 _full_gc_requested = false; // acks all outstanding full gc requests 2224 _full_gc_requested = false; // acks all outstanding full gc requests
2225 _full_gc_cause = GCCause::_no_gc;
2187 // Signal that we are about to start a collection 2226 // Signal that we are about to start a collection
2188 gch->increment_total_full_collections(); // ... starting a collection cycle 2227 gch->increment_total_full_collections(); // ... starting a collection cycle
2189 _collection_count_start = gch->total_full_collections(); 2228 _collection_count_start = gch->total_full_collections();
2190 } 2229 }
2191 2230
2261 switch (_collectorState) { 2300 switch (_collectorState) {
2262 case InitialMarking: 2301 case InitialMarking:
2263 { 2302 {
2264 ReleaseForegroundGC x(this); 2303 ReleaseForegroundGC x(this);
2265 stats().record_cms_begin(); 2304 stats().record_cms_begin();
2266
2267 VM_CMS_Initial_Mark initial_mark_op(this); 2305 VM_CMS_Initial_Mark initial_mark_op(this);
2268 VMThread::execute(&initial_mark_op); 2306 VMThread::execute(&initial_mark_op);
2269 } 2307 }
2270 // The collector state may be any legal state at this point 2308 // The collector state may be any legal state at this point
2271 // since the background collector may have yielded to the 2309 // since the background collector may have yielded to the
2341 ReleaseForegroundGC x(this); // unblock FG collection 2379 ReleaseForegroundGC x(this); // unblock FG collection
2342 MutexLockerEx y(Heap_lock, Mutex::_no_safepoint_check_flag); 2380 MutexLockerEx y(Heap_lock, Mutex::_no_safepoint_check_flag);
2343 CMSTokenSync z(true); // not strictly needed. 2381 CMSTokenSync z(true); // not strictly needed.
2344 if (_collectorState == Resizing) { 2382 if (_collectorState == Resizing) {
2345 compute_new_size(); 2383 compute_new_size();
2384 save_heap_summary();
2346 _collectorState = Resetting; 2385 _collectorState = Resetting;
2347 } else { 2386 } else {
2348 assert(_collectorState == Idling, "The state should only change" 2387 assert(_collectorState == Idling, "The state should only change"
2349 " because the foreground collector has finished the collection"); 2388 " because the foreground collector has finished the collection");
2350 } 2389 }
2399 if (PrintGC && Verbose) { 2438 if (PrintGC && Verbose) {
2400 _cmsGen->print_heap_change(prev_used); 2439 _cmsGen->print_heap_change(prev_used);
2401 } 2440 }
2402 } 2441 }
2403 2442
2404 void CMSCollector::collect_in_foreground(bool clear_all_soft_refs) { 2443 void CMSCollector::register_foreground_gc_start(GCCause::Cause cause) {
2444 if (!_cms_start_registered) {
2445 register_gc_start(cause);
2446 }
2447 }
2448
// Mark the beginning of a CMS collection for tracing: flag the cycle as
// registered, start the concurrent GC timer, and emit the GC-start event.
void CMSCollector::register_gc_start(GCCause::Cause cause) {
  _cms_start_registered = true;
  // Start the timer first: the tracer below reads _gc_timer_cm->gc_start().
  _gc_timer_cm->register_gc_start(os::elapsed_counter());
  _gc_tracer_cm->report_gc_start(cause, _gc_timer_cm->gc_start());
}
2454
2455 void CMSCollector::register_gc_end() {
2456 if (_cms_start_registered) {
2457 report_heap_summary(GCWhen::AfterGC);
2458
2459 _gc_timer_cm->register_gc_end(os::elapsed_counter());
2460 _gc_tracer_cm->report_gc_end(_gc_timer_cm->gc_end(), _gc_timer_cm->time_partitions());
2461 _cms_start_registered = false;
2462 }
2463 }
2464
// Snapshot the current heap and metaspace state into _last_*_summary so they
// can be reported later via report_heap_summary(), after the heap may have
// changed.
void CMSCollector::save_heap_summary() {
  GenCollectedHeap* gch = GenCollectedHeap::heap();
  _last_heap_summary = gch->create_heap_summary();
  _last_metaspace_summary = gch->create_metaspace_summary();
}
2470
// Forward the most recently saved heap and metaspace summaries to the CMS
// tracer, tagged with when they were captured (GCWhen::BeforeGC / AfterGC).
void CMSCollector::report_heap_summary(GCWhen::Type when) {
  _gc_tracer_cm->report_gc_heap_summary(when, _last_heap_summary, _last_metaspace_summary);
}
2474
2475 void CMSCollector::collect_in_foreground(bool clear_all_soft_refs, GCCause::Cause cause) {
2405 assert(_foregroundGCIsActive && !_foregroundGCShouldWait, 2476 assert(_foregroundGCIsActive && !_foregroundGCShouldWait,
2406 "Foreground collector should be waiting, not executing"); 2477 "Foreground collector should be waiting, not executing");
2407 assert(Thread::current()->is_VM_thread(), "A foreground collection" 2478 assert(Thread::current()->is_VM_thread(), "A foreground collection"
2408 "may only be done by the VM Thread with the world stopped"); 2479 "may only be done by the VM Thread with the world stopped");
2409 assert(ConcurrentMarkSweepThread::vm_thread_has_cms_token(), 2480 assert(ConcurrentMarkSweepThread::vm_thread_has_cms_token(),
2410 "VM thread should have CMS token"); 2481 "VM thread should have CMS token");
2411 2482
2412 NOT_PRODUCT(TraceTime t("CMS:MS (foreground) ", PrintGCDetails && Verbose, 2483 NOT_PRODUCT(GCTraceTime t("CMS:MS (foreground) ", PrintGCDetails && Verbose,
2413 true, gclog_or_tty);) 2484 true, NULL);)
2414 if (UseAdaptiveSizePolicy) { 2485 if (UseAdaptiveSizePolicy) {
2415 size_policy()->ms_collection_begin(); 2486 size_policy()->ms_collection_begin();
2416 } 2487 }
2417 COMPILER2_PRESENT(DerivedPointerTableDeactivate dpt_deact); 2488 COMPILER2_PRESENT(DerivedPointerTableDeactivate dpt_deact);
2418 2489
2432 gclog_or_tty->print_cr("Thread " INTPTR_FORMAT " in CMS state %d", 2503 gclog_or_tty->print_cr("Thread " INTPTR_FORMAT " in CMS state %d",
2433 Thread::current(), _collectorState); 2504 Thread::current(), _collectorState);
2434 } 2505 }
2435 switch (_collectorState) { 2506 switch (_collectorState) {
2436 case InitialMarking: 2507 case InitialMarking:
2508 register_foreground_gc_start(cause);
2437 init_mark_was_synchronous = true; // fact to be exploited in re-mark 2509 init_mark_was_synchronous = true; // fact to be exploited in re-mark
2438 checkpointRootsInitial(false); 2510 checkpointRootsInitial(false);
2439 assert(_collectorState == Marking, "Collector state should have changed" 2511 assert(_collectorState == Marking, "Collector state should have changed"
2440 " within checkpointRootsInitial()"); 2512 " within checkpointRootsInitial()");
2441 break; 2513 break;
2480 // The heap has been resized. 2552 // The heap has been resized.
2481 if (VerifyDuringGC && 2553 if (VerifyDuringGC &&
2482 GenCollectedHeap::heap()->total_collections() >= VerifyGCStartAt) { 2554 GenCollectedHeap::heap()->total_collections() >= VerifyGCStartAt) {
2483 Universe::verify("Verify before reset: "); 2555 Universe::verify("Verify before reset: ");
2484 } 2556 }
2557 save_heap_summary();
2485 reset(false); 2558 reset(false);
2486 assert(_collectorState == Idling, "Collector state should " 2559 assert(_collectorState == Idling, "Collector state should "
2487 "have changed"); 2560 "have changed");
2488 break; 2561 break;
2489 case Precleaning: 2562 case Precleaning:
3502 void CMSCollector::checkpointRootsInitial(bool asynch) { 3575 void CMSCollector::checkpointRootsInitial(bool asynch) {
3503 assert(_collectorState == InitialMarking, "Wrong collector state"); 3576 assert(_collectorState == InitialMarking, "Wrong collector state");
3504 check_correct_thread_executing(); 3577 check_correct_thread_executing();
3505 TraceCMSMemoryManagerStats tms(_collectorState,GenCollectedHeap::heap()->gc_cause()); 3578 TraceCMSMemoryManagerStats tms(_collectorState,GenCollectedHeap::heap()->gc_cause());
3506 3579
3580 save_heap_summary();
3581 report_heap_summary(GCWhen::BeforeGC);
3582
3507 ReferenceProcessor* rp = ref_processor(); 3583 ReferenceProcessor* rp = ref_processor();
3508 SpecializationStats::clear(); 3584 SpecializationStats::clear();
3509 assert(_restart_addr == NULL, "Control point invariant"); 3585 assert(_restart_addr == NULL, "Control point invariant");
3510 if (asynch) { 3586 if (asynch) {
3511 // acquire locks for subsequent manipulations 3587 // acquire locks for subsequent manipulations
3547 3623
3548 // Setup the verification and class unloading state for this 3624 // Setup the verification and class unloading state for this
3549 // CMS collection cycle. 3625 // CMS collection cycle.
3550 setup_cms_unloading_and_verification_state(); 3626 setup_cms_unloading_and_verification_state();
3551 3627
3552 NOT_PRODUCT(TraceTime t("\ncheckpointRootsInitialWork", 3628 NOT_PRODUCT(GCTraceTime t("\ncheckpointRootsInitialWork",
3553 PrintGCDetails && Verbose, true, gclog_or_tty);) 3629 PrintGCDetails && Verbose, true, _gc_timer_cm);)
3554 if (UseAdaptiveSizePolicy) { 3630 if (UseAdaptiveSizePolicy) {
3555 size_policy()->checkpoint_roots_initial_begin(); 3631 size_policy()->checkpoint_roots_initial_begin();
3556 } 3632 }
3557 3633
3558 // Reset all the PLAB chunk arrays if necessary. 3634 // Reset all the PLAB chunk arrays if necessary.
4540 // The following will yield to allow foreground 4616 // The following will yield to allow foreground
4541 // collection to proceed promptly. XXX YSR: 4617 // collection to proceed promptly. XXX YSR:
4542 // The code in this method may need further 4618 // The code in this method may need further
4543 // tweaking for better performance and some restructuring 4619 // tweaking for better performance and some restructuring
4544 // for cleaner interfaces. 4620 // for cleaner interfaces.
4621 GCTimer *gc_timer = NULL; // Currently not tracing concurrent phases
4545 rp->preclean_discovered_references( 4622 rp->preclean_discovered_references(
4546 rp->is_alive_non_header(), &keep_alive, &complete_trace, &yield_cl); 4623 rp->is_alive_non_header(), &keep_alive, &complete_trace, &yield_cl,
4624 gc_timer);
4547 } 4625 }
4548 4626
4549 if (clean_survivor) { // preclean the active survivor space(s) 4627 if (clean_survivor) { // preclean the active survivor space(s)
4550 assert(_young_gen->kind() == Generation::DefNew || 4628 assert(_young_gen->kind() == Generation::DefNew ||
4551 _young_gen->kind() == Generation::ParNew || 4629 _young_gen->kind() == Generation::ParNew ||
4883 if (CMSScavengeBeforeRemark) { 4961 if (CMSScavengeBeforeRemark) {
4884 GenCollectedHeap* gch = GenCollectedHeap::heap(); 4962 GenCollectedHeap* gch = GenCollectedHeap::heap();
4885 // Temporarily set flag to false, GCH->do_collection will 4963 // Temporarily set flag to false, GCH->do_collection will
4886 // expect it to be false and set to true 4964 // expect it to be false and set to true
4887 FlagSetting fl(gch->_is_gc_active, false); 4965 FlagSetting fl(gch->_is_gc_active, false);
4888 NOT_PRODUCT(TraceTime t("Scavenge-Before-Remark", 4966 NOT_PRODUCT(GCTraceTime t("Scavenge-Before-Remark",
4889 PrintGCDetails && Verbose, true, gclog_or_tty);) 4967 PrintGCDetails && Verbose, true, _gc_timer_cm);)
4890 int level = _cmsGen->level() - 1; 4968 int level = _cmsGen->level() - 1;
4891 if (level >= 0) { 4969 if (level >= 0) {
4892 gch->do_collection(true, // full (i.e. force, see below) 4970 gch->do_collection(true, // full (i.e. force, see below)
4893 false, // !clear_all_soft_refs 4971 false, // !clear_all_soft_refs
4894 0, // size 4972 0, // size
4913 } 4991 }
4914 4992
4915 void CMSCollector::checkpointRootsFinalWork(bool asynch, 4993 void CMSCollector::checkpointRootsFinalWork(bool asynch,
4916 bool clear_all_soft_refs, bool init_mark_was_synchronous) { 4994 bool clear_all_soft_refs, bool init_mark_was_synchronous) {
4917 4995
4918 NOT_PRODUCT(TraceTime tr("checkpointRootsFinalWork", PrintGCDetails, false, gclog_or_tty);) 4996 NOT_PRODUCT(GCTraceTime tr("checkpointRootsFinalWork", PrintGCDetails, false, _gc_timer_cm);)
4919 4997
4920 assert(haveFreelistLocks(), "must have free list locks"); 4998 assert(haveFreelistLocks(), "must have free list locks");
4921 assert_lock_strong(bitMapLock()); 4999 assert_lock_strong(bitMapLock());
4922 5000
4923 if (UseAdaptiveSizePolicy) { 5001 if (UseAdaptiveSizePolicy) {
4964 // are detected via the mod union table which is the set of all cards 5042 // are detected via the mod union table which is the set of all cards
4965 // dirtied since the first checkpoint in this GC cycle and prior to 5043 // dirtied since the first checkpoint in this GC cycle and prior to
4966 // the most recent young generation GC, minus those cleaned up by the 5044 // the most recent young generation GC, minus those cleaned up by the
4967 // concurrent precleaning. 5045 // concurrent precleaning.
4968 if (CMSParallelRemarkEnabled && CollectedHeap::use_parallel_gc_threads()) { 5046 if (CMSParallelRemarkEnabled && CollectedHeap::use_parallel_gc_threads()) {
4969 TraceTime t("Rescan (parallel) ", PrintGCDetails, false, gclog_or_tty); 5047 GCTraceTime t("Rescan (parallel) ", PrintGCDetails, false, _gc_timer_cm);
4970 do_remark_parallel(); 5048 do_remark_parallel();
4971 } else { 5049 } else {
4972 TraceTime t("Rescan (non-parallel) ", PrintGCDetails, false, 5050 GCTraceTime t("Rescan (non-parallel) ", PrintGCDetails, false,
4973 gclog_or_tty); 5051 _gc_timer_cm);
4974 do_remark_non_parallel(); 5052 do_remark_non_parallel();
4975 } 5053 }
4976 } 5054 }
4977 } else { 5055 } else {
4978 assert(!asynch, "Can't have init_mark_was_synchronous in asynch mode"); 5056 assert(!asynch, "Can't have init_mark_was_synchronous in asynch mode");
4981 } 5059 }
4982 verify_work_stacks_empty(); 5060 verify_work_stacks_empty();
4983 verify_overflow_empty(); 5061 verify_overflow_empty();
4984 5062
4985 { 5063 {
4986 NOT_PRODUCT(TraceTime ts("refProcessingWork", PrintGCDetails, false, gclog_or_tty);) 5064 NOT_PRODUCT(GCTraceTime ts("refProcessingWork", PrintGCDetails, false, _gc_timer_cm);)
4987 refProcessingWork(asynch, clear_all_soft_refs); 5065 refProcessingWork(asynch, clear_all_soft_refs);
4988 } 5066 }
4989 verify_work_stacks_empty(); 5067 verify_work_stacks_empty();
4990 verify_overflow_empty(); 5068 verify_overflow_empty();
4991 5069
5041 5119
5042 if ((VerifyAfterGC || VerifyDuringGC) && 5120 if ((VerifyAfterGC || VerifyDuringGC) &&
5043 GenCollectedHeap::heap()->total_collections() >= VerifyGCStartAt) { 5121 GenCollectedHeap::heap()->total_collections() >= VerifyGCStartAt) {
5044 verify_after_remark(); 5122 verify_after_remark();
5045 } 5123 }
5124
5125 _gc_tracer_cm->report_object_count_after_gc(&_is_alive_closure);
5046 5126
5047 // Change under the freelistLocks. 5127 // Change under the freelistLocks.
5048 _collectorState = Sweeping; 5128 _collectorState = Sweeping;
5049 // Call isAllClear() under bitMapLock 5129 // Call isAllClear() under bitMapLock
5050 assert(_modUnionTable.isAllClear(), 5130 assert(_modUnionTable.isAllClear(),
5695 MarkFromDirtyCardsClosure 5775 MarkFromDirtyCardsClosure
5696 markFromDirtyCardsClosure(this, _span, 5776 markFromDirtyCardsClosure(this, _span,
5697 NULL, // space is set further below 5777 NULL, // space is set further below
5698 &_markBitMap, &_markStack, &mrias_cl); 5778 &_markBitMap, &_markStack, &mrias_cl);
5699 { 5779 {
5700 TraceTime t("grey object rescan", PrintGCDetails, false, gclog_or_tty); 5780 GCTraceTime t("grey object rescan", PrintGCDetails, false, _gc_timer_cm);
5701 // Iterate over the dirty cards, setting the corresponding bits in the 5781 // Iterate over the dirty cards, setting the corresponding bits in the
5702 // mod union table. 5782 // mod union table.
5703 { 5783 {
5704 ModUnionClosure modUnionClosure(&_modUnionTable); 5784 ModUnionClosure modUnionClosure(&_modUnionTable);
5705 _ct->ct_bs()->dirty_card_iterate( 5785 _ct->ct_bs()->dirty_card_iterate(
5732 GenCollectedHeap::heap()->total_collections() >= VerifyGCStartAt) { 5812 GenCollectedHeap::heap()->total_collections() >= VerifyGCStartAt) {
5733 HandleMark hm; // Discard invalid handles created during verification 5813 HandleMark hm; // Discard invalid handles created during verification
5734 Universe::verify(); 5814 Universe::verify();
5735 } 5815 }
5736 { 5816 {
5737 TraceTime t("root rescan", PrintGCDetails, false, gclog_or_tty); 5817 GCTraceTime t("root rescan", PrintGCDetails, false, _gc_timer_cm);
5738 5818
5739 verify_work_stacks_empty(); 5819 verify_work_stacks_empty();
5740 5820
5741 gch->rem_set()->prepare_for_younger_refs_iterate(false); // Not parallel. 5821 gch->rem_set()->prepare_for_younger_refs_iterate(false); // Not parallel.
5742 GenCollectedHeap::StrongRootsScope srs(gch); 5822 GenCollectedHeap::StrongRootsScope srs(gch);
5754 || (roots_scanning_options() & SharedHeap::SO_CodeCache), 5834 || (roots_scanning_options() & SharedHeap::SO_CodeCache),
5755 "if we didn't scan the code cache, we have to be ready to drop nmethods with expired weak oops"); 5835 "if we didn't scan the code cache, we have to be ready to drop nmethods with expired weak oops");
5756 } 5836 }
5757 5837
5758 { 5838 {
5759 TraceTime t("visit unhandled CLDs", PrintGCDetails, false, gclog_or_tty); 5839 GCTraceTime t("visit unhandled CLDs", PrintGCDetails, false, _gc_timer_cm);
5760 5840
5761 verify_work_stacks_empty(); 5841 verify_work_stacks_empty();
5762 5842
5763 // Scan all class loader data objects that might have been introduced 5843 // Scan all class loader data objects that might have been introduced
5764 // during concurrent marking. 5844 // during concurrent marking.
5773 5853
5774 verify_work_stacks_empty(); 5854 verify_work_stacks_empty();
5775 } 5855 }
5776 5856
5777 { 5857 {
5778 TraceTime t("dirty klass scan", PrintGCDetails, false, gclog_or_tty); 5858 GCTraceTime t("dirty klass scan", PrintGCDetails, false, _gc_timer_cm);
5779 5859
5780 verify_work_stacks_empty(); 5860 verify_work_stacks_empty();
5781 5861
5782 RemarkKlassClosure remark_klass_closure(&mrias_cl); 5862 RemarkKlassClosure remark_klass_closure(&mrias_cl);
5783 ClassLoaderDataGraph::classes_do(&remark_klass_closure); 5863 ClassLoaderDataGraph::classes_do(&remark_klass_closure);
5975 &_markStack, false /* !preclean */); 6055 &_markStack, false /* !preclean */);
5976 CMSDrainMarkingStackClosure cmsDrainMarkingStackClosure(this, 6056 CMSDrainMarkingStackClosure cmsDrainMarkingStackClosure(this,
5977 _span, &_markBitMap, &_markStack, 6057 _span, &_markBitMap, &_markStack,
5978 &cmsKeepAliveClosure, false /* !preclean */); 6058 &cmsKeepAliveClosure, false /* !preclean */);
5979 { 6059 {
5980 TraceTime t("weak refs processing", PrintGCDetails, false, gclog_or_tty); 6060 GCTraceTime t("weak refs processing", PrintGCDetails, false, _gc_timer_cm);
6061
6062 ReferenceProcessorStats stats;
5981 if (rp->processing_is_mt()) { 6063 if (rp->processing_is_mt()) {
5982 // Set the degree of MT here. If the discovery is done MT, there 6064 // Set the degree of MT here. If the discovery is done MT, there
5983 // may have been a different number of threads doing the discovery 6065 // may have been a different number of threads doing the discovery
5984 // and a different number of discovered lists may have Ref objects. 6066 // and a different number of discovered lists may have Ref objects.
5985 // That is OK as long as the Reference lists are balanced (see 6067 // That is OK as long as the Reference lists are balanced (see
5994 // investigate. 6076 // investigate.
5995 assert(active_workers > 0, "Should have been set during scavenge"); 6077 assert(active_workers > 0, "Should have been set during scavenge");
5996 } 6078 }
5997 rp->set_active_mt_degree(active_workers); 6079 rp->set_active_mt_degree(active_workers);
5998 CMSRefProcTaskExecutor task_executor(*this); 6080 CMSRefProcTaskExecutor task_executor(*this);
5999 rp->process_discovered_references(&_is_alive_closure, 6081 stats = rp->process_discovered_references(&_is_alive_closure,
6000 &cmsKeepAliveClosure, 6082 &cmsKeepAliveClosure,
6001 &cmsDrainMarkingStackClosure, 6083 &cmsDrainMarkingStackClosure,
6002 &task_executor); 6084 &task_executor,
6085 _gc_timer_cm);
6003 } else { 6086 } else {
6004 rp->process_discovered_references(&_is_alive_closure, 6087 stats = rp->process_discovered_references(&_is_alive_closure,
6005 &cmsKeepAliveClosure, 6088 &cmsKeepAliveClosure,
6006 &cmsDrainMarkingStackClosure, 6089 &cmsDrainMarkingStackClosure,
6007 NULL); 6090 NULL,
6008 } 6091 _gc_timer_cm);
6092 }
6093 _gc_tracer_cm->report_gc_reference_stats(stats);
6094
6009 } 6095 }
6010 6096
6011 // This is the point where the entire marking should have completed. 6097 // This is the point where the entire marking should have completed.
6012 verify_work_stacks_empty(); 6098 verify_work_stacks_empty();
6013 6099
6014 if (should_unload_classes()) { 6100 if (should_unload_classes()) {
6015 { 6101 {
6016 TraceTime t("class unloading", PrintGCDetails, false, gclog_or_tty); 6102 GCTraceTime t("class unloading", PrintGCDetails, false, _gc_timer_cm);
6017 6103
6018 // Unload classes and purge the SystemDictionary. 6104 // Unload classes and purge the SystemDictionary.
6019 bool purged_class = SystemDictionary::do_unloading(&_is_alive_closure); 6105 bool purged_class = SystemDictionary::do_unloading(&_is_alive_closure);
6020 6106
6021 // Unload nmethods. 6107 // Unload nmethods.
6024 // Prune dead klasses from subklass/sibling/implementor lists. 6110 // Prune dead klasses from subklass/sibling/implementor lists.
6025 Klass::clean_weak_klass_links(&_is_alive_closure); 6111 Klass::clean_weak_klass_links(&_is_alive_closure);
6026 } 6112 }
6027 6113
6028 { 6114 {
6029 TraceTime t("scrub symbol table", PrintGCDetails, false, gclog_or_tty); 6115 GCTraceTime t("scrub symbol table", PrintGCDetails, false, _gc_timer_cm);
6030 // Clean up unreferenced symbols in symbol table. 6116 // Clean up unreferenced symbols in symbol table.
6031 SymbolTable::unlink(); 6117 SymbolTable::unlink();
6032 } 6118 }
6033 } 6119 }
6034 6120
6035 // CMS doesn't use the StringTable as hard roots when class unloading is turned off. 6121 // CMS doesn't use the StringTable as hard roots when class unloading is turned off.
6036 // Need to check if we really scanned the StringTable. 6122 // Need to check if we really scanned the StringTable.
6037 if ((roots_scanning_options() & SharedHeap::SO_Strings) == 0) { 6123 if ((roots_scanning_options() & SharedHeap::SO_Strings) == 0) {
6038 TraceTime t("scrub string table", PrintGCDetails, false, gclog_or_tty); 6124 GCTraceTime t("scrub string table", PrintGCDetails, false, _gc_timer_cm);
6039 // Delete entries for dead interned strings. 6125 // Delete entries for dead interned strings.
6040 StringTable::unlink(&_is_alive_closure); 6126 StringTable::unlink(&_is_alive_closure);
6041 } 6127 }
6042 6128
6043 // Restore any preserved marks as a result of mark stack or 6129 // Restore any preserved marks as a result of mark stack or
6378 NOT_PRODUCT( 6464 NOT_PRODUCT(
6379 if (RotateCMSCollectionTypes) { 6465 if (RotateCMSCollectionTypes) {
6380 _cmsGen->rotate_debug_collection_type(); 6466 _cmsGen->rotate_debug_collection_type();
6381 } 6467 }
6382 ) 6468 )
6469
6470 register_gc_end();
6383 } 6471 }
6384 6472
6385 void CMSCollector::do_CMS_operation(CMS_op_type op, GCCause::Cause gc_cause) { 6473 void CMSCollector::do_CMS_operation(CMS_op_type op, GCCause::Cause gc_cause) {
6386 gclog_or_tty->date_stamp(PrintGC && PrintGCDateStamps); 6474 gclog_or_tty->date_stamp(PrintGC && PrintGCDateStamps);
6387 TraceCPUTime tcpu(PrintGCDetails, true, gclog_or_tty); 6475 TraceCPUTime tcpu(PrintGCDetails, true, gclog_or_tty);
6388 TraceTime t(GCCauseString("GC", gc_cause), PrintGC, !PrintGCDetails, gclog_or_tty); 6476 GCTraceTime t(GCCauseString("GC", gc_cause), PrintGC, !PrintGCDetails, NULL);
6389 TraceCollectorStats tcs(counters()); 6477 TraceCollectorStats tcs(counters());
6390 6478
6391 switch (op) { 6479 switch (op) {
6392 case CMS_op_checkpointRootsInitial: { 6480 case CMS_op_checkpointRootsInitial: {
6393 SvcGCMarker sgcm(SvcGCMarker::OTHER); 6481 SvcGCMarker sgcm(SvcGCMarker::OTHER);