comparison src/share/vm/gc_implementation/g1/g1CollectedHeap.hpp @ 10405:f2110083203d

8005849: JEP 167: Event-Based JVM Tracing
Reviewed-by: acorn, coleenp, sla
Contributed-by: Karen Kinnear <karen.kinnear@oracle.com>, Bengt Rutisson <bengt.rutisson@oracle.com>, Calvin Cheung <calvin.cheung@oracle.com>, Erik Gahlin <erik.gahlin@oracle.com>, Erik Helin <erik.helin@oracle.com>, Jesper Wilhelmsson <jesper.wilhelmsson@oracle.com>, Keith McGuigan <keith.mcguigan@oracle.com>, Mattias Tobiasson <mattias.tobiasson@oracle.com>, Markus Gronlund <markus.gronlund@oracle.com>, Mikael Auno <mikael.auno@oracle.com>, Nils Eliasson <nils.eliasson@oracle.com>, Nils Loodin <nils.loodin@oracle.com>, Rickard Backman <rickard.backman@oracle.com>, Staffan Larsen <staffan.larsen@oracle.com>, Stefan Karlsson <stefan.karlsson@oracle.com>, Yekaterina Kantserova <yekaterina.kantserova@oracle.com>
author sla
date Mon, 10 Jun 2013 11:30:51 +0200
parents 87c64c0438fb
children 836a62f43af9 71180a6e5080
--- src/share/vm/gc_implementation/g1/g1CollectedHeap.hpp (10404:d0add7016434)
+++ src/share/vm/gc_implementation/g1/g1CollectedHeap.hpp (10405:f2110083203d)
@@ -1,7 +1,7 @@
 /*
- * Copyright (c) 2001, 2012, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2001, 2013, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
  * under the terms of the GNU General Public License version 2 only, as
  * published by the Free Software Foundation.
@@ -24,14 +24,16 @@
 
 #ifndef SHARE_VM_GC_IMPLEMENTATION_G1_G1COLLECTEDHEAP_HPP
 #define SHARE_VM_GC_IMPLEMENTATION_G1_G1COLLECTEDHEAP_HPP
 
 #include "gc_implementation/g1/concurrentMark.hpp"
+#include "gc_implementation/g1/evacuationInfo.hpp"
 #include "gc_implementation/g1/g1AllocRegion.hpp"
 #include "gc_implementation/g1/g1HRPrinter.hpp"
+#include "gc_implementation/g1/g1MonitoringSupport.hpp"
 #include "gc_implementation/g1/g1RemSet.hpp"
-#include "gc_implementation/g1/g1MonitoringSupport.hpp"
+#include "gc_implementation/g1/g1YCTypes.hpp"
 #include "gc_implementation/g1/heapRegionSeq.hpp"
 #include "gc_implementation/g1/heapRegionSets.hpp"
 #include "gc_implementation/shared/hSpaceCounters.hpp"
 #include "gc_implementation/shared/parGCAllocBuffer.hpp"
 #include "memory/barrierSet.hpp"
@@ -59,11 +61,16 @@
 class G1RemSet;
 class HeapRegionRemSetIterator;
 class ConcurrentMark;
 class ConcurrentMarkThread;
 class ConcurrentG1Refine;
+class ConcurrentGCTimer;
 class GenerationCounters;
+class STWGCTimer;
+class G1NewTracer;
+class G1OldTracer;
+class EvacuationFailedInfo;
 
 typedef OverflowTaskQueue<StarTask, mtGC> RefToScanQueue;
 typedef GenericTaskQueueSet<RefToScanQueue, mtGC> RefToScanQueueSet;
 
 typedef int RegionIdx_t;   // needs to hold [ 0..max_regions() )
@@ -158,11 +165,11 @@
 
 // The G1 STW is alive closure.
 // An instance is embedded into the G1CH and used as the
 // (optional) _is_alive_non_header closure in the STW
 // reference processor. It is also extensively used during
-// refence processing during STW evacuation pauses.
+// reference processing during STW evacuation pauses.
 class G1STWIsAliveClosure: public BoolObjectClosure {
   G1CollectedHeap* _g1;
 public:
   G1STWIsAliveClosure(G1CollectedHeap* g1) : _g1(g1) {}
   bool do_object_b(oop p);
@@ -321,14 +328,14 @@
 
   // It releases the mutator alloc region.
   void release_mutator_alloc_region();
 
   // It initializes the GC alloc regions at the start of a GC.
-  void init_gc_alloc_regions();
+  void init_gc_alloc_regions(EvacuationInfo& evacuation_info);
 
   // It releases the GC alloc regions at the end of a GC.
-  void release_gc_alloc_regions(uint no_of_gc_workers);
+  void release_gc_alloc_regions(uint no_of_gc_workers, EvacuationInfo& evacuation_info);
 
   // It does any cleanup that needs to be done on the GC alloc regions
   // before a Full GC.
   void abandon_gc_alloc_regions();
 
@@ -386,10 +393,12 @@
   volatile unsigned int _old_marking_cycles_started;
 
   // Keeps track of how many "old marking cycles" (i.e., Full GCs or
   // concurrent cycles) we have completed.
   volatile unsigned int _old_marking_cycles_completed;
+
+  bool _concurrent_cycle_started;
 
   // This is a non-product method that is helpful for testing. It is
   // called at the end of a GC and artificially expands the heap by
   // allocating a number of dead regions. This way we can induce very
   // frequent marking cycles and stress the cleanup / concurrent
@@ -732,10 +741,16 @@
 
   unsigned int old_marking_cycles_completed() {
     return _old_marking_cycles_completed;
   }
 
+  void register_concurrent_cycle_start(jlong start_time);
+  void register_concurrent_cycle_end();
+  void trace_heap_after_concurrent_cycle();
+
+  G1YCType yc_type();
+
   G1HRPrinter* hr_printer() { return &_hr_printer; }
 
 protected:
 
   // Shrink the garbage-first heap by at most the given size (in bytes!).
@@ -767,11 +782,11 @@
   // thread. It returns false if it is unable to do the collection due
   // to the GC locker being active, true otherwise
   bool do_collection_pause_at_safepoint(double target_pause_time_ms);
 
   // Actually do the work of evacuating the collection set.
-  void evacuate_collection_set();
+  void evacuate_collection_set(EvacuationInfo& evacuation_info);
 
   // The g1 remembered set of the heap.
   G1RemSet* _g1_rem_set;
   // And it's mod ref barrier set, used to track updates for the above.
   ModRefBarrierSet* _mr_bs;
@@ -792,11 +807,11 @@
   // set in the event of an evacuation failure.
   DirtyCardQueueSet _into_cset_dirty_card_queue_set;
 
   // After a collection pause, make the regions in the CS into free
   // regions.
-  void free_collection_set(HeapRegion* cs_head);
+  void free_collection_set(HeapRegion* cs_head, EvacuationInfo& evacuation_info);
 
   // Abandon the current collection set without recording policy
   // statistics or updating free lists.
   void abandon_collection_set(HeapRegion* cs_head);
 
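The hunks above thread a new EvacuationInfo& parameter from the pause driver into the allocation-region and collection-set routines, so per-pause data can be gathered in one object and later handed to the tracing layer. The following standalone sketch illustrates only that parameter-threading pattern; PauseInfo and the phase functions are hypothetical stand-ins for illustration, not the real G1 types or methods declared in this header.

#include <cstdio>

// Stand-in for EvacuationInfo: one object per evacuation pause,
// filled in by each phase it is passed to by reference.
struct PauseInfo {
  unsigned regions_allocated;
  unsigned regions_freed;
  PauseInfo() : regions_allocated(0), regions_freed(0) {}
};

// Hypothetical phase functions mirroring the shape of
// init_gc_alloc_regions / evacuate_collection_set / free_collection_set.
static void init_alloc_regions(PauseInfo& info)  { info.regions_allocated += 2; }
static void evacuate(PauseInfo& info)            { info.regions_allocated += 5; }
static void free_collection_set(PauseInfo& info) { info.regions_freed += 7; }

int main() {
  PauseInfo info;                 // created once by the pause driver
  init_alloc_regions(info);
  evacuate(info);
  free_collection_set(info);
  // In the real code the populated object would now be reported to the
  // event/tracing layer introduced by JEP 167.
  std::printf("allocated=%u freed=%u\n", info.regions_allocated, info.regions_freed);
  return 0;
}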
@@ -861,13 +876,11 @@
   RefToScanQueueSet *_task_queues;
 
   // True iff a evacuation has failed in the current collection.
   bool _evacuation_failed;
 
-  // Set the attribute indicating whether evacuation has failed in the
-  // current collection.
-  void set_evacuation_failed(bool b) { _evacuation_failed = b; }
+  EvacuationFailedInfo* _evacuation_failed_info_array;
 
   // Failed evacuations cause some logical from-space objects to have
   // forwarding pointers to themselves. Reset them.
   void remove_self_forwarding_pointers();
 
@@ -905,11 +918,11 @@
   // Do any necessary cleanup for evacuation-failure handling data
   // structures.
   void finalize_for_evac_failure();
 
   // An attempt to evacuate "obj" has failed; take necessary steps.
-  oop handle_evacuation_failure_par(OopsInHeapRegionClosure* cl, oop obj);
+  oop handle_evacuation_failure_par(G1ParScanThreadState* _par_scan_state, oop obj);
   void handle_evacuation_failure_common(oop obj, markOop m);
 
 #ifndef PRODUCT
   // Support for forcing evacuation failures. Analogous to
   // PromotionFailureALot for the other collectors.
@@ -937,17 +950,17 @@
 
   // Return true if it's time to cause an evacuation failure.
   inline bool evacuation_should_fail();
 
   // Reset the G1EvacuationFailureALot counters. Should be called at
-  // the end of an evacuation pause in which an evacuation failure ocurred.
+  // the end of an evacuation pause in which an evacuation failure occurred.
   inline void reset_evacuation_should_fail();
 #endif // !PRODUCT
 
   // ("Weak") Reference processing support.
   //
-  // G1 has 2 instances of the referece processor class. One
+  // G1 has 2 instances of the reference processor class. One
   // (_ref_processor_cm) handles reference object discovery
   // and subsequent processing during concurrent marking cycles.
   //
   // The other (_ref_processor_stw) handles reference object
   // discovery and processing during full GCs and incremental
@@ -993,10 +1006,16 @@
   // lists (also checked as a precondition during initial marking).
 
   // The (stw) reference processor...
   ReferenceProcessor* _ref_processor_stw;
 
+  STWGCTimer* _gc_timer_stw;
+  ConcurrentGCTimer* _gc_timer_cm;
+
+  G1OldTracer* _gc_tracer_cm;
+  G1NewTracer* _gc_tracer_stw;
+
   // During reference object discovery, the _is_alive_non_header
   // closure (if non-null) is applied to the referent object to
   // determine whether the referent is live. If so then the
   // reference object does not need to be 'discovered' and can
   // be treated as a regular oop. This has the benefit of reducing
@@ -1138,12 +1157,15 @@
   // Reference Processing accessors
 
   // The STW reference processor....
   ReferenceProcessor* ref_processor_stw() const { return _ref_processor_stw; }
 
-  // The Concurent Marking reference processor...
+  // The Concurrent Marking reference processor...
   ReferenceProcessor* ref_processor_cm() const { return _ref_processor_cm; }
+
+  ConcurrentGCTimer* gc_timer_cm() const { return _gc_timer_cm; }
+  G1OldTracer* gc_tracer_cm() const { return _gc_tracer_cm; }
 
   virtual size_t capacity() const;
   virtual size_t used() const;
   // This should be called when we're not holding the heap lock. The
   // result might be a bit inaccurate.
@@ -1198,11 +1220,11 @@
   // necessary (i.e., during heap verification).
   void verify_region_sets();
 
   // verify_region_sets_optional() is planted in the code for
   // list verification in non-product builds (and it can be enabled in
-  // product builds by definning HEAP_REGION_SET_FORCE_VERIFY to be 1).
+  // product builds by defining HEAP_REGION_SET_FORCE_VERIFY to be 1).
 #if HEAP_REGION_SET_FORCE_VERIFY
   void verify_region_sets_optional() {
     verify_region_sets();
   }
 #else // HEAP_REGION_SET_FORCE_VERIFY
@@ -1264,11 +1286,11 @@
   virtual void collect(GCCause::Cause cause);
 
   // The same as above but assume that the caller holds the Heap_lock.
   void collect_locked(GCCause::Cause cause);
 
-  // True iff a evacuation has failed in the most-recent collection.
+  // True iff an evacuation has failed in the most-recent collection.
   bool evacuation_failed() { return _evacuation_failed; }
 
   // It will free a region if it has allocated objects in it that are
   // all dead. It calls either free_region() or
   // free_humongous_region() depending on the type of the region that
@@ -1552,10 +1574,11 @@
   // full GC.
   void verify(bool silent, VerifyOption vo);
 
   // Override; it uses the "prev" marking information
   virtual void verify(bool silent);
+
   virtual void print_on(outputStream* st) const;
   virtual void print_extended_on(outputStream* st) const;
   virtual void print_on_error(outputStream* st) const;
 
   virtual void print_gc_threads_on(outputStream* st) const;
@@ -1837,11 +1860,11 @@
 
   OopsInHeapRegionClosure* _evac_failure_cl;
   G1ParScanHeapEvacClosure* _evac_cl;
   G1ParScanPartialArrayClosure* _partial_scan_cl;
 
   int _hash_seed;
   uint _queue_num;
 
   size_t _term_attempts;
 
   double _start;
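The concurrent-cycle bookkeeping added by this changeset (_concurrent_cycle_started, _gc_timer_cm, _gc_tracer_cm, register_concurrent_cycle_start/end and the gc_timer_cm/gc_tracer_cm accessors) appears here only as declarations. Below is a minimal, self-contained sketch of how a flag/timer/tracer trio like this can be wired together; CycleTimer, CycleTracer, HeapSketch, the method bodies, and the end-time parameter are all assumptions for illustration and do not reflect the actual implementation in g1CollectedHeap.cpp.

#include <cstdio>

// Stand-in for ConcurrentGCTimer: records start and end timestamps.
struct CycleTimer {
  long start_ticks;
  long end_ticks;
  void register_gc_start(long t) { start_ticks = t; }
  void register_gc_end(long t)   { end_ticks = t; }
};

// Stand-in for G1OldTracer: turns timer data into a reported "event".
struct CycleTracer {
  void report_cycle(const CycleTimer& t) {
    std::printf("concurrent cycle took %ld ticks\n", t.end_ticks - t.start_ticks);
  }
};

// Stand-in for the G1CollectedHeap bookkeeping declared in this diff.
class HeapSketch {
  bool        _concurrent_cycle_started;
  CycleTimer  _gc_timer_cm;
  CycleTracer _gc_tracer_cm;
 public:
  HeapSketch() : _concurrent_cycle_started(false) {}

  void register_concurrent_cycle_start(long start_time) {
    _concurrent_cycle_started = true;
    _gc_timer_cm.register_gc_start(start_time);
  }

  // The declaration in the header takes no argument; the end time is a
  // parameter here only to keep the sketch free of platform clock calls.
  void register_concurrent_cycle_end(long end_time) {
    if (_concurrent_cycle_started) {      // ignore an end without a matching start
      _gc_timer_cm.register_gc_end(end_time);
      _gc_tracer_cm.report_cycle(_gc_timer_cm);
      _concurrent_cycle_started = false;
    }
  }
};

int main() {
  HeapSketch heap;
  heap.register_concurrent_cycle_start(100);
  heap.register_concurrent_cycle_end(175);  // prints: concurrent cycle took 75 ticks
  return 0;
}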