comparison src/share/vm/gc_implementation/parNew/parNewGeneration.cpp @ 453:c96030fff130

6684579: SoftReference processing can be made more efficient
Summary: For current soft-ref clearing policies, we can decide at marking time if a soft-reference will definitely not be cleared, postponing the decision of whether it will definitely be cleared to the final reference processing phase. This can be especially beneficial in the case of concurrent collectors where the marking is usually concurrent but reference processing is usually not.
Reviewed-by: jmasa
author ysr
date Thu, 20 Nov 2008 16:56:09 -0800
parents 850fdf70db2b
children df4305d4c1a1
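The summary above can be made concrete with a small illustration. This is a sketch of the idea, not the HotSpot code: it assumes an LRU-style clearing policy in the spirit of the LRUCurrentHeapPolicy/LRUMaxHeapPolicy classes visible in the diff below, where a soft reference may be cleared only if it has not been accessed for longer than an interval that is snapshotted once at the start of the collection. Under such a policy, a reference touched recently enough is already known at marking time to survive; only the remaining references need the full clearing decision during final reference processing. The SnappedLRUPolicy type, its fields, and the example numbers are invented here purely for illustration.

// Illustration only -- not the HotSpot implementation.
#include <cstdint>
#include <cstdio>

struct SnappedLRUPolicy {
  int64_t clock_ms;     // "now", captured when the policy is snapped
  int64_t interval_ms;  // maximum age before a soft ref becomes clearable

  // Usable at marking time: a reference accessed within the interval will
  // definitely NOT be cleared, so the marker may treat its referent as
  // strongly reachable and skip discovering the reference.
  bool definitely_kept(int64_t last_access_ms) const {
    return clock_ms - last_access_ms <= interval_ms;
  }

  // Deferred to the final reference-processing phase: a reference is
  // actually cleared only if the policy allows it AND the referent turned
  // out not to be otherwise (strongly) reachable.
  bool should_clear(int64_t last_access_ms, bool referent_strongly_reachable) const {
    return !definitely_kept(last_access_ms) && !referent_strongly_reachable;
  }
};

int main() {
  SnappedLRUPolicy policy;
  policy.clock_ms    = 10000;  // snapped at the start of the collection
  policy.interval_ms = 2000;

  // Touched 1s ago: known to survive already at marking time.
  std::printf("recently used ref kept at mark time: %d\n",
              policy.definitely_kept(9000));
  // Touched 5s ago: not decided at marking time; cleared during final
  // processing only if nothing else keeps the referent alive.
  std::printf("stale ref cleared at processing time: %d\n",
              policy.should_clear(5000, /*referent_strongly_reachable=*/false));
  return 0;
}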
comparing 452:00b023ae2d78 with 453:c96030fff130
757 " Pushes: %7d Pops: %7d Steals %7d (sum = %7d).\n", 757 " Pushes: %7d Pops: %7d Steals %7d (sum = %7d).\n",
758 thread_state_set.pushes(), thread_state_set.pops(), 758 thread_state_set.pushes(), thread_state_set.pops(),
759 thread_state_set.steals(), 759 thread_state_set.steals(),
760 thread_state_set.pops()+thread_state_set.steals()); 760 thread_state_set.pops()+thread_state_set.steals());
761 } 761 }
762 assert(thread_state_set.pushes() == thread_state_set.pops() + thread_state_set.steals(), 762 assert(thread_state_set.pushes() == thread_state_set.pops()
763 + thread_state_set.steals(),
763 "Or else the queues are leaky."); 764 "Or else the queues are leaky.");
764 765
765 // For now, process discovered weak refs sequentially.
766 #ifdef COMPILER2
767 ReferencePolicy *soft_ref_policy = new LRUMaxHeapPolicy();
768 #else
769 ReferencePolicy *soft_ref_policy = new LRUCurrentHeapPolicy();
770 #endif // COMPILER2
771
772 // Process (weak) reference objects found during scavenge. 766 // Process (weak) reference objects found during scavenge.
767 ReferenceProcessor* rp = ref_processor();
773 IsAliveClosure is_alive(this); 768 IsAliveClosure is_alive(this);
774 ScanWeakRefClosure scan_weak_ref(this); 769 ScanWeakRefClosure scan_weak_ref(this);
775 KeepAliveClosure keep_alive(&scan_weak_ref); 770 KeepAliveClosure keep_alive(&scan_weak_ref);
776 ScanClosure scan_without_gc_barrier(this, false); 771 ScanClosure scan_without_gc_barrier(this, false);
777 ScanClosureWithParBarrier scan_with_gc_barrier(this, true); 772 ScanClosureWithParBarrier scan_with_gc_barrier(this, true);
778 set_promo_failure_scan_stack_closure(&scan_without_gc_barrier); 773 set_promo_failure_scan_stack_closure(&scan_without_gc_barrier);
779 EvacuateFollowersClosureGeneral evacuate_followers(gch, _level, 774 EvacuateFollowersClosureGeneral evacuate_followers(gch, _level,
780 &scan_without_gc_barrier, &scan_with_gc_barrier); 775 &scan_without_gc_barrier, &scan_with_gc_barrier);
781 if (ref_processor()->processing_is_mt()) { 776 rp->snap_policy(clear_all_soft_refs);
777 if (rp->processing_is_mt()) {
782 ParNewRefProcTaskExecutor task_executor(*this, thread_state_set); 778 ParNewRefProcTaskExecutor task_executor(*this, thread_state_set);
783 ref_processor()->process_discovered_references( 779 rp->process_discovered_references(&is_alive, &keep_alive,
784 soft_ref_policy, &is_alive, &keep_alive, &evacuate_followers, 780 &evacuate_followers, &task_executor);
785 &task_executor);
786 } else { 781 } else {
787 thread_state_set.flush(); 782 thread_state_set.flush();
788 gch->set_par_threads(0); // 0 ==> non-parallel. 783 gch->set_par_threads(0); // 0 ==> non-parallel.
789 gch->save_marks(); 784 gch->save_marks();
790 ref_processor()->process_discovered_references( 785 rp->process_discovered_references(&is_alive, &keep_alive,
791 soft_ref_policy, &is_alive, &keep_alive, &evacuate_followers, 786 &evacuate_followers, NULL);
792 NULL);
793 } 787 }
794 if (!promotion_failed()) { 788 if (!promotion_failed()) {
795 // Swap the survivor spaces. 789 // Swap the survivor spaces.
796 eden()->clear(SpaceDecorator::Mangle); 790 eden()->clear(SpaceDecorator::Mangle);
797 from()->clear(SpaceDecorator::Mangle); 791 from()->clear(SpaceDecorator::Mangle);
849 843
850 update_time_of_last_gc(os::javaTimeMillis()); 844 update_time_of_last_gc(os::javaTimeMillis());
851 845
852 SpecializationStats::print(); 846 SpecializationStats::print();
853 847
854 ref_processor()->set_enqueuing_is_done(true); 848 rp->set_enqueuing_is_done(true);
855 if (ref_processor()->processing_is_mt()) { 849 if (rp->processing_is_mt()) {
856 ParNewRefProcTaskExecutor task_executor(*this, thread_state_set); 850 ParNewRefProcTaskExecutor task_executor(*this, thread_state_set);
857 ref_processor()->enqueue_discovered_references(&task_executor); 851 rp->enqueue_discovered_references(&task_executor);
858 } else { 852 } else {
859 ref_processor()->enqueue_discovered_references(NULL); 853 rp->enqueue_discovered_references(NULL);
860 } 854 }
861 ref_processor()->verify_no_references_recorded(); 855 rp->verify_no_references_recorded();
862 } 856 }
863 857
864 static int sum; 858 static int sum;
865 void ParNewGeneration::waste_some_time() { 859 void ParNewGeneration::waste_some_time() {
866 for (int i = 0; i < 100; i++) { 860 for (int i = 0; i < 100; i++) {
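As the hunks above show, the COMPILER2-conditional choice between LRUMaxHeapPolicy and LRUCurrentHeapPolicy disappears from the call site: the caller now snapshots the policy once with rp->snap_policy(clear_all_soft_refs), and process_discovered_references() no longer takes a ReferencePolicy* argument. The standalone sketch below mirrors only that call order; the stub types, the boolean field, and the printouts are invented stand-ins, not the HotSpot classes.

#include <cstdio>

// Hypothetical stand-ins for the HotSpot types involved; only the call
// order mirrors the patched ParNewGeneration::collect().
struct IsAliveClosure   {};
struct KeepAliveClosure {};
struct EvacClosure      {};
struct TaskExecutor     {};

class ReferenceProcessor {
 public:
  // Capture the soft-ref clearing policy once per collection so that
  // marking and final processing agree on what may be cleared.
  void snap_policy(bool clear_all_soft_refs) {
    _clear_all_soft_refs = clear_all_soft_refs;
    std::printf("policy snapped (clear_all_soft_refs=%d)\n", _clear_all_soft_refs);
  }
  // No ReferencePolicy* parameter any more: the processor uses the policy
  // captured by snap_policy() above.
  void process_discovered_references(IsAliveClosure*, KeepAliveClosure*,
                                     EvacClosure*, TaskExecutor* executor) {
    std::printf("processing discovered references %s\n",
                executor != nullptr ? "(MT)" : "(single-threaded)");
  }
 private:
  bool _clear_all_soft_refs = false;
};

int main() {
  ReferenceProcessor rp;
  IsAliveClosure   is_alive;
  KeepAliveClosure keep_alive;
  EvacClosure      evacuate_followers;

  rp.snap_policy(/*clear_all_soft_refs=*/false);       // once, up front
  rp.process_discovered_references(&is_alive, &keep_alive,
                                   &evacuate_followers, /*executor=*/nullptr);
  return 0;
}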