Mercurial > hg > truffle
comparison src/share/vm/opto/compile.hpp @ 7196:2aff40cb4703
7092905: C2: Keep track of the number of dead nodes
Summary: keep an (almost) accurate running count of the reachable (live) flow graph nodes.
Reviewed-by: kvn, twisti, jrose, vlivanov
author | bharadwaj |
---|---|
date | Tue, 27 Nov 2012 17:24:15 -0800 |
parents | cfe522e6461c |
children | ad5dd04754ee |
comparison
equal
deleted
inserted
replaced
7195:2cd5e15048e6 | 7196:2aff40cb4703 |
---|---|
73 class TypePtr; | 73 class TypePtr; |
74 class TypeFunc; | 74 class TypeFunc; |
75 class Unique_Node_List; | 75 class Unique_Node_List; |
76 class nmethod; | 76 class nmethod; |
77 class WarmCallInfo; | 77 class WarmCallInfo; |
78 class Node_Stack; | |
79 struct Final_Reshape_Counts; | |
78 | 80 |
79 //------------------------------Compile---------------------------------------- | 81 //------------------------------Compile---------------------------------------- |
80 // This class defines a top-level Compiler invocation. | 82 // This class defines a top-level Compiler invocation. |
81 | 83 |
82 class Compile : public Phase { | 84 class Compile : public Phase { |
96 // (The time collection itself is always conditionalized on TimeCompiler.) | 98 // (The time collection itself is always conditionalized on TimeCompiler.) |
97 class TracePhase : public TraceTime { | 99 class TracePhase : public TraceTime { |
98 private: | 100 private: |
99 Compile* C; | 101 Compile* C; |
100 CompileLog* _log; | 102 CompileLog* _log; |
103 const char* _phase_name; | |
104 bool _dolog; | |
101 public: | 105 public: |
102 TracePhase(const char* name, elapsedTimer* accumulator, bool dolog); | 106 TracePhase(const char* name, elapsedTimer* accumulator, bool dolog); |
103 ~TracePhase(); | 107 ~TracePhase(); |
104 }; | 108 }; |
105 | 109 |
311 IdealGraphPrinter* _printer; | 315 IdealGraphPrinter* _printer; |
312 #endif | 316 #endif |
313 | 317 |
314 // Node management | 318 // Node management |
315 uint _unique; // Counter for unique Node indices | 319 uint _unique; // Counter for unique Node indices |
320 VectorSet _dead_node_list; // Set of dead nodes | |
321 uint _dead_node_count; // Number of dead nodes; VectorSet::Size() is O(N). | |
322 // So use this to keep count and make the call O(1). | |
316 debug_only(static int _debug_idx;) // Monotonic counter (not reset), use -XX:BreakAtNode=<idx> | 323 debug_only(static int _debug_idx;) // Monotonic counter (not reset), use -XX:BreakAtNode=<idx> |
317 Arena _node_arena; // Arena for new-space Nodes | 324 Arena _node_arena; // Arena for new-space Nodes |
318 Arena _old_arena; // Arena for old-space Nodes, lifetime during xform | 325 Arena _old_arena; // Arena for old-space Nodes, lifetime during xform |
319 RootNode* _root; // Unique root of compilation, or NULL after bail-out. | 326 RootNode* _root; // Unique root of compilation, or NULL after bail-out. |
320 Node* _top; // Unique top node. (Reset by various phases.) | 327 Node* _top; // Unique top node. (Reset by various phases.) |
532 // Compilation environment. | 539 // Compilation environment. |
533 Arena* comp_arena() { return &_comp_arena; } | 540 Arena* comp_arena() { return &_comp_arena; } |
534 ciEnv* env() const { return _env; } | 541 ciEnv* env() const { return _env; } |
535 CompileLog* log() const { return _log; } | 542 CompileLog* log() const { return _log; } |
536 bool failing() const { return _env->failing() || _failure_reason != NULL; } | 543 bool failing() const { return _env->failing() || _failure_reason != NULL; } |
537 const char* failure_reason() { return _failure_reason; } | 544 const char* failure_reason() { return _failure_reason; } |
538 bool failure_reason_is(const char* r) { return (r==_failure_reason) || (r!=NULL && _failure_reason!=NULL && strcmp(r, _failure_reason)==0); } | 545 bool failure_reason_is(const char* r) { return (r==_failure_reason) || (r!=NULL && _failure_reason!=NULL && strcmp(r, _failure_reason)==0); } |
539 | 546 |
540 void record_failure(const char* reason); | 547 void record_failure(const char* reason); |
541 void record_method_not_compilable(const char* reason, bool all_tiers = false) { | 548 void record_method_not_compilable(const char* reason, bool all_tiers = false) { |
542 // All bailouts cover "all_tiers" when TieredCompilation is off. | 549 // All bailouts cover "all_tiers" when TieredCompilation is off. |
547 } | 554 } |
548 void record_method_not_compilable_all_tiers(const char* reason) { | 555 void record_method_not_compilable_all_tiers(const char* reason) { |
549 record_method_not_compilable(reason, true); | 556 record_method_not_compilable(reason, true); |
550 } | 557 } |
551 bool check_node_count(uint margin, const char* reason) { | 558 bool check_node_count(uint margin, const char* reason) { |
552 if (unique() + margin > (uint)MaxNodeLimit) { | 559 if (live_nodes() + margin > (uint)MaxNodeLimit) { |
553 record_method_not_compilable(reason); | 560 record_method_not_compilable(reason); |
554 return true; | 561 return true; |
555 } else { | 562 } else { |
556 return false; | 563 return false; |
557 } | 564 } |
558 } | 565 } |
559 | 566 |
560 // Node management | 567 // Node management |
561 uint unique() const { return _unique; } | 568 uint unique() const { return _unique; } |
562 uint next_unique() { return _unique++; } | 569 uint next_unique() { return _unique++; } |
563 void set_unique(uint i) { _unique = i; } | 570 void set_unique(uint i) { _unique = i; } |
564 static int debug_idx() { return debug_only(_debug_idx)+0; } | 571 static int debug_idx() { return debug_only(_debug_idx)+0; } |
565 static void set_debug_idx(int i) { debug_only(_debug_idx = i); } | 572 static void set_debug_idx(int i) { debug_only(_debug_idx = i); } |
566 Arena* node_arena() { return &_node_arena; } | 573 Arena* node_arena() { return &_node_arena; } |
567 Arena* old_arena() { return &_old_arena; } | 574 Arena* old_arena() { return &_old_arena; } |
568 RootNode* root() const { return _root; } | 575 RootNode* root() const { return _root; } |
569 void set_root(RootNode* r) { _root = r; } | 576 void set_root(RootNode* r) { _root = r; } |
570 StartNode* start() const; // (Derived from root.) | 577 StartNode* start() const; // (Derived from root.) |
571 void init_start(StartNode* s); | 578 void init_start(StartNode* s); |
572 Node* immutable_memory(); | 579 Node* immutable_memory(); |
573 | 580 |
574 Node* recent_alloc_ctl() const { return _recent_alloc_ctl; } | 581 Node* recent_alloc_ctl() const { return _recent_alloc_ctl; } |
575 Node* recent_alloc_obj() const { return _recent_alloc_obj; } | 582 Node* recent_alloc_obj() const { return _recent_alloc_obj; } |
576 void set_recent_alloc(Node* ctl, Node* obj) { | 583 void set_recent_alloc(Node* ctl, Node* obj) { |
577 _recent_alloc_ctl = ctl; | 584 _recent_alloc_ctl = ctl; |
578 _recent_alloc_obj = obj; | 585 _recent_alloc_obj = obj; |
579 } | 586 } |
587 void record_dead_node(uint idx) { if (_dead_node_list.test_set(idx)) return; | |
588 _dead_node_count++; | |
589 } | |
590 uint dead_node_count() { return _dead_node_count; } | |
591 void reset_dead_node_list() { _dead_node_list.Reset(); | |
592 _dead_node_count = 0; | |
593 } | |
594 uint live_nodes() { | |
595 int val = _unique - _dead_node_count; | |
596 assert (val >= 0, err_msg_res("number of tracked dead nodes %d more than created nodes %d", _dead_node_count, _unique)); | |
597 return (uint) val; | |
598 } | |
599 #ifdef ASSERT | |
600 uint count_live_nodes_by_graph_walk(); | |
601 void print_missing_nodes(); | |
602 #endif | |
580 | 603 |
581 // Constant table | 604 // Constant table |
582 ConstantTable& constant_table() { return _constant_table; } | 605 ConstantTable& constant_table() { return _constant_table; } |
583 | 606 |
584 MachConstantBaseNode* mach_constant_base_node(); | 607 MachConstantBaseNode* mach_constant_base_node(); |
676 // record_for_igvn as needed. | 699 // record_for_igvn as needed. |
677 void gvn_replace_by(Node* n, Node* nn); | 700 void gvn_replace_by(Node* n, Node* nn); |
678 | 701 |
679 | 702 |
680 void identify_useful_nodes(Unique_Node_List &useful); | 703 void identify_useful_nodes(Unique_Node_List &useful); |
704 void update_dead_node_list(Unique_Node_List &useful); | |
681 void remove_useless_nodes (Unique_Node_List &useful); | 705 void remove_useless_nodes (Unique_Node_List &useful); |
682 | 706 |
683 WarmCallInfo* warm_calls() const { return _warm_calls; } | 707 WarmCallInfo* warm_calls() const { return _warm_calls; } |
684 void set_warm_calls(WarmCallInfo* l) { _warm_calls = l; } | 708 void set_warm_calls(WarmCallInfo* l) { _warm_calls = l; } |
685 WarmCallInfo* pop_warm_call(); | 709 WarmCallInfo* pop_warm_call(); |
890 | 914 |
891 #ifndef PRODUCT | 915 #ifndef PRODUCT |
892 static juint _intrinsic_hist_count[vmIntrinsics::ID_LIMIT]; | 916 static juint _intrinsic_hist_count[vmIntrinsics::ID_LIMIT]; |
893 static jubyte _intrinsic_hist_flags[vmIntrinsics::ID_LIMIT]; | 917 static jubyte _intrinsic_hist_flags[vmIntrinsics::ID_LIMIT]; |
894 #endif | 918 #endif |
919 // Function calls made by the public function final_graph_reshaping. | |
920 // No need to be made public as they are not called elsewhere. | |
921 void final_graph_reshaping_impl( Node *n, Final_Reshape_Counts &frc); | |
922 void final_graph_reshaping_walk( Node_Stack &nstack, Node *root, Final_Reshape_Counts &frc ); | |
923 void eliminate_redundant_card_marks(Node* n); | |
895 | 924 |
896 public: | 925 public: |
897 | 926 |
898 // Note: Histogram array size is about 1 Kb. | 927 // Note: Histogram array size is about 1 Kb. |
899 enum { // flag bits: | 928 enum { // flag bits: |