Mercurial > hg > truffle
comparison src/share/vm/opto/lcm.cpp @ 1693:6c9cc03d8726
6973329: C2 with Zero based COOP produces code with broken anti-dependency on x86
Summary: Recompile without subsuming loads if RA tries to clone a node with anti_dependence.
Reviewed-by: never
author | kvn |
---|---|
date | Wed, 11 Aug 2010 10:48:20 -0700 |
parents | 0e35fa8ebccd |
children | 3e8fbc61cee8 |
comparison
equal
deleted
inserted
replaced
1692:d2ede61b7a12 | 1693:6c9cc03d8726 |
---|---|
111 for (DUIterator i = val->outs(); val->has_out(i); i++) { | 111 for (DUIterator i = val->outs(); val->has_out(i); i++) { |
112 Node *m = val->out(i); | 112 Node *m = val->out(i); |
113 if( !m->is_Mach() ) continue; | 113 if( !m->is_Mach() ) continue; |
114 MachNode *mach = m->as_Mach(); | 114 MachNode *mach = m->as_Mach(); |
115 was_store = false; | 115 was_store = false; |
116 switch( mach->ideal_Opcode() ) { | 116 int iop = mach->ideal_Opcode(); |
117 switch( iop ) { | |
117 case Op_LoadB: | 118 case Op_LoadB: |
118 case Op_LoadUS: | 119 case Op_LoadUS: |
119 case Op_LoadD: | 120 case Op_LoadD: |
120 case Op_LoadF: | 121 case Op_LoadF: |
121 case Op_LoadI: | 122 case Op_LoadI: |
153 // embedded loads | 154 // embedded loads |
154 continue; | 155 continue; |
155 default: // Also check for embedded loads | 156 default: // Also check for embedded loads |
156 if( !mach->needs_anti_dependence_check() ) | 157 if( !mach->needs_anti_dependence_check() ) |
157 continue; // Not an memory op; skip it | 158 continue; // Not an memory op; skip it |
159 if( must_clone[iop] ) { | |
160 // Do not move nodes which produce flags because | |
161 // RA will try to clone it to place near branch and | |
162 // it will cause recompilation, see clone_node(). | |
163 continue; | |
164 } | |
158 { | 165 { |
159 // Check that value is used in memory address in | 166 // Check that value is used in memory address in |
160 // instructions with embedded load (CmpP val1,(val2+off)). | 167 // instructions with embedded load (CmpP val1,(val2+off)). |
161 Node* base; | 168 Node* base; |
162 Node* index; | 169 Node* index; |
955 // 'end' indices. | 962 // 'end' indices. |
956 for( uint i = 0; i < _num_succs; i++ ) { | 963 for( uint i = 0; i < _num_succs; i++ ) { |
957 Block *sb = _succs[i]; | 964 Block *sb = _succs[i]; |
958 // Clone the entire area; ignoring the edge fixup for now. | 965 // Clone the entire area; ignoring the edge fixup for now. |
959 for( uint j = end; j > beg; j-- ) { | 966 for( uint j = end; j > beg; j-- ) { |
967 // It is safe here to clone a node with anti_dependence | |
968 // since clones dominate on each path. | |
960 Node *clone = _nodes[j-1]->clone(); | 969 Node *clone = _nodes[j-1]->clone(); |
961 sb->_nodes.insert( 1, clone ); | 970 sb->_nodes.insert( 1, clone ); |
962 bbs.map(clone->_idx,sb); | 971 bbs.map(clone->_idx,sb); |
963 } | 972 } |
964 } | 973 } |