Mercurial > hg > truffle
annotate src/share/vm/opto/callnode.cpp @ 1721:413ad0331a0c
6977924: Changes for 6975078 produce build error with certain gcc versions
Summary: The changes introduced for 6975078 assign badHeapOopVal to the _allocation field in the ResourceObj class. In 32 bit linux builds with certain versions of gcc this assignment will be flagged as an error while compiling allocation.cpp. In 32 bit builds the constant value badHeapOopVal (which is cast to an intptr_t) is negative. The _allocation field is typed as an unsigned intptr_t and gcc catches this as an error.
Reviewed-by: jcoomes, ysr, phh
author | johnc |
---|---|
date | Wed, 18 Aug 2010 10:59:06 -0700 |
parents | 60a14ad85270 |
children | f95d63e2154a |
rev | line source |
---|---|
0 | 1 /* |
1552
c18cbe5936b8
6941466: Oracle rebranding changes for Hotspot repositories
trims
parents:
1080
diff
changeset
|
2 * Copyright (c) 1997, 2009, Oracle and/or its affiliates. All rights reserved. |
0 | 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. |
4 * | |
5 * This code is free software; you can redistribute it and/or modify it | |
6 * under the terms of the GNU General Public License version 2 only, as | |
7 * published by the Free Software Foundation. | |
8 * | |
9 * This code is distributed in the hope that it will be useful, but WITHOUT | |
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
12 * version 2 for more details (a copy is included in the LICENSE file that | |
13 * accompanied this code). | |
14 * | |
15 * You should have received a copy of the GNU General Public License version | |
16 * 2 along with this work; if not, write to the Free Software Foundation, | |
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. | |
18 * | |
1552
c18cbe5936b8
6941466: Oracle rebranding changes for Hotspot repositories
trims
parents:
1080
diff
changeset
|
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA |
c18cbe5936b8
6941466: Oracle rebranding changes for Hotspot repositories
trims
parents:
1080
diff
changeset
|
20 * or visit www.oracle.com if you need additional information or have any |
c18cbe5936b8
6941466: Oracle rebranding changes for Hotspot repositories
trims
parents:
1080
diff
changeset
|
21 * questions. |
0 | 22 * |
23 */ | |
24 | |
25 // Portions of code courtesy of Clifford Click | |
26 | |
27 // Optimization - Graph Style | |
28 | |
29 #include "incls/_precompiled.incl" | |
30 #include "incls/_callnode.cpp.incl" | |
31 | |
32 //============================================================================= | |
33 uint StartNode::size_of() const { return sizeof(*this); } | |
34 uint StartNode::cmp( const Node &n ) const | |
35 { return _domain == ((StartNode&)n)._domain; } | |
36 const Type *StartNode::bottom_type() const { return _domain; } | |
37 const Type *StartNode::Value(PhaseTransform *phase) const { return _domain; } | |
38 #ifndef PRODUCT | |
39 void StartNode::dump_spec(outputStream *st) const { st->print(" #"); _domain->dump_on(st);} | |
40 #endif | |
41 | |
42 //------------------------------Ideal------------------------------------------ | |
43 Node *StartNode::Ideal(PhaseGVN *phase, bool can_reshape){ | |
44 return remove_dead_region(phase, can_reshape) ? this : NULL; | |
45 } | |
46 | |
47 //------------------------------calling_convention----------------------------- | |
48 void StartNode::calling_convention( BasicType* sig_bt, VMRegPair *parm_regs, uint argcnt ) const { | |
49 Matcher::calling_convention( sig_bt, parm_regs, argcnt, false ); | |
50 } | |
51 | |
52 //------------------------------Registers-------------------------------------- | |
53 const RegMask &StartNode::in_RegMask(uint) const { | |
54 return RegMask::Empty; | |
55 } | |
56 | |
57 //------------------------------match------------------------------------------ | |
58 // Construct projections for incoming parameters, and their RegMask info | |
59 Node *StartNode::match( const ProjNode *proj, const Matcher *match ) { | |
60 switch (proj->_con) { | |
61 case TypeFunc::Control: | |
62 case TypeFunc::I_O: | |
63 case TypeFunc::Memory: | |
64 return new (match->C, 1) MachProjNode(this,proj->_con,RegMask::Empty,MachProjNode::unmatched_proj); | |
65 case TypeFunc::FramePtr: | |
66 return new (match->C, 1) MachProjNode(this,proj->_con,Matcher::c_frame_ptr_mask, Op_RegP); | |
67 case TypeFunc::ReturnAdr: | |
68 return new (match->C, 1) MachProjNode(this,proj->_con,match->_return_addr_mask,Op_RegP); | |
69 case TypeFunc::Parms: | |
70 default: { | |
71 uint parm_num = proj->_con - TypeFunc::Parms; | |
72 const Type *t = _domain->field_at(proj->_con); | |
73 if (t->base() == Type::Half) // 2nd half of Longs and Doubles | |
74 return new (match->C, 1) ConNode(Type::TOP); | |
75 uint ideal_reg = Matcher::base2reg[t->base()]; | |
76 RegMask &rm = match->_calling_convention_mask[parm_num]; | |
77 return new (match->C, 1) MachProjNode(this,proj->_con,rm,ideal_reg); | |
78 } | |
79 } | |
80 return NULL; | |
81 } | |
82 | |
83 //------------------------------StartOSRNode---------------------------------- | |
84 // The method start node for an on stack replacement adapter | |
85 | |
86 //------------------------------osr_domain----------------------------- | |
87 const TypeTuple *StartOSRNode::osr_domain() { | |
88 const Type **fields = TypeTuple::fields(2); | |
89 fields[TypeFunc::Parms+0] = TypeRawPtr::BOTTOM; // address of osr buffer | |
90 | |
91 return TypeTuple::make(TypeFunc::Parms+1, fields); | |
92 } | |
93 | |
94 //============================================================================= | |
95 const char * const ParmNode::names[TypeFunc::Parms+1] = { | |
96 "Control", "I_O", "Memory", "FramePtr", "ReturnAdr", "Parms" | |
97 }; | |
98 | |
99 #ifndef PRODUCT | |
100 void ParmNode::dump_spec(outputStream *st) const { | |
101 if( _con < TypeFunc::Parms ) { | |
102 st->print(names[_con]); | |
103 } else { | |
104 st->print("Parm%d: ",_con-TypeFunc::Parms); | |
105 // Verbose and WizardMode dump bottom_type for all nodes | |
106 if( !Verbose && !WizardMode ) bottom_type()->dump_on(st); | |
107 } | |
108 } | |
109 #endif | |
110 | |
111 uint ParmNode::ideal_reg() const { | |
112 switch( _con ) { | |
113 case TypeFunc::Control : // fall through | |
114 case TypeFunc::I_O : // fall through | |
115 case TypeFunc::Memory : return 0; | |
116 case TypeFunc::FramePtr : // fall through | |
117 case TypeFunc::ReturnAdr: return Op_RegP; | |
118 default : assert( _con > TypeFunc::Parms, "" ); | |
119 // fall through | |
120 case TypeFunc::Parms : { | |
121 // Type of argument being passed | |
122 const Type *t = in(0)->as_Start()->_domain->field_at(_con); | |
123 return Matcher::base2reg[t->base()]; | |
124 } | |
125 } | |
126 ShouldNotReachHere(); | |
127 return 0; | |
128 } | |
129 | |
130 //============================================================================= | |
131 ReturnNode::ReturnNode(uint edges, Node *cntrl, Node *i_o, Node *memory, Node *frameptr, Node *retadr ) : Node(edges) { | |
132 init_req(TypeFunc::Control,cntrl); | |
133 init_req(TypeFunc::I_O,i_o); | |
134 init_req(TypeFunc::Memory,memory); | |
135 init_req(TypeFunc::FramePtr,frameptr); | |
136 init_req(TypeFunc::ReturnAdr,retadr); | |
137 } | |
138 | |
139 Node *ReturnNode::Ideal(PhaseGVN *phase, bool can_reshape){ | |
140 return remove_dead_region(phase, can_reshape) ? this : NULL; | |
141 } | |
142 | |
143 const Type *ReturnNode::Value( PhaseTransform *phase ) const { | |
144 return ( phase->type(in(TypeFunc::Control)) == Type::TOP) | |
145 ? Type::TOP | |
146 : Type::BOTTOM; | |
147 } | |
148 | |
149 // Do we Match on this edge index or not? No edges on return nodes | |
150 uint ReturnNode::match_edge(uint idx) const { | |
151 return 0; | |
152 } | |
153 | |
154 | |
155 #ifndef PRODUCT | |
156 void ReturnNode::dump_req() const { | |
157 // Dump the required inputs, enclosed in '(' and ')' | |
158 uint i; // Exit value of loop | |
159 for( i=0; i<req(); i++ ) { // For all required inputs | |
160 if( i == TypeFunc::Parms ) tty->print("returns"); | |
161 if( in(i) ) tty->print("%c%d ", Compile::current()->node_arena()->contains(in(i)) ? ' ' : 'o', in(i)->_idx); | |
162 else tty->print("_ "); | |
163 } | |
164 } | |
165 #endif | |
166 | |
167 //============================================================================= | |
168 RethrowNode::RethrowNode( | |
169 Node* cntrl, | |
170 Node* i_o, | |
171 Node* memory, | |
172 Node* frameptr, | |
173 Node* ret_adr, | |
174 Node* exception | |
175 ) : Node(TypeFunc::Parms + 1) { | |
176 init_req(TypeFunc::Control , cntrl ); | |
177 init_req(TypeFunc::I_O , i_o ); | |
178 init_req(TypeFunc::Memory , memory ); | |
179 init_req(TypeFunc::FramePtr , frameptr ); | |
180 init_req(TypeFunc::ReturnAdr, ret_adr); | |
181 init_req(TypeFunc::Parms , exception); | |
182 } | |
183 | |
184 Node *RethrowNode::Ideal(PhaseGVN *phase, bool can_reshape){ | |
185 return remove_dead_region(phase, can_reshape) ? this : NULL; | |
186 } | |
187 | |
188 const Type *RethrowNode::Value( PhaseTransform *phase ) const { | |
189 return (phase->type(in(TypeFunc::Control)) == Type::TOP) | |
190 ? Type::TOP | |
191 : Type::BOTTOM; | |
192 } | |
193 | |
194 uint RethrowNode::match_edge(uint idx) const { | |
195 return 0; | |
196 } | |
197 | |
198 #ifndef PRODUCT | |
199 void RethrowNode::dump_req() const { | |
200 // Dump the required inputs, enclosed in '(' and ')' | |
201 uint i; // Exit value of loop | |
202 for( i=0; i<req(); i++ ) { // For all required inputs | |
203 if( i == TypeFunc::Parms ) tty->print("exception"); | |
204 if( in(i) ) tty->print("%c%d ", Compile::current()->node_arena()->contains(in(i)) ? ' ' : 'o', in(i)->_idx); | |
205 else tty->print("_ "); | |
206 } | |
207 } | |
208 #endif | |
209 | |
210 //============================================================================= | |
211 // Do we Match on this edge index or not? Match only target address & method | |
212 uint TailCallNode::match_edge(uint idx) const { | |
213 return TypeFunc::Parms <= idx && idx <= TypeFunc::Parms+1; | |
214 } | |
215 | |
216 //============================================================================= | |
217 // Do we Match on this edge index or not? Match only target address & oop | |
218 uint TailJumpNode::match_edge(uint idx) const { | |
219 return TypeFunc::Parms <= idx && idx <= TypeFunc::Parms+1; | |
220 } | |
221 | |
222 //============================================================================= | |
223 JVMState::JVMState(ciMethod* method, JVMState* caller) { | |
224 assert(method != NULL, "must be valid call site"); | |
225 _method = method; | |
900
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
226 _reexecute = Reexecute_Undefined; |
0 | 227 debug_only(_bci = -99); // random garbage value |
228 debug_only(_map = (SafePointNode*)-1); | |
229 _caller = caller; | |
230 _depth = 1 + (caller == NULL ? 0 : caller->depth()); | |
231 _locoff = TypeFunc::Parms; | |
232 _stkoff = _locoff + _method->max_locals(); | |
233 _monoff = _stkoff + _method->max_stack(); | |
63
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
234 _scloff = _monoff; |
0 | 235 _endoff = _monoff; |
236 _sp = 0; | |
237 } | |
238 JVMState::JVMState(int stack_size) { | |
239 _method = NULL; | |
240 _bci = InvocationEntryBci; | |
900
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
241 _reexecute = Reexecute_Undefined; |
0 | 242 debug_only(_map = (SafePointNode*)-1); |
243 _caller = NULL; | |
244 _depth = 1; | |
245 _locoff = TypeFunc::Parms; | |
246 _stkoff = _locoff; | |
247 _monoff = _stkoff + stack_size; | |
63
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
248 _scloff = _monoff; |
0 | 249 _endoff = _monoff; |
250 _sp = 0; | |
251 } | |
252 | |
253 //--------------------------------of_depth------------------------------------- | |
254 JVMState* JVMState::of_depth(int d) const { | |
255 const JVMState* jvmp = this; | |
256 assert(0 < d && (uint)d <= depth(), "oob"); | |
257 for (int skip = depth() - d; skip > 0; skip--) { | |
258 jvmp = jvmp->caller(); | |
259 } | |
260 assert(jvmp->depth() == (uint)d, "found the right one"); | |
261 return (JVMState*)jvmp; | |
262 } | |
263 | |
264 //-----------------------------same_calls_as----------------------------------- | |
265 bool JVMState::same_calls_as(const JVMState* that) const { | |
266 if (this == that) return true; | |
267 if (this->depth() != that->depth()) return false; | |
268 const JVMState* p = this; | |
269 const JVMState* q = that; | |
270 for (;;) { | |
271 if (p->_method != q->_method) return false; | |
272 if (p->_method == NULL) return true; // bci is irrelevant | |
273 if (p->_bci != q->_bci) return false; | |
900
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
274 if (p->_reexecute != q->_reexecute) return false; |
0 | 275 p = p->caller(); |
276 q = q->caller(); | |
277 if (p == q) return true; | |
278 assert(p != NULL && q != NULL, "depth check ensures we don't run off end"); | |
279 } | |
280 } | |
281 | |
282 //------------------------------debug_start------------------------------------ | |
283 uint JVMState::debug_start() const { | |
284 debug_only(JVMState* jvmroot = of_depth(1)); | |
285 assert(jvmroot->locoff() <= this->locoff(), "youngest JVMState must be last"); | |
286 return of_depth(1)->locoff(); | |
287 } | |
288 | |
289 //-------------------------------debug_end------------------------------------- | |
290 uint JVMState::debug_end() const { | |
291 debug_only(JVMState* jvmroot = of_depth(1)); | |
292 assert(jvmroot->endoff() <= this->endoff(), "youngest JVMState must be last"); | |
293 return endoff(); | |
294 } | |
295 | |
296 //------------------------------debug_depth------------------------------------ | |
297 uint JVMState::debug_depth() const { | |
298 uint total = 0; | |
299 for (const JVMState* jvmp = this; jvmp != NULL; jvmp = jvmp->caller()) { | |
300 total += jvmp->debug_size(); | |
301 } | |
302 return total; | |
303 } | |
304 | |
63
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
305 #ifndef PRODUCT |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
306 |
0 | 307 //------------------------------format_helper---------------------------------- |
308 // Given an allocation (a Chaitin object) and a Node decide if the Node carries | |
309 // any defined value or not. If it does, print out the register or constant. | |
63
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
310 static void format_helper( PhaseRegAlloc *regalloc, outputStream* st, Node *n, const char *msg, uint i, GrowableArray<SafePointScalarObjectNode*> *scobjs ) { |
0 | 311 if (n == NULL) { st->print(" NULL"); return; } |
63
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
312 if (n->is_SafePointScalarObject()) { |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
313 // Scalar replacement. |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
314 SafePointScalarObjectNode* spobj = n->as_SafePointScalarObject(); |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
315 scobjs->append_if_missing(spobj); |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
316 int sco_n = scobjs->find(spobj); |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
317 assert(sco_n >= 0, ""); |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
318 st->print(" %s%d]=#ScObj" INT32_FORMAT, msg, i, sco_n); |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
319 return; |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
320 } |
0 | 321 if( OptoReg::is_valid(regalloc->get_reg_first(n))) { // Check for undefined |
322 char buf[50]; | |
323 regalloc->dump_register(n,buf); | |
324 st->print(" %s%d]=%s",msg,i,buf); | |
325 } else { // No register, but might be constant | |
326 const Type *t = n->bottom_type(); | |
327 switch (t->base()) { | |
328 case Type::Int: | |
329 st->print(" %s%d]=#"INT32_FORMAT,msg,i,t->is_int()->get_con()); | |
330 break; | |
331 case Type::AnyPtr: | |
332 assert( t == TypePtr::NULL_PTR, "" ); | |
333 st->print(" %s%d]=#NULL",msg,i); | |
334 break; | |
335 case Type::AryPtr: | |
336 case Type::KlassPtr: | |
337 case Type::InstPtr: | |
338 st->print(" %s%d]=#Ptr" INTPTR_FORMAT,msg,i,t->isa_oopptr()->const_oop()); | |
339 break; | |
331
cecd8eb4e0ca
6706829: Compressed Oops: add debug info for narrow oops
kvn
parents:
305
diff
changeset
|
340 case Type::NarrowOop: |
cecd8eb4e0ca
6706829: Compressed Oops: add debug info for narrow oops
kvn
parents:
305
diff
changeset
|
341 st->print(" %s%d]=#Ptr" INTPTR_FORMAT,msg,i,t->make_ptr()->isa_oopptr()->const_oop()); |
cecd8eb4e0ca
6706829: Compressed Oops: add debug info for narrow oops
kvn
parents:
305
diff
changeset
|
342 break; |
0 | 343 case Type::RawPtr: |
344 st->print(" %s%d]=#Raw" INTPTR_FORMAT,msg,i,t->is_rawptr()); | |
345 break; | |
346 case Type::DoubleCon: | |
347 st->print(" %s%d]=#%fD",msg,i,t->is_double_constant()->_d); | |
348 break; | |
349 case Type::FloatCon: | |
350 st->print(" %s%d]=#%fF",msg,i,t->is_float_constant()->_f); | |
351 break; | |
352 case Type::Long: | |
353 st->print(" %s%d]=#"INT64_FORMAT,msg,i,t->is_long()->get_con()); | |
354 break; | |
355 case Type::Half: | |
356 case Type::Top: | |
357 st->print(" %s%d]=_",msg,i); | |
358 break; | |
359 default: ShouldNotReachHere(); | |
360 } | |
361 } | |
362 } | |
363 | |
364 //------------------------------format----------------------------------------- | |
365 void JVMState::format(PhaseRegAlloc *regalloc, const Node *n, outputStream* st) const { | |
366 st->print(" #"); | |
367 if( _method ) { | |
368 _method->print_short_name(st); | |
369 st->print(" @ bci:%d ",_bci); | |
370 } else { | |
371 st->print_cr(" runtime stub "); | |
372 return; | |
373 } | |
374 if (n->is_MachSafePoint()) { | |
63
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
375 GrowableArray<SafePointScalarObjectNode*> scobjs; |
0 | 376 MachSafePointNode *mcall = n->as_MachSafePoint(); |
377 uint i; | |
378 // Print locals | |
379 for( i = 0; i < (uint)loc_size(); i++ ) | |
63
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
380 format_helper( regalloc, st, mcall->local(this, i), "L[", i, &scobjs ); |
0 | 381 // Print stack |
382 for (i = 0; i < (uint)stk_size(); i++) { | |
383 if ((uint)(_stkoff + i) >= mcall->len()) | |
384 st->print(" oob "); | |
385 else | |
63
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
386 format_helper( regalloc, st, mcall->stack(this, i), "STK[", i, &scobjs ); |
0 | 387 } |
388 for (i = 0; (int)i < nof_monitors(); i++) { | |
389 Node *box = mcall->monitor_box(this, i); | |
390 Node *obj = mcall->monitor_obj(this, i); | |
391 if ( OptoReg::is_valid(regalloc->get_reg_first(box)) ) { | |
392 while( !box->is_BoxLock() ) box = box->in(1); | |
63
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
393 format_helper( regalloc, st, box, "MON-BOX[", i, &scobjs ); |
0 | 394 } else { |
395 OptoReg::Name box_reg = BoxLockNode::stack_slot(box); | |
396 st->print(" MON-BOX%d=%s+%d", | |
397 i, | |
398 OptoReg::regname(OptoReg::c_frame_pointer), | |
399 regalloc->reg2offset(box_reg)); | |
400 } | |
460
424f9bfe6b96
6775880: EA +DeoptimizeALot: assert(mon_info->owner()->is_locked(),"object must be locked now")
kvn
parents:
420
diff
changeset
|
401 const char* obj_msg = "MON-OBJ["; |
424f9bfe6b96
6775880: EA +DeoptimizeALot: assert(mon_info->owner()->is_locked(),"object must be locked now")
kvn
parents:
420
diff
changeset
|
402 if (EliminateLocks) { |
424f9bfe6b96
6775880: EA +DeoptimizeALot: assert(mon_info->owner()->is_locked(),"object must be locked now")
kvn
parents:
420
diff
changeset
|
403 while( !box->is_BoxLock() ) box = box->in(1); |
424f9bfe6b96
6775880: EA +DeoptimizeALot: assert(mon_info->owner()->is_locked(),"object must be locked now")
kvn
parents:
420
diff
changeset
|
404 if (box->as_BoxLock()->is_eliminated()) |
424f9bfe6b96
6775880: EA +DeoptimizeALot: assert(mon_info->owner()->is_locked(),"object must be locked now")
kvn
parents:
420
diff
changeset
|
405 obj_msg = "MON-OBJ(LOCK ELIMINATED)["; |
424f9bfe6b96
6775880: EA +DeoptimizeALot: assert(mon_info->owner()->is_locked(),"object must be locked now")
kvn
parents:
420
diff
changeset
|
406 } |
424f9bfe6b96
6775880: EA +DeoptimizeALot: assert(mon_info->owner()->is_locked(),"object must be locked now")
kvn
parents:
420
diff
changeset
|
407 format_helper( regalloc, st, obj, obj_msg, i, &scobjs ); |
63
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
408 } |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
409 |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
410 for (i = 0; i < (uint)scobjs.length(); i++) { |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
411 // Scalar replaced objects. |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
412 st->print_cr(""); |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
413 st->print(" # ScObj" INT32_FORMAT " ", i); |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
414 SafePointScalarObjectNode* spobj = scobjs.at(i); |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
415 ciKlass* cik = spobj->bottom_type()->is_oopptr()->klass(); |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
416 assert(cik->is_instance_klass() || |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
417 cik->is_array_klass(), "Not supported allocation."); |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
418 ciInstanceKlass *iklass = NULL; |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
419 if (cik->is_instance_klass()) { |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
420 cik->print_name_on(st); |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
421 iklass = cik->as_instance_klass(); |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
422 } else if (cik->is_type_array_klass()) { |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
423 cik->as_array_klass()->base_element_type()->print_name_on(st); |
1040
873ec3787992
6892186: SA does not dump debug info for scalar replaced objects
kvn
parents:
931
diff
changeset
|
424 st->print("[%d]", spobj->n_fields()); |
63
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
425 } else if (cik->is_obj_array_klass()) { |
1040
873ec3787992
6892186: SA does not dump debug info for scalar replaced objects
kvn
parents:
931
diff
changeset
|
426 ciKlass* cie = cik->as_obj_array_klass()->base_element_klass(); |
873ec3787992
6892186: SA does not dump debug info for scalar replaced objects
kvn
parents:
931
diff
changeset
|
427 if (cie->is_instance_klass()) { |
873ec3787992
6892186: SA does not dump debug info for scalar replaced objects
kvn
parents:
931
diff
changeset
|
428 cie->print_name_on(st); |
873ec3787992
6892186: SA does not dump debug info for scalar replaced objects
kvn
parents:
931
diff
changeset
|
429 } else if (cie->is_type_array_klass()) { |
873ec3787992
6892186: SA does not dump debug info for scalar replaced objects
kvn
parents:
931
diff
changeset
|
430 cie->as_array_klass()->base_element_type()->print_name_on(st); |
873ec3787992
6892186: SA does not dump debug info for scalar replaced objects
kvn
parents:
931
diff
changeset
|
431 } else { |
873ec3787992
6892186: SA does not dump debug info for scalar replaced objects
kvn
parents:
931
diff
changeset
|
432 ShouldNotReachHere(); |
63
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
433 } |
1040
873ec3787992
6892186: SA does not dump debug info for scalar replaced objects
kvn
parents:
931
diff
changeset
|
434 st->print("[%d]", spobj->n_fields()); |
873ec3787992
6892186: SA does not dump debug info for scalar replaced objects
kvn
parents:
931
diff
changeset
|
435 int ndim = cik->as_array_klass()->dimension() - 1; |
63
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
436 while (ndim-- > 0) { |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
437 st->print("[]"); |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
438 } |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
439 } |
1040
873ec3787992
6892186: SA does not dump debug info for scalar replaced objects
kvn
parents:
931
diff
changeset
|
440 st->print("={"); |
63
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
441 uint nf = spobj->n_fields(); |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
442 if (nf > 0) { |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
443 uint first_ind = spobj->first_index(); |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
444 Node* fld_node = mcall->in(first_ind); |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
445 ciField* cifield; |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
446 if (iklass != NULL) { |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
447 st->print(" ["); |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
448 cifield = iklass->nonstatic_field_at(0); |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
449 cifield->print_name_on(st); |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
450 format_helper( regalloc, st, fld_node, ":", 0, &scobjs ); |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
451 } else { |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
452 format_helper( regalloc, st, fld_node, "[", 0, &scobjs ); |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
453 } |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
454 for (uint j = 1; j < nf; j++) { |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
455 fld_node = mcall->in(first_ind+j); |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
456 if (iklass != NULL) { |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
457 st->print(", ["); |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
458 cifield = iklass->nonstatic_field_at(j); |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
459 cifield->print_name_on(st); |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
460 format_helper( regalloc, st, fld_node, ":", j, &scobjs ); |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
461 } else { |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
462 format_helper( regalloc, st, fld_node, ", [", j, &scobjs ); |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
463 } |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
464 } |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
465 } |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
466 st->print(" }"); |
0 | 467 } |
468 } | |
469 st->print_cr(""); | |
470 if (caller() != NULL) caller()->format(regalloc, n, st); | |
471 } | |
472 | |
63
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
473 |
// Print a compact one-line description of this JVM state: the (possibly
// package-stripped) method name, the bci, and the reexecute flag, then
// recurse up the caller chain.
void JVMState::dump_spec(outputStream *st) const {
  if (_method != NULL) {
    bool printed = false;
    if (!Verbose) {
      // The JVMS dumps make really, really long lines.
      // Take out the most boring parts, which are the package prefixes.
      char buf[500];
      stringStream namest(buf, sizeof(buf));
      _method->print_short_name(&namest);
      if (namest.count() < sizeof(buf)) {
        const char* name = namest.base();
        if (name[0] == ' ') ++name;
        const char* endcn = strchr(name, ':'); // end of class name
        if (endcn == NULL) endcn = strchr(name, '(');
        if (endcn == NULL) endcn = name + strlen(name);
        // Back up from the class-name end to just past the last '.' or '/',
        // leaving only the unqualified class name plus the method part.
        while (endcn > name && endcn[-1] != '.' && endcn[-1] != '/')
          --endcn;
        st->print(" %s", endcn);
        printed = true;
      }
    }
    if (!printed)
      _method->print_short_name(st);  // fallback: full short name (Verbose or buffer overflow)
    st->print(" @ bci:%d",_bci);
    if(_reexecute == Reexecute_True)
      st->print(" reexecute");
  } else {
    st->print(" runtime stub");      // no method => runtime-stub frame
  }
  if (caller() != NULL) caller()->dump_spec(st);
}
505 | |
63
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
506 |
// Full (multi-line) dump of this JVM state: the mapped SafePoint node and any
// chained exception states, the debug-info offsets, the method name and the
// bytecode at the current bci, then the caller chain.
void JVMState::dump_on(outputStream* st) const {
  // Low bit set in _map appears to be used as a tag — TODO confirm against JVMState users.
  if (_map && !((uintptr_t)_map & 1)) {
    if (_map->len() > _map->req()) { // _map->has_exceptions()
      Node* ex = _map->in(_map->req()); // _map->next_exception()
      // skip the first one; it's already being printed
      while (ex != NULL && ex->len() > ex->req()) {
        ex = ex->in(ex->req()); // ex->next_exception()
        ex->dump(1);
      }
    }
    _map->dump(2);
  }
  st->print("JVMS depth=%d loc=%d stk=%d mon=%d scalar=%d end=%d mondepth=%d sp=%d bci=%d reexecute=%s method=",
             depth(), locoff(), stkoff(), monoff(), scloff(), endoff(), monitor_depth(), sp(), bci(), should_reexecute()?"true":"false");
  if (_method == NULL) {
    st->print_cr("(none)");
  } else {
    _method->print_name(st);
    st->cr();
    if (bci() >= 0 && bci() < _method->code_size()) {
      // Show the single bytecode at the current bci.
      st->print("    bc: ");
      _method->print_codes_on(bci(), bci()+1, st);
    }
  }
  if (caller() != NULL) {
    caller()->dump_on(st);
  }
}
535 | |
// Extra way to dump a jvms from the debugger,
// to avoid a bug with C++ member function calls.
void dump_jvms(JVMState* jvms) {
  jvms->dump();
}
541 #endif | |
542 | |
//--------------------------clone_shallow--------------------------------------
// Make a one-level copy of this JVMState.  All scalar fields (bci, reexecute
// flag, debug-info offsets, sp) are copied; the caller chain and the map node
// are shared with the original, not cloned.
JVMState* JVMState::clone_shallow(Compile* C) const {
  JVMState* n = has_method() ? new (C) JVMState(_method, _caller) : new (C) JVMState(0);
  n->set_bci(_bci);
  n->_reexecute = _reexecute;
  n->set_locoff(_locoff);
  n->set_stkoff(_stkoff);
  n->set_monoff(_monoff);
  n->set_scloff(_scloff);
  n->set_endoff(_endoff);
  n->set_sp(_sp);
  n->set_map(_map);
  return n;
}
557 | |
558 //---------------------------clone_deep---------------------------------------- | |
559 JVMState* JVMState::clone_deep(Compile* C) const { | |
560 JVMState* n = clone_shallow(C); | |
561 for (JVMState* p = n; p->_caller != NULL; p = p->_caller) { | |
562 p->_caller = p->_caller->clone_shallow(C); | |
563 } | |
564 assert(n->depth() == depth(), "sanity"); | |
565 assert(n->debug_depth() == debug_depth(), "sanity"); | |
566 return n; | |
567 } | |
568 | |
//=============================================================================
// Two calls are GVN-equal only if they have the same function type and the
// same JVM state.
uint CallNode::cmp( const Node &n ) const
{ return _tf == ((CallNode&)n)._tf && _jvms == ((CallNode&)n)._jvms; }
572 #ifndef PRODUCT | |
573 void CallNode::dump_req() const { | |
574 // Dump the required inputs, enclosed in '(' and ')' | |
575 uint i; // Exit value of loop | |
576 for( i=0; i<req(); i++ ) { // For all required inputs | |
577 if( i == TypeFunc::Parms ) tty->print("("); | |
578 if( in(i) ) tty->print("%c%d ", Compile::current()->node_arena()->contains(in(i)) ? ' ' : 'o', in(i)->_idx); | |
579 else tty->print("_ "); | |
580 } | |
581 tty->print(")"); | |
582 } | |
583 | |
584 void CallNode::dump_spec(outputStream *st) const { | |
585 st->print(" "); | |
586 tf()->dump_on(st); | |
587 if (_cnt != COUNT_UNKNOWN) st->print(" C=%f",_cnt); | |
588 if (jvms() != NULL) jvms()->dump_spec(st); | |
589 } | |
590 #endif | |
591 | |
// A call's type is its declared return tuple.
const Type *CallNode::bottom_type() const { return tf()->range(); }
// TOP if control is dead, otherwise the declared return tuple.
const Type *CallNode::Value(PhaseTransform *phase) const {
  if (phase->type(in(0)) == Type::TOP) return Type::TOP;
  return tf()->range();
}
597 | |
//------------------------------calling_convention-----------------------------
// Assign registers/stack slots to the outgoing arguments of this call.
void CallNode::calling_convention( BasicType* sig_bt, VMRegPair *parm_regs, uint argcnt ) const {
  // Use the standard compiler calling convention
  Matcher::calling_convention( sig_bt, parm_regs, argcnt, true );
}
603 | |
604 | |
//------------------------------match------------------------------------------
// Construct projections for control, I/O, memory-fields, ..., and
// return result(s) along with their RegMask info
Node *CallNode::match( const ProjNode *proj, const Matcher *match ) {
  switch (proj->_con) {
  case TypeFunc::Control:
  case TypeFunc::I_O:
  case TypeFunc::Memory:
    // Control/IO/memory projections carry no register value.
    return new (match->C, 1) MachProjNode(this,proj->_con,RegMask::Empty,MachProjNode::unmatched_proj);

  case TypeFunc::Parms+1:       // For LONG & DOUBLE returns
    assert(tf()->_range->field_at(TypeFunc::Parms+1) == Type::HALF, "");
    // 2nd half of doubles and longs
    return new (match->C, 1) MachProjNode(this,proj->_con, RegMask::Empty, (uint)OptoReg::Bad);

  case TypeFunc::Parms: {       // Normal returns
    uint ideal_reg = Matcher::base2reg[tf()->range()->field_at(TypeFunc::Parms)->base()];
    OptoRegPair regs = is_CallRuntime()
      ? match->c_return_value(ideal_reg,true)  // Calls into C runtime
      : match-> return_value(ideal_reg,true);  // Calls into compiled Java code
    RegMask rm = RegMask(regs.first());
    // A two-register return value also claims the second register.
    if( OptoReg::is_valid(regs.second()) )
      rm.Insert( regs.second() );
    return new (match->C, 1) MachProjNode(this,proj->_con,rm,ideal_reg);
  }

  case TypeFunc::ReturnAdr:
  case TypeFunc::FramePtr:
  default:
    ShouldNotReachHere();
  }
  return NULL;
}
638 | |
// Do we Match on this edge index or not?  Match no edges
uint CallNode::match_edge(uint idx) const {
  return 0;
}
643 | |
//
// Determine whether the call could modify the field of the specified
// instance at the specified offset.
//
bool CallNode::may_modify(const TypePtr *addr_t, PhaseTransform *phase) {
  const TypeOopPtr *adrInst_t = addr_t->isa_oopptr();

  // If not an OopPtr or not an instance type, assume the worst.
  // Note: currently this method is called only for instance types.
  if (adrInst_t == NULL || !adrInst_t->is_known_instance()) {
    return true;
  }
  // The instance_id is set only for scalar-replaceable allocations which
  // are not passed as arguments according to Escape Analysis.
  return false;
}
660 | |
661 // Does this call have a direct reference to n other than debug information? | |
662 bool CallNode::has_non_debug_use(Node *n) { | |
663 const TypeTuple * d = tf()->domain(); | |
664 for (uint i = TypeFunc::Parms; i < d->cnt(); i++) { | |
665 Node *arg = in(i); | |
666 if (arg == n) { | |
667 return true; | |
668 } | |
669 } | |
670 return false; | |
671 } | |
672 | |
673 // Returns the unique CheckCastPP of a call | |
674 // or 'this' if there are several CheckCastPP | |
675 // or returns NULL if there is no one. | |
676 Node *CallNode::result_cast() { | |
677 Node *cast = NULL; | |
678 | |
679 Node *p = proj_out(TypeFunc::Parms); | |
680 if (p == NULL) | |
681 return NULL; | |
682 | |
683 for (DUIterator_Fast imax, i = p->fast_outs(imax); i < imax; i++) { | |
684 Node *use = p->fast_out(i); | |
685 if (use->is_CheckCastPP()) { | |
686 if (cast != NULL) { | |
687 return this; // more than 1 CheckCastPP | |
688 } | |
689 cast = use; | |
690 } | |
691 } | |
692 return cast; | |
693 } | |
694 | |
695 | |
1080
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
1040
diff
changeset
|
// Collect this call's control, I/O, memory and result projections (including
// the catch-all variants behind the Catch node) into *projs.
void CallNode::extract_projections(CallProjections* projs, bool separate_io_proj) {
  projs->fallthrough_proj = NULL;
  projs->fallthrough_catchproj = NULL;
  projs->fallthrough_ioproj = NULL;
  projs->catchall_ioproj = NULL;
  projs->catchall_catchproj = NULL;
  projs->fallthrough_memproj = NULL;
  projs->catchall_memproj = NULL;
  projs->resproj = NULL;
  projs->exobj = NULL;

  for (DUIterator_Fast imax, i = fast_outs(imax); i < imax; i++) {
    ProjNode *pn = fast_out(i)->as_Proj();
    if (pn->outcnt() == 0) continue;  // dead projection: nothing to record
    switch (pn->_con) {
    case TypeFunc::Control:
    {
      // For Control (fallthrough) and I_O (catch_all_index) we have CatchProj -> Catch -> Proj
      projs->fallthrough_proj = pn;
      DUIterator_Fast jmax, j = pn->fast_outs(jmax);
      const Node *cn = pn->fast_out(j);
      if (cn->is_Catch()) {
        ProjNode *cpn = NULL;
        for (DUIterator_Fast kmax, k = cn->fast_outs(kmax); k < kmax; k++) {
          cpn = cn->fast_out(k)->as_Proj();
          assert(cpn->is_CatchProj(), "must be a CatchProjNode");
          if (cpn->_con == CatchProjNode::fall_through_index)
            projs->fallthrough_catchproj = cpn;
          else {
            assert(cpn->_con == CatchProjNode::catch_all_index, "must be correct index.");
            projs->catchall_catchproj = cpn;
          }
        }
      }
      break;
    }
    case TypeFunc::I_O:
      if (pn->_is_io_use)
        projs->catchall_ioproj = pn;
      else
        projs->fallthrough_ioproj = pn;
      // Also locate the exception object produced on the exceptional path.
      for (DUIterator j = pn->outs(); pn->has_out(j); j++) {
        Node* e = pn->out(j);
        if (e->Opcode() == Op_CreateEx && e->in(0)->is_CatchProj()) {
          assert(projs->exobj == NULL, "only one");
          projs->exobj = e;
        }
      }
      break;
    case TypeFunc::Memory:
      if (pn->_is_io_use)
        projs->catchall_memproj = pn;
      else
        projs->fallthrough_memproj = pn;
      break;
    case TypeFunc::Parms:
      projs->resproj = pn;
      break;
    default:
      assert(false, "unexpected projection from allocation node.");
    }
  }

  // The resproj may not exist because the result could be ignored
  // and the exception object may not exist if an exception handler
  // swallows the exception but all the other must exist and be found.
  assert(projs->fallthrough_proj != NULL, "must be found");
  assert(projs->fallthrough_catchproj != NULL, "must be found");
  assert(projs->fallthrough_memproj != NULL, "must be found");
  assert(projs->fallthrough_ioproj != NULL, "must be found");
  assert(projs->catchall_catchproj != NULL, "must be found");
  if (separate_io_proj) {
    assert(projs->catchall_memproj != NULL, "must be found");
    assert(projs->catchall_ioproj != NULL, "must be found");
  }
}
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
1040
diff
changeset
|
772 |
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
1040
diff
changeset
|
773 |
//=============================================================================
uint CallJavaNode::size_of() const { return sizeof(*this); }
// Java calls are GVN-equal only if the generic CallNode state and the target
// method both match.
uint CallJavaNode::cmp( const Node &n ) const {
  CallJavaNode &call = (CallJavaNode&)n;
  return CallNode::cmp(call) && _method == call._method;
}
#ifndef PRODUCT
void CallJavaNode::dump_spec(outputStream *st) const {
  if( _method ) _method->print_short_name(st);
  CallNode::dump_spec(st);
}
#endif
786 | |
//=============================================================================
uint CallStaticJavaNode::size_of() const { return sizeof(*this); }
uint CallStaticJavaNode::cmp( const Node &n ) const {
  CallStaticJavaNode &call = (CallStaticJavaNode&)n;
  return CallJavaNode::cmp(call);
}

//----------------------------uncommon_trap_request----------------------------
// If this is an uncommon trap, return the request code, else zero.
int CallStaticJavaNode::uncommon_trap_request() const {
  if (_name != NULL && !strcmp(_name, "uncommon_trap")) {
    return extract_uncommon_trap_request(this);
  }
  return 0;
}
// Decode the trap-request constant from the first argument of an
// uncommon-trap call.  Returns 0 (after complaining) on a malformed call,
// which can legitimately happen while dumping a partially built graph.
int CallStaticJavaNode::extract_uncommon_trap_request(const Node* call) {
#ifndef PRODUCT
  if (!(call->req() > TypeFunc::Parms &&
        call->in(TypeFunc::Parms) != NULL &&
        call->in(TypeFunc::Parms)->is_Con())) {
    assert(_in_dump_cnt != 0, "OK if dumping");
    tty->print("[bad uncommon trap]");
    return 0;
  }
#endif
  return call->in(TypeFunc::Parms)->bottom_type()->is_int()->get_con();
}
814 | |
815 #ifndef PRODUCT | |
816 void CallStaticJavaNode::dump_spec(outputStream *st) const { | |
817 st->print("# Static "); | |
818 if (_name != NULL) { | |
819 st->print("%s", _name); | |
820 int trap_req = uncommon_trap_request(); | |
821 if (trap_req != 0) { | |
822 char buf[100]; | |
823 st->print("(%s)", | |
824 Deoptimization::format_trap_request(buf, sizeof(buf), | |
825 trap_req)); | |
826 } | |
827 st->print(" "); | |
828 } | |
829 CallJavaNode::dump_spec(st); | |
830 } | |
831 #endif | |
832 | |
//=============================================================================
uint CallDynamicJavaNode::size_of() const { return sizeof(*this); }
// Dynamic calls compare exactly like Java calls (CallNode state + method).
uint CallDynamicJavaNode::cmp( const Node &n ) const {
  CallDynamicJavaNode &call = (CallDynamicJavaNode&)n;
  return CallJavaNode::cmp(call);
}
#ifndef PRODUCT
void CallDynamicJavaNode::dump_spec(outputStream *st) const {
  st->print("# Dynamic ");
  CallJavaNode::dump_spec(st);
}
#endif
845 | |
846 //============================================================================= | |
847 uint CallRuntimeNode::size_of() const { return sizeof(*this); } | |
848 uint CallRuntimeNode::cmp( const Node &n ) const { | |
849 CallRuntimeNode &call = (CallRuntimeNode&)n; | |
850 return CallNode::cmp(call) && !strcmp(_name,call._name); | |
851 } | |
852 #ifndef PRODUCT | |
853 void CallRuntimeNode::dump_spec(outputStream *st) const { | |
854 st->print("# "); | |
855 st->print(_name); | |
856 CallNode::dump_spec(st); | |
857 } | |
858 #endif | |
859 | |
//------------------------------calling_convention-----------------------------
// Runtime calls use the native C calling convention for their arguments.
void CallRuntimeNode::calling_convention( BasicType* sig_bt, VMRegPair *parm_regs, uint argcnt ) const {
  Matcher::c_calling_convention( sig_bt, parm_regs, argcnt );
}
864 | |
865 //============================================================================= | |
866 //------------------------------calling_convention----------------------------- | |
867 | |
868 | |
869 //============================================================================= | |
870 #ifndef PRODUCT | |
871 void CallLeafNode::dump_spec(outputStream *st) const { | |
872 st->print("# "); | |
873 st->print(_name); | |
874 CallNode::dump_spec(st); | |
875 } | |
876 #endif | |
877 | |
878 //============================================================================= | |
879 | |
// Store c into local slot idx of the given JVM state.  If the overwritten
// slot is the high half of a two-word value (long/double), the low half is
// killed too so no dangling half remains in the debug info.
void SafePointNode::set_local(JVMState* jvms, uint idx, Node *c) {
  assert(verify_jvms(jvms), "jvms must match");
  int loc = jvms->locoff() + idx;
  if (in(loc)->is_top() && idx > 0 && !c->is_top() ) {
    // If current local idx is top then local idx - 1 could
    // be a long/double that needs to be killed since top could
    // represent the 2nd half of the long/double.
    uint ideal = in(loc -1)->ideal_reg();
    if (ideal == Op_RegD || ideal == Op_RegL) {
      // set other (low index) half to top
      set_req(loc - 1, in(loc));
    }
  }
  set_req(loc, c);
}
895 | |
uint SafePointNode::size_of() const { return sizeof(*this); }
// Safepoints are never value-numbered together: equal only to themselves.
uint SafePointNode::cmp( const Node &n ) const {
  return (&n == this);          // Always fail except on self
}
900 | |
901 //-------------------------set_next_exception---------------------------------- | |
902 void SafePointNode::set_next_exception(SafePointNode* n) { | |
903 assert(n == NULL || n->Opcode() == Op_SafePoint, "correct value for next_exception"); | |
904 if (len() == req()) { | |
905 if (n != NULL) add_prec(n); | |
906 } else { | |
907 set_prec(req(), n); | |
908 } | |
909 } | |
910 | |
911 | |
912 //----------------------------next_exception----------------------------------- | |
913 SafePointNode* SafePointNode::next_exception() const { | |
914 if (len() == req()) { | |
915 return NULL; | |
916 } else { | |
917 Node* n = in(req()); | |
918 assert(n == NULL || n->Opcode() == Op_SafePoint, "no other uses of prec edges"); | |
919 return (SafePointNode*) n; | |
920 } | |
921 } | |
922 | |
923 | |
//------------------------------Ideal------------------------------------------
// Skip over any collapsed Regions
Node *SafePointNode::Ideal(PhaseGVN *phase, bool can_reshape) {
  return remove_dead_region(phase, can_reshape) ? this : NULL;
}
929 | |
//------------------------------Identity---------------------------------------
// Remove obviously duplicate safepoints
Node *SafePointNode::Identity( PhaseTransform *phase ) {

  // If you have back to back safepoints, remove one
  if( in(TypeFunc::Control)->is_SafePoint() )
    return in(TypeFunc::Control);

  if( in(0)->is_Proj() ) {
    Node *n0 = in(0)->in(0);
    // Check if he is a call projection (except Leaf Call)
    if( n0->is_Catch() ) {
      // Hop over CatchProj -> Catch back to the call itself.
      n0 = n0->in(0)->in(0);
      assert( n0->is_Call(), "expect a call here" );
    }
    if( n0->is_Call() && n0->as_Call()->guaranteed_safepoint() ) {
      // Useless Safepoint, so remove it
      return in(TypeFunc::Control);
    }
  }

  return this;
}
953 | |
954 //------------------------------Value------------------------------------------ | |
955 const Type *SafePointNode::Value( PhaseTransform *phase ) const { | |
956 if( phase->type(in(0)) == Type::TOP ) return Type::TOP; | |
957 if( phase->eqv( in(0), this ) ) return Type::TOP; // Dead infinite loop | |
958 return Type::CONTROL; | |
959 } | |
960 | |
#ifndef PRODUCT
void SafePointNode::dump_spec(outputStream *st) const {
  st->print(" SafePoint ");
}
#endif
966 | |
// Register mask for input idx: control/frame inputs need no registers;
// everything past TypeFunc::Parms is debug info and gets the debug mask.
const RegMask &SafePointNode::in_RegMask(uint idx) const {
  if( idx < TypeFunc::Parms ) return RegMask::Empty;
  // Values outside the domain represent debug info
  return *(Compile::current()->matcher()->idealreg2debugmask[in(idx)->ideal_reg()]);
}
// A safepoint produces no value in any register.
const RegMask &SafePointNode::out_RegMask() const {
  return RegMask::Empty;
}
975 | |
976 | |
// Widen the expression-stack area of the debug info by grow_by slots,
// inserting top just before the monitor section and shifting the
// monitor/scalar/end offsets accordingly.
void SafePointNode::grow_stack(JVMState* jvms, uint grow_by) {
  assert((int)grow_by > 0, "sanity");
  int monoff = jvms->monoff();
  int scloff = jvms->scloff();
  int endoff = jvms->endoff();
  assert(endoff == (int)req(), "no other states or debug info after me");
  Node* top = Compile::current()->top();
  for (uint i = 0; i < grow_by; i++) {
    ins_req(monoff, top);
  }
  jvms->set_monoff(monoff + grow_by);
  jvms->set_scloff(scloff + grow_by);
  jvms->set_endoff(endoff + grow_by);
}
991 | |
// Append one monitor (box + object pair) to the debug info, just before the
// scalar-object section.
void SafePointNode::push_monitor(const FastLockNode *lock) {
  // Add a LockNode, which points to both the original BoxLockNode (the
  // stack space for the monitor) and the Object being locked.
  const int MonitorEdges = 2;
  assert(JVMState::logMonitorEdges == exact_log2(MonitorEdges), "correct MonitorEdges");
  assert(req() == jvms()->endoff(), "correct sizing");
  int nextmon = jvms()->scloff();
  if (GenerateSynchronizationCode) {
    add_req(lock->box_node());
    add_req(lock->obj_node());
  } else {
    // Synchronization elided: keep the slots (so offsets stay consistent)
    // but fill them with top.
    Node* top = Compile::current()->top();
    add_req(top);
    add_req(top);
  }
  jvms()->set_scloff(nextmon+MonitorEdges);
  jvms()->set_endoff(req());
}
1010 | |
1011 void SafePointNode::pop_monitor() { | |
1012 // Delete last monitor from debug info | |
1013 debug_only(int num_before_pop = jvms()->nof_monitors()); | |
1014 const int MonitorEdges = (1<<JVMState::logMonitorEdges); | |
63
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1015 int scloff = jvms()->scloff(); |
0 | 1016 int endoff = jvms()->endoff(); |
63
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1017 int new_scloff = scloff - MonitorEdges; |
0 | 1018 int new_endoff = endoff - MonitorEdges; |
63
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1019 jvms()->set_scloff(new_scloff); |
0 | 1020 jvms()->set_endoff(new_endoff); |
63
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1021 while (scloff > new_scloff) del_req(--scloff); |
0 | 1022 assert(jvms()->nof_monitors() == num_before_pop-1, ""); |
1023 } | |
1024 | |
1025 Node *SafePointNode::peek_monitor_box() const { | |
1026 int mon = jvms()->nof_monitors() - 1; | |
1027 assert(mon >= 0, "most have a monitor"); | |
1028 return monitor_box(jvms(), mon); | |
1029 } | |
1030 | |
1031 Node *SafePointNode::peek_monitor_obj() const { | |
1032 int mon = jvms()->nof_monitors() - 1; | |
1033 assert(mon >= 0, "most have a monitor"); | |
1034 return monitor_obj(jvms(), mon); | |
1035 } | |
1036 | |
1037 // Do we Match on this edge index or not? Match no edges | |
1038 uint SafePointNode::match_edge(uint idx) const { | |
1039 if( !needs_polling_address_input() ) | |
1040 return 0; | |
1041 | |
1042 return (TypeFunc::Parms == idx); | |
1043 } | |
1044 | |
63
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1045 //============== SafePointScalarObjectNode ============== |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1046 |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1047 SafePointScalarObjectNode::SafePointScalarObjectNode(const TypeOopPtr* tp, |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1048 #ifdef ASSERT |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1049 AllocateNode* alloc, |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1050 #endif |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1051 uint first_index, |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1052 uint n_fields) : |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1053 TypeNode(tp, 1), // 1 control input -- seems required. Get from root. |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1054 #ifdef ASSERT |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1055 _alloc(alloc), |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1056 #endif |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1057 _first_index(first_index), |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1058 _n_fields(n_fields) |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1059 { |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1060 init_class_id(Class_SafePointScalarObject); |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1061 } |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1062 |
420
a1980da045cc
6462850: generate biased locking code in C2 ideal graph
kvn
parents:
366
diff
changeset
|
1063 bool SafePointScalarObjectNode::pinned() const { return true; } |
601
523ded093c31
6809798: SafePointScalarObject node placed into incorrect block during GCM
kvn
parents:
460
diff
changeset
|
1064 bool SafePointScalarObjectNode::depends_only_on_test() const { return false; } |
63
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1065 |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1066 uint SafePointScalarObjectNode::ideal_reg() const { |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1067 return 0; // No matching to machine instruction |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1068 } |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1069 |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1070 const RegMask &SafePointScalarObjectNode::in_RegMask(uint idx) const { |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1071 return *(Compile::current()->matcher()->idealreg2debugmask[in(idx)->ideal_reg()]); |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1072 } |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1073 |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1074 const RegMask &SafePointScalarObjectNode::out_RegMask() const { |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1075 return RegMask::Empty; |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1076 } |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1077 |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1078 uint SafePointScalarObjectNode::match_edge(uint idx) const { |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1079 return 0; |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1080 } |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1081 |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1082 SafePointScalarObjectNode* |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1083 SafePointScalarObjectNode::clone(int jvms_adj, Dict* sosn_map) const { |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1084 void* cached = (*sosn_map)[(void*)this]; |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1085 if (cached != NULL) { |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1086 return (SafePointScalarObjectNode*)cached; |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1087 } |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1088 Compile* C = Compile::current(); |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1089 SafePointScalarObjectNode* res = (SafePointScalarObjectNode*)Node::clone(); |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1090 res->_first_index += jvms_adj; |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1091 sosn_map->Insert((void*)this, (void*)res); |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1092 return res; |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1093 } |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1094 |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1095 |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1096 #ifndef PRODUCT |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1097 void SafePointScalarObjectNode::dump_spec(outputStream *st) const { |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1098 st->print(" # fields@[%d..%d]", first_index(), |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1099 first_index() + n_fields() - 1); |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1100 } |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1101 |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1102 #endif |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
1103 |
0 | 1104 //============================================================================= |
1105 uint AllocateNode::size_of() const { return sizeof(*this); } | |
1106 | |
1107 AllocateNode::AllocateNode(Compile* C, const TypeFunc *atype, | |
1108 Node *ctrl, Node *mem, Node *abio, | |
1109 Node *size, Node *klass_node, Node *initial_test) | |
1110 : CallNode(atype, NULL, TypeRawPtr::BOTTOM) | |
1111 { | |
1112 init_class_id(Class_Allocate); | |
1113 init_flags(Flag_is_macro); | |
39
76256d272075
6667612: (Escape Analysis) disable loop cloning if it has a scalar replaceable allocation
kvn
parents:
0
diff
changeset
|
1114 _is_scalar_replaceable = false; |
0 | 1115 Node *topnode = C->top(); |
1116 | |
1117 init_req( TypeFunc::Control , ctrl ); | |
1118 init_req( TypeFunc::I_O , abio ); | |
1119 init_req( TypeFunc::Memory , mem ); | |
1120 init_req( TypeFunc::ReturnAdr, topnode ); | |
1121 init_req( TypeFunc::FramePtr , topnode ); | |
1122 init_req( AllocSize , size); | |
1123 init_req( KlassNode , klass_node); | |
1124 init_req( InitialTest , initial_test); | |
1125 init_req( ALength , topnode); | |
1126 C->add_macro_node(this); | |
1127 } | |
1128 | |
1129 //============================================================================= | |
1130 uint AllocateArrayNode::size_of() const { return sizeof(*this); } | |
1131 | |
704
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1132 Node* AllocateArrayNode::Ideal(PhaseGVN *phase, bool can_reshape) { |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1133 if (remove_dead_region(phase, can_reshape)) return this; |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1134 |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1135 const Type* type = phase->type(Ideal_length()); |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1136 if (type->isa_int() && type->is_int()->_hi < 0) { |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1137 if (can_reshape) { |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1138 PhaseIterGVN *igvn = phase->is_IterGVN(); |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1139 // Unreachable fall through path (negative array length), |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1140 // the allocation can only throw so disconnect it. |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1141 Node* proj = proj_out(TypeFunc::Control); |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1142 Node* catchproj = NULL; |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1143 if (proj != NULL) { |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1144 for (DUIterator_Fast imax, i = proj->fast_outs(imax); i < imax; i++) { |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1145 Node *cn = proj->fast_out(i); |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1146 if (cn->is_Catch()) { |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1147 catchproj = cn->as_Multi()->proj_out(CatchProjNode::fall_through_index); |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1148 break; |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1149 } |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1150 } |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1151 } |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1152 if (catchproj != NULL && catchproj->outcnt() > 0 && |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1153 (catchproj->outcnt() > 1 || |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1154 catchproj->unique_out()->Opcode() != Op_Halt)) { |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1155 assert(catchproj->is_CatchProj(), "must be a CatchProjNode"); |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1156 Node* nproj = catchproj->clone(); |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1157 igvn->register_new_node_with_optimizer(nproj); |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1158 |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1159 Node *frame = new (phase->C, 1) ParmNode( phase->C->start(), TypeFunc::FramePtr ); |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1160 frame = phase->transform(frame); |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1161 // Halt & Catch Fire |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1162 Node *halt = new (phase->C, TypeFunc::Parms) HaltNode( nproj, frame ); |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1163 phase->C->root()->add_req(halt); |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1164 phase->transform(halt); |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1165 |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1166 igvn->replace_node(catchproj, phase->C->top()); |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1167 return this; |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1168 } |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1169 } else { |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1170 // Can't correct it during regular GVN so register for IGVN |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1171 phase->C->record_for_igvn(this); |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1172 } |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1173 } |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1174 return NULL; |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1175 } |
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
1176 |
366
8261ee795323
6711100: 64bit fastdebug server vm crashes with assert(_base == Int,"Not an Int")
rasbold
parents:
331
diff
changeset
|
1177 // Retrieve the length from the AllocateArrayNode. Narrow the type with a |
8261ee795323
6711100: 64bit fastdebug server vm crashes with assert(_base == Int,"Not an Int")
rasbold
parents:
331
diff
changeset
|
1178 // CastII, if appropriate. If we are not allowed to create new nodes, and |
8261ee795323
6711100: 64bit fastdebug server vm crashes with assert(_base == Int,"Not an Int")
rasbold
parents:
331
diff
changeset
|
1179 // a CastII is appropriate, return NULL. |
8261ee795323
6711100: 64bit fastdebug server vm crashes with assert(_base == Int,"Not an Int")
rasbold
parents:
331
diff
changeset
|
1180 Node *AllocateArrayNode::make_ideal_length(const TypeOopPtr* oop_type, PhaseTransform *phase, bool allow_new_nodes) { |
8261ee795323
6711100: 64bit fastdebug server vm crashes with assert(_base == Int,"Not an Int")
rasbold
parents:
331
diff
changeset
|
1181 Node *length = in(AllocateNode::ALength); |
8261ee795323
6711100: 64bit fastdebug server vm crashes with assert(_base == Int,"Not an Int")
rasbold
parents:
331
diff
changeset
|
1182 assert(length != NULL, "length is not null"); |
8261ee795323
6711100: 64bit fastdebug server vm crashes with assert(_base == Int,"Not an Int")
rasbold
parents:
331
diff
changeset
|
1183 |
8261ee795323
6711100: 64bit fastdebug server vm crashes with assert(_base == Int,"Not an Int")
rasbold
parents:
331
diff
changeset
|
1184 const TypeInt* length_type = phase->find_int_type(length); |
8261ee795323
6711100: 64bit fastdebug server vm crashes with assert(_base == Int,"Not an Int")
rasbold
parents:
331
diff
changeset
|
1185 const TypeAryPtr* ary_type = oop_type->isa_aryptr(); |
8261ee795323
6711100: 64bit fastdebug server vm crashes with assert(_base == Int,"Not an Int")
rasbold
parents:
331
diff
changeset
|
1186 |
8261ee795323
6711100: 64bit fastdebug server vm crashes with assert(_base == Int,"Not an Int")
rasbold
parents:
331
diff
changeset
|
1187 if (ary_type != NULL && length_type != NULL) { |
8261ee795323
6711100: 64bit fastdebug server vm crashes with assert(_base == Int,"Not an Int")
rasbold
parents:
331
diff
changeset
|
1188 const TypeInt* narrow_length_type = ary_type->narrow_size_type(length_type); |
8261ee795323
6711100: 64bit fastdebug server vm crashes with assert(_base == Int,"Not an Int")
rasbold
parents:
331
diff
changeset
|
1189 if (narrow_length_type != length_type) { |
8261ee795323
6711100: 64bit fastdebug server vm crashes with assert(_base == Int,"Not an Int")
rasbold
parents:
331
diff
changeset
|
1190 // Assert one of: |
8261ee795323
6711100: 64bit fastdebug server vm crashes with assert(_base == Int,"Not an Int")
rasbold
parents:
331
diff
changeset
|
1191 // - the narrow_length is 0 |
8261ee795323
6711100: 64bit fastdebug server vm crashes with assert(_base == Int,"Not an Int")
rasbold
parents:
331
diff
changeset
|
1192 // - the narrow_length is not wider than length |
8261ee795323
6711100: 64bit fastdebug server vm crashes with assert(_base == Int,"Not an Int")
rasbold
parents:
331
diff
changeset
|
1193 assert(narrow_length_type == TypeInt::ZERO || |
8261ee795323
6711100: 64bit fastdebug server vm crashes with assert(_base == Int,"Not an Int")
rasbold
parents:
331
diff
changeset
|
1194 (narrow_length_type->_hi <= length_type->_hi && |
8261ee795323
6711100: 64bit fastdebug server vm crashes with assert(_base == Int,"Not an Int")
rasbold
parents:
331
diff
changeset
|
1195 narrow_length_type->_lo >= length_type->_lo), |
8261ee795323
6711100: 64bit fastdebug server vm crashes with assert(_base == Int,"Not an Int")
rasbold
parents:
331
diff
changeset
|
1196 "narrow type must be narrower than length type"); |
8261ee795323
6711100: 64bit fastdebug server vm crashes with assert(_base == Int,"Not an Int")
rasbold
parents:
331
diff
changeset
|
1197 |
8261ee795323
6711100: 64bit fastdebug server vm crashes with assert(_base == Int,"Not an Int")
rasbold
parents:
331
diff
changeset
|
1198 // Return NULL if new nodes are not allowed |
8261ee795323
6711100: 64bit fastdebug server vm crashes with assert(_base == Int,"Not an Int")
rasbold
parents:
331
diff
changeset
|
1199 if (!allow_new_nodes) return NULL; |
8261ee795323
6711100: 64bit fastdebug server vm crashes with assert(_base == Int,"Not an Int")
rasbold
parents:
331
diff
changeset
|
1200 // Create a cast which is control dependent on the initialization to |
8261ee795323
6711100: 64bit fastdebug server vm crashes with assert(_base == Int,"Not an Int")
rasbold
parents:
331
diff
changeset
|
1201 // propagate the fact that the array length must be positive. |
8261ee795323
6711100: 64bit fastdebug server vm crashes with assert(_base == Int,"Not an Int")
rasbold
parents:
331
diff
changeset
|
1202 length = new (phase->C, 2) CastIINode(length, narrow_length_type); |
8261ee795323
6711100: 64bit fastdebug server vm crashes with assert(_base == Int,"Not an Int")
rasbold
parents:
331
diff
changeset
|
1203 length->set_req(0, initialization()->proj_out(0)); |
8261ee795323
6711100: 64bit fastdebug server vm crashes with assert(_base == Int,"Not an Int")
rasbold
parents:
331
diff
changeset
|
1204 } |
8261ee795323
6711100: 64bit fastdebug server vm crashes with assert(_base == Int,"Not an Int")
rasbold
parents:
331
diff
changeset
|
1205 } |
8261ee795323
6711100: 64bit fastdebug server vm crashes with assert(_base == Int,"Not an Int")
rasbold
parents:
331
diff
changeset
|
1206 |
8261ee795323
6711100: 64bit fastdebug server vm crashes with assert(_base == Int,"Not an Int")
rasbold
parents:
331
diff
changeset
|
1207 return length; |
8261ee795323
6711100: 64bit fastdebug server vm crashes with assert(_base == Int,"Not an Int")
rasbold
parents:
331
diff
changeset
|
1208 } |
8261ee795323
6711100: 64bit fastdebug server vm crashes with assert(_base == Int,"Not an Int")
rasbold
parents:
331
diff
changeset
|
1209 |
0 | 1210 //============================================================================= |
1211 uint LockNode::size_of() const { return sizeof(*this); } | |
1212 | |
1213 // Redundant lock elimination | |
1214 // | |
1215 // There are various patterns of locking where we release and | |
1216 // immediately reacquire a lock in a piece of code where no operations | |
1217 // occur in between that would be observable. In those cases we can | |
1218 // skip releasing and reacquiring the lock without violating any | |
1219 // fairness requirements. Doing this around a loop could cause a lock | |
1220 // to be held for a very long time so we concentrate on non-looping | |
1221 // control flow. We also require that the operations are fully | |
1222 // redundant meaning that we don't introduce new lock operations on | |
1223 // some paths so to be able to eliminate it on others ala PRE. This | |
1224 // would probably require some more extensive graph manipulation to | |
1225 // guarantee that the memory edges were all handled correctly. | |
1226 // | |
1227 // Assuming p is a simple predicate which can't trap in any way and s | |
1228 // is a synchronized method consider this code: | |
1229 // | |
1230 // s(); | |
1231 // if (p) | |
1232 // s(); | |
1233 // else | |
1234 // s(); | |
1235 // s(); | |
1236 // | |
1237 // 1. The unlocks of the first call to s can be eliminated if the | |
1238 // locks inside the then and else branches are eliminated. | |
1239 // | |
1240 // 2. The unlocks of the then and else branches can be eliminated if | |
1241 // the lock of the final call to s is eliminated. | |
1242 // | |
1243 // Either of these cases subsumes the simple case of sequential control flow | |
1244 // | |
1245 // Additionally we can eliminate versions without the else case: |
1246 // | |
1247 // s(); | |
1248 // if (p) | |
1249 // s(); | |
1250 // s(); | |
1251 // | |
1252 // 3. In this case we eliminate the unlock of the first s, the lock | |
1253 // and unlock in the then case and the lock in the final s. | |
1254 // | |
1255 // Note also that in all these cases the then/else pieces don't have | |
1256 // to be trivial as long as they begin and end with synchronization | |
1257 // operations. | |
1258 // | |
1259 // s(); | |
1260 // if (p) | |
1261 // s(); | |
1262 // f(); | |
1263 // s(); | |
1264 // s(); | |
1265 // | |
1266 // The code will work properly for this case, leaving in the unlock | |
1267 // before the call to f and the relock after it. | |
1268 // | |
1269 // A potentially interesting case which isn't handled here is when the | |
1270 // locking is partially redundant. | |
1271 // | |
1272 // s(); | |
1273 // if (p) | |
1274 // s(); | |
1275 // | |
1276 // This could be eliminated putting unlocking on the else case and | |
1277 // eliminating the first unlock and the lock in the then side. | |
1278 // Alternatively the unlock could be moved out of the then side so it | |
1279 // was after the merge and the first unlock and second lock | |
1280 // eliminated. This might require less manipulation of the memory | |
1281 // state to get correct. | |
1282 // | |
1283 // Additionally we might allow work between a unlock and lock before | |
1284 // giving up eliminating the locks. The current code disallows any | |
1285 // conditional control flow between these operations. A formulation | |
1286 // similar to partial redundancy elimination computing the | |
1287 // availability of unlocking and the anticipatability of locking at a | |
1288 // program point would allow detection of fully redundant locking with | |
1289 // some amount of work in between. I'm not sure how often I really | |
1290 // think that would occur though. Most of the cases I've seen | |
1291 // indicate it's likely non-trivial work would occur in between. | |
1292 // There may be other more complicated constructs where we could | |
1293 // eliminate locking but I haven't seen any others appear as hot or | |
1294 // interesting. | |
1295 // | |
1296 // Locking and unlocking have a canonical form in ideal that looks | |
1297 // roughly like this: | |
1298 // | |
1299 // <obj> | |
1300 // | \\------+ | |
1301 // | \ \ | |
1302 // | BoxLock \ | |
1303 // | | | \ | |
1304 // | | \ \ | |
1305 // | | FastLock | |
1306 // | | / | |
1307 // | | / | |
1308 // | | | | |
1309 // | |
1310 // Lock | |
1311 // | | |
1312 // Proj #0 | |
1313 // | | |
1314 // MembarAcquire | |
1315 // | | |
1316 // Proj #0 | |
1317 // | |
1318 // MembarRelease | |
1319 // | | |
1320 // Proj #0 | |
1321 // | | |
1322 // Unlock | |
1323 // | | |
1324 // Proj #0 | |
1325 // | |
1326 // | |
1327 // This code proceeds by processing Lock nodes during PhaseIterGVN | |
1328 // and searching back through its control for the proper code | |
1329 // patterns. Once it finds a set of lock and unlock operations to | |
1330 // eliminate they are marked as eliminatable which causes the | |
1331 // expansion of the Lock and Unlock macro nodes to make the operation a NOP | |
1332 // | |
1333 //============================================================================= | |
1334 | |
1335 // | |
1336 // Utility function to skip over uninteresting control nodes. Nodes skipped are: | |
1337 // - copy regions. (These may not have been optimized away yet.) | |
1338 // - eliminated locking nodes | |
1339 // | |
1340 static Node *next_control(Node *ctrl) { | |
1341 if (ctrl == NULL) | |
1342 return NULL; | |
1343 while (1) { | |
1344 if (ctrl->is_Region()) { | |
1345 RegionNode *r = ctrl->as_Region(); | |
1346 Node *n = r->is_copy(); | |
1347 if (n == NULL) | |
1348 break; // hit a region, return it | |
1349 else | |
1350 ctrl = n; | |
1351 } else if (ctrl->is_Proj()) { | |
1352 Node *in0 = ctrl->in(0); | |
1353 if (in0->is_AbstractLock() && in0->as_AbstractLock()->is_eliminated()) { | |
1354 ctrl = in0->in(0); | |
1355 } else { | |
1356 break; | |
1357 } | |
1358 } else { | |
1359 break; // found an interesting control | |
1360 } | |
1361 } | |
1362 return ctrl; | |
1363 } | |
1364 // | |
1365 // Given a control, see if it's the control projection of an Unlock which | |
1366 // operates on the same object as lock. | |
1367 // | |
1368 bool AbstractLockNode::find_matching_unlock(const Node* ctrl, LockNode* lock, | |
1369 GrowableArray<AbstractLockNode*> &lock_ops) { | |
1370 ProjNode *ctrl_proj = (ctrl->is_Proj()) ? ctrl->as_Proj() : NULL; | |
1371 if (ctrl_proj != NULL && ctrl_proj->_con == TypeFunc::Control) { | |
1372 Node *n = ctrl_proj->in(0); | |
1373 if (n != NULL && n->is_Unlock()) { | |
1374 UnlockNode *unlock = n->as_Unlock(); | |
1375 if ((lock->obj_node() == unlock->obj_node()) && | |
1376 (lock->box_node() == unlock->box_node()) && !unlock->is_eliminated()) { | |
1377 lock_ops.append(unlock); | |
1378 return true; | |
1379 } | |
1380 } | |
1381 } | |
1382 return false; | |
1383 } | |
1384 | |
//
// Find the lock matching an unlock.  Returns null if a safepoint
// or complicated control is encountered first.
LockNode *AbstractLockNode::find_matching_lock(UnlockNode* unlock) {
  LockNode *lock_result = NULL;
  // find the matching lock, or an intervening safepoint
  Node *ctrl = next_control(unlock->in(0));
  while (1) {
    assert(ctrl != NULL, "invalid control graph");
    assert(!ctrl->is_Start(), "missing lock for unlock");
    if (ctrl->is_top()) break;  // dead control path
    if (ctrl->is_Proj()) ctrl = ctrl->in(0);
    if (ctrl->is_SafePoint()) {
        break;  // found a safepoint (may be the lock we are searching for)
    } else if (ctrl->is_Region()) {
      // Check for a simple diamond pattern.  Punt on anything more complicated
      if (ctrl->req() == 3 && ctrl->in(1) != NULL && ctrl->in(2) != NULL) {
        Node *in1 = next_control(ctrl->in(1));
        Node *in2 = next_control(ctrl->in(2));
        if (((in1->is_IfTrue() && in2->is_IfFalse()) ||
             (in2->is_IfTrue() && in1->is_IfFalse())) && (in1->in(0) == in2->in(0))) {
          // Both region inputs are projections of the same If node: a
          // simple diamond.  Continue the walk above the If's control input.
          ctrl = next_control(in1->in(0)->in(0));
        } else {
          break;  // not a recognizable diamond; give up
        }
      } else {
        break;  // region with more than two merged paths; give up
      }
    } else {
      ctrl = next_control(ctrl->in(0));  // keep searching
    }
  }
  // The walk stopped at a safepoint-class node (or dead/complicated
  // control).  Accept it only if it is a Lock on the same object and
  // the same box as the unlock.
  if (ctrl->is_Lock()) {
    LockNode *lock = ctrl->as_Lock();
    if ((lock->obj_node() == unlock->obj_node()) &&
        (lock->box_node() == unlock->box_node())) {
      lock_result = lock;
    }
  }
  return lock_result;
}
1426 | |
// This code corresponds to case 3 above.

bool AbstractLockNode::find_lock_and_unlock_through_if(Node* node, LockNode* lock,
                                                       GrowableArray<AbstractLockNode*> &lock_ops) {
  Node* if_node = node->in(0);
  bool if_true = node->is_IfTrue();

  // 'node' must be one arm (IfTrue or IfFalse) of an If with exactly
  // two uses, i.e. its two projections.
  if (if_node->is_If() && if_node->outcnt() == 2 && (if_true || node->is_IfFalse())) {
    Node *lock_ctrl = next_control(if_node->in(0));
    // An unlock of the same object/box must directly precede the If.
    if (find_matching_unlock(lock_ctrl, lock, lock_ops)) {
      Node* lock1_node = NULL;
      // Fetch the opposite projection of the If and look for a lock
      // hanging off of it as its unique use.
      ProjNode* proj = if_node->as_If()->proj_out(!if_true);
      if (if_true) {
        if (proj->is_IfFalse() && proj->outcnt() == 1) {
          lock1_node = proj->unique_out();
        }
      } else {
        if (proj->is_IfTrue() && proj->outcnt() == 1) {
          lock1_node = proj->unique_out();
        }
      }
      // The other branch's lock must match obj/box and must not have
      // been eliminated already.
      if (lock1_node != NULL && lock1_node->is_Lock()) {
        LockNode *lock1 = lock1_node->as_Lock();
        if ((lock->obj_node() == lock1->obj_node()) &&
            (lock->box_node() == lock1->box_node()) && !lock1->is_eliminated()) {
          lock_ops.append(lock1);
          return true;
        }
      }
    }
  }

  // No match: discard anything collected so far so the caller does not
  // act on a partial set of lock operations.
  lock_ops.trunc_to(0);
  return false;
}
1462 | |
1463 bool AbstractLockNode::find_unlocks_for_region(const RegionNode* region, LockNode* lock, | |
1464 GrowableArray<AbstractLockNode*> &lock_ops) { | |
1465 // check each control merging at this point for a matching unlock. | |
1466 // in(0) should be self edge so skip it. | |
1467 for (int i = 1; i < (int)region->req(); i++) { | |
1468 Node *in_node = next_control(region->in(i)); | |
1469 if (in_node != NULL) { | |
1470 if (find_matching_unlock(in_node, lock, lock_ops)) { | |
1471 // found a match so keep on checking. | |
1472 continue; | |
1473 } else if (find_lock_and_unlock_through_if(in_node, lock, lock_ops)) { | |
1474 continue; | |
1475 } | |
1476 | |
1477 // If we fall through to here then it was some kind of node we | |
1478 // don't understand or there wasn't a matching unlock, so give | |
1479 // up trying to merge locks. | |
1480 lock_ops.trunc_to(0); | |
1481 return false; | |
1482 } | |
1483 } | |
1484 return true; | |
1485 | |
1486 } | |
1487 | |
#ifndef PRODUCT
//
// Create a counter which counts the number of times this lock is acquired
//
// Non-product builds only (guarded by #ifndef PRODUCT).  The counter is
// allocated through OptoRuntime and stored in _counter for later updates
// (see set_eliminated()).
void AbstractLockNode::create_lock_counter(JVMState* state) {
  _counter = OptoRuntime::new_named_counter(state, NamedCounter::LockCounter);
}
#endif
1496 | |
// Mark this lock/unlock as eliminated.  The node stays in the graph;
// the flag causes macro expansion of the Lock/Unlock node to become a
// no-op (see the overview comment earlier in this file).
void AbstractLockNode::set_eliminated() {
  _eliminate = true;
#ifndef PRODUCT
  if (_counter) {
    // Update the counter to indicate that this lock was eliminated.
    // The counter update code will stay around even though the
    // optimizer will eliminate the lock operation itself.
    _counter->set_tag(NamedCounter::EliminatedLockCounter);
  }
#endif
}
1508 | |
1509 //============================================================================= | |
1510 Node *LockNode::Ideal(PhaseGVN *phase, bool can_reshape) { | |
1511 | |
66
6dbf1a175d6b
6672848: (Escape Analysis) improve lock elimination with EA
kvn
parents:
65
diff
changeset
|
1512 // perform any generic optimizations first (returns 'this' or NULL) |
0 | 1513 Node *result = SafePointNode::Ideal(phase, can_reshape); |
1514 | |
1515 // Now see if we can optimize away this lock. We don't actually | |
1516 // remove the locking here, we simply set the _eliminate flag which | |
1517 // prevents macro expansion from expanding the lock. Since we don't | |
1518 // modify the graph, the value returned from this function is the | |
1519 // one computed above. | |
66
6dbf1a175d6b
6672848: (Escape Analysis) improve lock elimination with EA
kvn
parents:
65
diff
changeset
|
1520 if (result == NULL && can_reshape && EliminateLocks && !is_eliminated()) { |
6dbf1a175d6b
6672848: (Escape Analysis) improve lock elimination with EA
kvn
parents:
65
diff
changeset
|
1521 // |
6dbf1a175d6b
6672848: (Escape Analysis) improve lock elimination with EA
kvn
parents:
65
diff
changeset
|
1522 // If we are locking an unescaped object, the lock/unlock is unnecessary |
6dbf1a175d6b
6672848: (Escape Analysis) improve lock elimination with EA
kvn
parents:
65
diff
changeset
|
1523 // |
460
424f9bfe6b96
6775880: EA +DeoptimizeALot: assert(mon_info->owner()->is_locked(),"object must be locked now")
kvn
parents:
420
diff
changeset
|
1524 ConnectionGraph *cgr = phase->C->congraph(); |
66
6dbf1a175d6b
6672848: (Escape Analysis) improve lock elimination with EA
kvn
parents:
65
diff
changeset
|
1525 PointsToNode::EscapeState es = PointsToNode::GlobalEscape; |
6dbf1a175d6b
6672848: (Escape Analysis) improve lock elimination with EA
kvn
parents:
65
diff
changeset
|
1526 if (cgr != NULL) |
1634
60a14ad85270
6966411: escape.cpp:450 assert(base->Opcode() == Op_ConP
kvn
parents:
1552
diff
changeset
|
1527 es = cgr->escape_state(obj_node()); |
66
6dbf1a175d6b
6672848: (Escape Analysis) improve lock elimination with EA
kvn
parents:
65
diff
changeset
|
1528 if (es != PointsToNode::UnknownEscape && es != PointsToNode::GlobalEscape) { |
6dbf1a175d6b
6672848: (Escape Analysis) improve lock elimination with EA
kvn
parents:
65
diff
changeset
|
1529 // Mark it eliminated to update any counters |
6dbf1a175d6b
6672848: (Escape Analysis) improve lock elimination with EA
kvn
parents:
65
diff
changeset
|
1530 this->set_eliminated(); |
6dbf1a175d6b
6672848: (Escape Analysis) improve lock elimination with EA
kvn
parents:
65
diff
changeset
|
1531 return result; |
6dbf1a175d6b
6672848: (Escape Analysis) improve lock elimination with EA
kvn
parents:
65
diff
changeset
|
1532 } |
6dbf1a175d6b
6672848: (Escape Analysis) improve lock elimination with EA
kvn
parents:
65
diff
changeset
|
1533 |
0 | 1534 // |
1535 // Try lock coarsening | |
1536 // | |
1537 PhaseIterGVN* iter = phase->is_IterGVN(); | |
1538 if (iter != NULL) { | |
1539 | |
1540 GrowableArray<AbstractLockNode*> lock_ops; | |
1541 | |
1542 Node *ctrl = next_control(in(0)); | |
1543 | |
1544 // now search back for a matching Unlock | |
1545 if (find_matching_unlock(ctrl, this, lock_ops)) { | |
1546 // found an unlock directly preceding this lock. This is the | |
1547 // case of single unlock directly control dependent on a | |
1548 // single lock which is the trivial version of case 1 or 2. | |
1549 } else if (ctrl->is_Region() ) { | |
1550 if (find_unlocks_for_region(ctrl->as_Region(), this, lock_ops)) { | |
1551 // found lock preceded by multiple unlocks along all paths | |
1552 // joining at this point which is case 3 in description above. | |
1553 } | |
1554 } else { | |
1555 // see if this lock comes from either half of an if and the | |
1556 // predecessors merges unlocks and the other half of the if | |
1557 // performs a lock. | |
1558 if (find_lock_and_unlock_through_if(ctrl, this, lock_ops)) { | |
1559 // found unlock splitting to an if with locks on both branches. | |
1560 } | |
1561 } | |
1562 | |
1563 if (lock_ops.length() > 0) { | |
1564 // add ourselves to the list of locks to be eliminated. | |
1565 lock_ops.append(this); | |
1566 | |
1567 #ifndef PRODUCT | |
1568 if (PrintEliminateLocks) { | |
1569 int locks = 0; | |
1570 int unlocks = 0; | |
1571 for (int i = 0; i < lock_ops.length(); i++) { | |
1572 AbstractLockNode* lock = lock_ops.at(i); | |
66
6dbf1a175d6b
6672848: (Escape Analysis) improve lock elimination with EA
kvn
parents:
65
diff
changeset
|
1573 if (lock->Opcode() == Op_Lock) |
6dbf1a175d6b
6672848: (Escape Analysis) improve lock elimination with EA
kvn
parents:
65
diff
changeset
|
1574 locks++; |
6dbf1a175d6b
6672848: (Escape Analysis) improve lock elimination with EA
kvn
parents:
65
diff
changeset
|
1575 else |
6dbf1a175d6b
6672848: (Escape Analysis) improve lock elimination with EA
kvn
parents:
65
diff
changeset
|
1576 unlocks++; |
0 | 1577 if (Verbose) { |
1578 lock->dump(1); | |
1579 } | |
1580 } | |
1581 tty->print_cr("***Eliminated %d unlocks and %d locks", unlocks, locks); | |
1582 } | |
1583 #endif | |
1584 | |
1585 // for each of the identified locks, mark them | |
1586 // as eliminatable | |
1587 for (int i = 0; i < lock_ops.length(); i++) { | |
1588 AbstractLockNode* lock = lock_ops.at(i); | |
1589 | |
1590 // Mark it eliminated to update any counters | |
1591 lock->set_eliminated(); | |
460
424f9bfe6b96
6775880: EA +DeoptimizeALot: assert(mon_info->owner()->is_locked(),"object must be locked now")
kvn
parents:
420
diff
changeset
|
1592 lock->set_coarsened(); |
0 | 1593 } |
1594 } else if (result != NULL && ctrl->is_Region() && | |
1595 iter->_worklist.member(ctrl)) { | |
1596 // We weren't able to find any opportunities but the region this | |
1597 // lock is control dependent on hasn't been processed yet so put | |
1598 // this lock back on the worklist so we can check again once any | |
1599 // region simplification has occurred. | |
1600 iter->_worklist.push(this); | |
1601 } | |
1602 } | |
1603 } | |
1604 | |
1605 return result; | |
1606 } | |
1607 | |
//=============================================================================
// Report the concrete size of this node subclass (sizeof(UnlockNode)).
uint UnlockNode::size_of() const { return sizeof(*this); }
1610 | |
//=============================================================================
// Try to optimize away this unlock.  Mirrors the escape-analysis half of
// LockNode::Ideal: an unlock of a provably unescaped object is marked
// eliminated (no coarsening is attempted from the unlock side).
Node *UnlockNode::Ideal(PhaseGVN *phase, bool can_reshape) {

  // perform any generic optimizations first (returns 'this' or NULL)
  Node *result = SafePointNode::Ideal(phase, can_reshape);

  // Now see if we can optimize away this unlock.  We don't actually
  // remove the unlocking here, we simply set the _eliminate flag which
  // prevents macro expansion from expanding the unlock.  Since we don't
  // modify the graph, the value returned from this function is the
  // one computed above.
  // Escape state is defined after Parse phase.
  if (result == NULL && can_reshape && EliminateLocks && !is_eliminated()) {
    //
    // If we are unlocking an unescaped object, the lock/unlock is unnecessary.
    //
    ConnectionGraph *cgr = phase->C->congraph();
    // Default to GlobalEscape ("cannot eliminate") when no connection
    // graph is available.
    PointsToNode::EscapeState es = PointsToNode::GlobalEscape;
    if (cgr != NULL)
      es = cgr->escape_state(obj_node());
    if (es != PointsToNode::UnknownEscape && es != PointsToNode::GlobalEscape) {
      // Mark it eliminated to update any counters
      this->set_eliminated();
    }
  }
  return result;
}