Mercurial > hg > truffle
annotate src/share/vm/opto/callnode.hpp @ 1145:e018e6884bd8
6631166: CMS: better heuristics when combatting fragmentation
Summary: Autonomic per-worker free block cache sizing, tunable coalition policies, fixes to per-size block statistics, retuned gain and bandwidth of some feedback loop filters to allow quicker reactivity to abrupt changes in ambient demand, and other heuristics to reduce fragmentation of the CMS old gen. Also tightened some assertions, including those related to locking.
Reviewed-by: jmasa
author | ysr |
---|---|
date | Wed, 23 Dec 2009 09:23:54 -0800 |
parents | 7c57aead6d3e |
children | 97125851f396 |
rev | line source |
---|---|
0 | 1 /* |
844 | 2 * Copyright 1997-2009 Sun Microsystems, Inc. All Rights Reserved. |
0 | 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. |
4 * | |
5 * This code is free software; you can redistribute it and/or modify it | |
6 * under the terms of the GNU General Public License version 2 only, as | |
7 * published by the Free Software Foundation. | |
8 * | |
9 * This code is distributed in the hope that it will be useful, but WITHOUT | |
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
12 * version 2 for more details (a copy is included in the LICENSE file that | |
13 * accompanied this code). | |
14 * | |
15 * You should have received a copy of the GNU General Public License version | |
16 * 2 along with this work; if not, write to the Free Software Foundation, | |
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. | |
18 * | |
19 * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara, | |
20 * CA 95054 USA or visit www.sun.com if you need additional information or | |
21 * have any questions. | |
22 * | |
23 */ | |
24 | |
25 // Portions of code courtesy of Clifford Click | |
26 | |
27 // Optimization - Graph Style | |
28 | |
// Forward declarations of the node classes declared in this header and of
// the compiler phases that reference them.
class Chaitin;
class NamedCounter;
class MultiNode;
class SafePointNode;
class CallNode;
class CallJavaNode;
class CallStaticJavaNode;
class CallDynamicJavaNode;
class CallRuntimeNode;
class CallLeafNode;
class CallLeafNoFPNode;
class AllocateNode;
class AllocateArrayNode;
class LockNode;
class UnlockNode;
class JVMState;
class OopMap;
class State;
class StartNode;
class MachCallNode;
class FastLockNode;
50 | |
51 //------------------------------StartNode-------------------------------------- | |
52 // The method start node | |
53 class StartNode : public MultiNode { | |
54 virtual uint cmp( const Node &n ) const; | |
55 virtual uint size_of() const; // Size is bigger | |
56 public: | |
57 const TypeTuple *_domain; | |
58 StartNode( Node *root, const TypeTuple *domain ) : MultiNode(2), _domain(domain) { | |
59 init_class_id(Class_Start); | |
60 init_flags(Flag_is_block_start); | |
61 init_req(0,this); | |
62 init_req(1,root); | |
63 } | |
64 virtual int Opcode() const; | |
65 virtual bool pinned() const { return true; }; | |
66 virtual const Type *bottom_type() const; | |
67 virtual const TypePtr *adr_type() const { return TypePtr::BOTTOM; } | |
68 virtual const Type *Value( PhaseTransform *phase ) const; | |
69 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape); | |
70 virtual void calling_convention( BasicType* sig_bt, VMRegPair *parm_reg, uint length ) const; | |
71 virtual const RegMask &in_RegMask(uint) const; | |
72 virtual Node *match( const ProjNode *proj, const Matcher *m ); | |
73 virtual uint ideal_reg() const { return 0; } | |
74 #ifndef PRODUCT | |
75 virtual void dump_spec(outputStream *st) const; | |
76 #endif | |
77 }; | |
78 | |
79 //------------------------------StartOSRNode----------------------------------- | |
80 // The method start node for on stack replacement code | |
81 class StartOSRNode : public StartNode { | |
82 public: | |
83 StartOSRNode( Node *root, const TypeTuple *domain ) : StartNode(root, domain) {} | |
84 virtual int Opcode() const; | |
85 static const TypeTuple *osr_domain(); | |
86 }; | |
87 | |
88 | |
89 //------------------------------ParmNode--------------------------------------- | |
90 // Incoming parameters | |
91 class ParmNode : public ProjNode { | |
92 static const char * const names[TypeFunc::Parms+1]; | |
93 public: | |
33 | 94 ParmNode( StartNode *src, uint con ) : ProjNode(src,con) { |
95 init_class_id(Class_Parm); | |
96 } | |
0 | 97 virtual int Opcode() const; |
98 virtual bool is_CFG() const { return (_con == TypeFunc::Control); } | |
99 virtual uint ideal_reg() const; | |
100 #ifndef PRODUCT | |
101 virtual void dump_spec(outputStream *st) const; | |
102 #endif | |
103 }; | |
104 | |
105 | |
106 //------------------------------ReturnNode------------------------------------- | |
107 // Return from subroutine node | |
108 class ReturnNode : public Node { | |
109 public: | |
110 ReturnNode( uint edges, Node *cntrl, Node *i_o, Node *memory, Node *retadr, Node *frameptr ); | |
111 virtual int Opcode() const; | |
112 virtual bool is_CFG() const { return true; } | |
113 virtual uint hash() const { return NO_HASH; } // CFG nodes do not hash | |
114 virtual bool depends_only_on_test() const { return false; } | |
115 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape); | |
116 virtual const Type *Value( PhaseTransform *phase ) const; | |
117 virtual uint ideal_reg() const { return NotAMachineReg; } | |
118 virtual uint match_edge(uint idx) const; | |
119 #ifndef PRODUCT | |
120 virtual void dump_req() const; | |
121 #endif | |
122 }; | |
123 | |
124 | |
125 //------------------------------RethrowNode------------------------------------ | |
126 // Rethrow of exception at call site. Ends a procedure before rethrowing; | |
127 // ends the current basic block like a ReturnNode. Restores registers and | |
128 // unwinds stack. Rethrow happens in the caller's method. | |
129 class RethrowNode : public Node { | |
130 public: | |
131 RethrowNode( Node *cntrl, Node *i_o, Node *memory, Node *frameptr, Node *ret_adr, Node *exception ); | |
132 virtual int Opcode() const; | |
133 virtual bool is_CFG() const { return true; } | |
134 virtual uint hash() const { return NO_HASH; } // CFG nodes do not hash | |
135 virtual bool depends_only_on_test() const { return false; } | |
136 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape); | |
137 virtual const Type *Value( PhaseTransform *phase ) const; | |
138 virtual uint match_edge(uint idx) const; | |
139 virtual uint ideal_reg() const { return NotAMachineReg; } | |
140 #ifndef PRODUCT | |
141 virtual void dump_req() const; | |
142 #endif | |
143 }; | |
144 | |
145 | |
146 //------------------------------TailCallNode----------------------------------- | |
147 // Pop stack frame and jump indirect | |
148 class TailCallNode : public ReturnNode { | |
149 public: | |
150 TailCallNode( Node *cntrl, Node *i_o, Node *memory, Node *frameptr, Node *retadr, Node *target, Node *moop ) | |
151 : ReturnNode( TypeFunc::Parms+2, cntrl, i_o, memory, frameptr, retadr ) { | |
152 init_req(TypeFunc::Parms, target); | |
153 init_req(TypeFunc::Parms+1, moop); | |
154 } | |
155 | |
156 virtual int Opcode() const; | |
157 virtual uint match_edge(uint idx) const; | |
158 }; | |
159 | |
160 //------------------------------TailJumpNode----------------------------------- | |
161 // Pop stack frame and jump indirect | |
162 class TailJumpNode : public ReturnNode { | |
163 public: | |
164 TailJumpNode( Node *cntrl, Node *i_o, Node *memory, Node *frameptr, Node *target, Node *ex_oop) | |
165 : ReturnNode(TypeFunc::Parms+2, cntrl, i_o, memory, frameptr, Compile::current()->top()) { | |
166 init_req(TypeFunc::Parms, target); | |
167 init_req(TypeFunc::Parms+1, ex_oop); | |
168 } | |
169 | |
170 virtual int Opcode() const; | |
171 virtual uint match_edge(uint idx) const; | |
172 }; | |
173 | |
174 //-------------------------------JVMState------------------------------------- | |
175 // A linked list of JVMState nodes captures the whole interpreter state, | |
176 // plus GC roots, for all active calls at some call site in this compilation | |
177 // unit. (If there is no inlining, then the list has exactly one link.) | |
178 // This provides a way to map the optimized program back into the interpreter, | |
179 // or to let the GC mark the stack. | |
180 class JVMState : public ResourceObj { | |
900
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
181 public: |
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
182 typedef enum { |
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
183 Reexecute_Undefined = -1, // not defined -- will be translated into false later |
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
184 Reexecute_False = 0, // false -- do not reexecute |
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
185 Reexecute_True = 1 // true -- reexecute the bytecode |
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
186 } ReexecuteState; //Reexecute State |
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
187 |
0 | 188 private: |
189 JVMState* _caller; // List pointer for forming scope chains | |
190 uint _depth; // One mroe than caller depth, or one. | |
191 uint _locoff; // Offset to locals in input edge mapping | |
192 uint _stkoff; // Offset to stack in input edge mapping | |
193 uint _monoff; // Offset to monitors in input edge mapping | |
63
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
194 uint _scloff; // Offset to fields of scalar objs in input edge mapping |
0 | 195 uint _endoff; // Offset to end of input edge mapping |
196 uint _sp; // Jave Expression Stack Pointer for this state | |
197 int _bci; // Byte Code Index of this JVM point | |
900
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
198 ReexecuteState _reexecute; // Whether this bytecode need to be re-executed |
0 | 199 ciMethod* _method; // Method Pointer |
200 SafePointNode* _map; // Map node associated with this scope | |
201 public: | |
202 friend class Compile; | |
900
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
203 friend class PreserveReexecuteState; |
0 | 204 |
205 // Because JVMState objects live over the entire lifetime of the | |
206 // Compile object, they are allocated into the comp_arena, which | |
207 // does not get resource marked or reset during the compile process | |
208 void *operator new( size_t x, Compile* C ) { return C->comp_arena()->Amalloc(x); } | |
209 void operator delete( void * ) { } // fast deallocation | |
210 | |
211 // Create a new JVMState, ready for abstract interpretation. | |
212 JVMState(ciMethod* method, JVMState* caller); | |
213 JVMState(int stack_size); // root state; has a null method | |
214 | |
215 // Access functions for the JVM | |
216 uint locoff() const { return _locoff; } | |
217 uint stkoff() const { return _stkoff; } | |
218 uint argoff() const { return _stkoff + _sp; } | |
219 uint monoff() const { return _monoff; } | |
63
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
220 uint scloff() const { return _scloff; } |
0 | 221 uint endoff() const { return _endoff; } |
222 uint oopoff() const { return debug_end(); } | |
223 | |
224 int loc_size() const { return _stkoff - _locoff; } | |
225 int stk_size() const { return _monoff - _stkoff; } | |
63
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
226 int mon_size() const { return _scloff - _monoff; } |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
227 int scl_size() const { return _endoff - _scloff; } |
0 | 228 |
229 bool is_loc(uint i) const { return i >= _locoff && i < _stkoff; } | |
230 bool is_stk(uint i) const { return i >= _stkoff && i < _monoff; } | |
63
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
231 bool is_mon(uint i) const { return i >= _monoff && i < _scloff; } |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
232 bool is_scl(uint i) const { return i >= _scloff && i < _endoff; } |
0 | 233 |
900
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
234 uint sp() const { return _sp; } |
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
235 int bci() const { return _bci; } |
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
236 bool should_reexecute() const { return _reexecute==Reexecute_True; } |
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
237 bool is_reexecute_undefined() const { return _reexecute==Reexecute_Undefined; } |
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
238 bool has_method() const { return _method != NULL; } |
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
239 ciMethod* method() const { assert(has_method(), ""); return _method; } |
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
240 JVMState* caller() const { return _caller; } |
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
241 SafePointNode* map() const { return _map; } |
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
242 uint depth() const { return _depth; } |
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
243 uint debug_start() const; // returns locoff of root caller |
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
244 uint debug_end() const; // returns endoff of self |
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
245 uint debug_size() const { |
63
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
246 return loc_size() + sp() + mon_size() + scl_size(); |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
247 } |
0 | 248 uint debug_depth() const; // returns sum of debug_size values at all depths |
249 | |
250 // Returns the JVM state at the desired depth (1 == root). | |
251 JVMState* of_depth(int d) const; | |
252 | |
253 // Tells if two JVM states have the same call chain (depth, methods, & bcis). | |
254 bool same_calls_as(const JVMState* that) const; | |
255 | |
256 // Monitors (monitors are stored as (boxNode, objNode) pairs | |
257 enum { logMonitorEdges = 1 }; | |
258 int nof_monitors() const { return mon_size() >> logMonitorEdges; } | |
259 int monitor_depth() const { return nof_monitors() + (caller() ? caller()->monitor_depth() : 0); } | |
260 int monitor_box_offset(int idx) const { return monoff() + (idx << logMonitorEdges) + 0; } | |
261 int monitor_obj_offset(int idx) const { return monoff() + (idx << logMonitorEdges) + 1; } | |
262 bool is_monitor_box(uint off) const { | |
263 assert(is_mon(off), "should be called only for monitor edge"); | |
264 return (0 == bitfield(off - monoff(), 0, logMonitorEdges)); | |
265 } | |
266 bool is_monitor_use(uint off) const { return (is_mon(off) | |
267 && is_monitor_box(off)) | |
268 || (caller() && caller()->is_monitor_use(off)); } | |
269 | |
270 // Initialization functions for the JVM | |
271 void set_locoff(uint off) { _locoff = off; } | |
272 void set_stkoff(uint off) { _stkoff = off; } | |
273 void set_monoff(uint off) { _monoff = off; } | |
63
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
274 void set_scloff(uint off) { _scloff = off; } |
0 | 275 void set_endoff(uint off) { _endoff = off; } |
63
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
276 void set_offsets(uint off) { |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
277 _locoff = _stkoff = _monoff = _scloff = _endoff = off; |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
278 } |
0 | 279 void set_map(SafePointNode *map) { _map = map; } |
280 void set_sp(uint sp) { _sp = sp; } | |
900
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
281 // _reexecute is initialized to "undefined" for a new bci |
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
282 void set_bci(int bci) {if(_bci != bci)_reexecute=Reexecute_Undefined; _bci = bci; } |
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
283 void set_should_reexecute(bool reexec) {_reexecute = reexec ? Reexecute_True : Reexecute_False;} |
0 | 284 |
285 // Miscellaneous utility functions | |
286 JVMState* clone_deep(Compile* C) const; // recursively clones caller chain | |
287 JVMState* clone_shallow(Compile* C) const; // retains uncloned caller | |
288 | |
289 #ifndef PRODUCT | |
290 void format(PhaseRegAlloc *regalloc, const Node *n, outputStream* st) const; | |
291 void dump_spec(outputStream *st) const; | |
292 void dump_on(outputStream* st) const; | |
293 void dump() const { | |
294 dump_on(tty); | |
295 } | |
296 #endif | |
297 }; | |
298 | |
299 //------------------------------SafePointNode---------------------------------- | |
300 // A SafePointNode is a subclass of a MultiNode for convenience (and | |
301 // potential code sharing) only - conceptually it is independent of | |
302 // the Node semantics. | |
303 class SafePointNode : public MultiNode { | |
304 virtual uint cmp( const Node &n ) const; | |
305 virtual uint size_of() const; // Size is bigger | |
306 | |
307 public: | |
308 SafePointNode(uint edges, JVMState* jvms, | |
309 // A plain safepoint advertises no memory effects (NULL): | |
310 const TypePtr* adr_type = NULL) | |
311 : MultiNode( edges ), | |
312 _jvms(jvms), | |
313 _oop_map(NULL), | |
314 _adr_type(adr_type) | |
315 { | |
316 init_class_id(Class_SafePoint); | |
317 } | |
318 | |
319 OopMap* _oop_map; // Array of OopMap info (8-bit char) for GC | |
320 JVMState* const _jvms; // Pointer to list of JVM State objects | |
321 const TypePtr* _adr_type; // What type of memory does this node produce? | |
322 | |
323 // Many calls take *all* of memory as input, | |
324 // but some produce a limited subset of that memory as output. | |
325 // The adr_type reports the call's behavior as a store, not a load. | |
326 | |
327 virtual JVMState* jvms() const { return _jvms; } | |
328 void set_jvms(JVMState* s) { | |
329 *(JVMState**)&_jvms = s; // override const attribute in the accessor | |
330 } | |
331 OopMap *oop_map() const { return _oop_map; } | |
332 void set_oop_map(OopMap *om) { _oop_map = om; } | |
333 | |
334 // Functionality from old debug nodes which has changed | |
335 Node *local(JVMState* jvms, uint idx) const { | |
336 assert(verify_jvms(jvms), "jvms must match"); | |
337 return in(jvms->locoff() + idx); | |
338 } | |
339 Node *stack(JVMState* jvms, uint idx) const { | |
340 assert(verify_jvms(jvms), "jvms must match"); | |
341 return in(jvms->stkoff() + idx); | |
342 } | |
343 Node *argument(JVMState* jvms, uint idx) const { | |
344 assert(verify_jvms(jvms), "jvms must match"); | |
345 return in(jvms->argoff() + idx); | |
346 } | |
347 Node *monitor_box(JVMState* jvms, uint idx) const { | |
348 assert(verify_jvms(jvms), "jvms must match"); | |
349 return in(jvms->monitor_box_offset(idx)); | |
350 } | |
351 Node *monitor_obj(JVMState* jvms, uint idx) const { | |
352 assert(verify_jvms(jvms), "jvms must match"); | |
353 return in(jvms->monitor_obj_offset(idx)); | |
354 } | |
355 | |
356 void set_local(JVMState* jvms, uint idx, Node *c); | |
357 | |
358 void set_stack(JVMState* jvms, uint idx, Node *c) { | |
359 assert(verify_jvms(jvms), "jvms must match"); | |
360 set_req(jvms->stkoff() + idx, c); | |
361 } | |
362 void set_argument(JVMState* jvms, uint idx, Node *c) { | |
363 assert(verify_jvms(jvms), "jvms must match"); | |
364 set_req(jvms->argoff() + idx, c); | |
365 } | |
366 void ensure_stack(JVMState* jvms, uint stk_size) { | |
367 assert(verify_jvms(jvms), "jvms must match"); | |
368 int grow_by = (int)stk_size - (int)jvms->stk_size(); | |
369 if (grow_by > 0) grow_stack(jvms, grow_by); | |
370 } | |
371 void grow_stack(JVMState* jvms, uint grow_by); | |
372 // Handle monitor stack | |
373 void push_monitor( const FastLockNode *lock ); | |
374 void pop_monitor (); | |
375 Node *peek_monitor_box() const; | |
376 Node *peek_monitor_obj() const; | |
377 | |
378 // Access functions for the JVM | |
379 Node *control () const { return in(TypeFunc::Control ); } | |
380 Node *i_o () const { return in(TypeFunc::I_O ); } | |
381 Node *memory () const { return in(TypeFunc::Memory ); } | |
382 Node *returnadr() const { return in(TypeFunc::ReturnAdr); } | |
383 Node *frameptr () const { return in(TypeFunc::FramePtr ); } | |
384 | |
385 void set_control ( Node *c ) { set_req(TypeFunc::Control,c); } | |
386 void set_i_o ( Node *c ) { set_req(TypeFunc::I_O ,c); } | |
387 void set_memory ( Node *c ) { set_req(TypeFunc::Memory ,c); } | |
388 | |
389 MergeMemNode* merged_memory() const { | |
390 return in(TypeFunc::Memory)->as_MergeMem(); | |
391 } | |
392 | |
393 // The parser marks useless maps as dead when it's done with them: | |
394 bool is_killed() { return in(TypeFunc::Control) == NULL; } | |
395 | |
396 // Exception states bubbling out of subgraphs such as inlined calls | |
397 // are recorded here. (There might be more than one, hence the "next".) | |
398 // This feature is used only for safepoints which serve as "maps" | |
399 // for JVM states during parsing, intrinsic expansion, etc. | |
400 SafePointNode* next_exception() const; | |
401 void set_next_exception(SafePointNode* n); | |
402 bool has_exceptions() const { return next_exception() != NULL; } | |
403 | |
404 // Standard Node stuff | |
405 virtual int Opcode() const; | |
406 virtual bool pinned() const { return true; } | |
407 virtual const Type *Value( PhaseTransform *phase ) const; | |
408 virtual const Type *bottom_type() const { return Type::CONTROL; } | |
409 virtual const TypePtr *adr_type() const { return _adr_type; } | |
410 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape); | |
411 virtual Node *Identity( PhaseTransform *phase ); | |
412 virtual uint ideal_reg() const { return 0; } | |
413 virtual const RegMask &in_RegMask(uint) const; | |
414 virtual const RegMask &out_RegMask() const; | |
415 virtual uint match_edge(uint idx) const; | |
416 | |
417 static bool needs_polling_address_input(); | |
418 | |
419 #ifndef PRODUCT | |
420 virtual void dump_spec(outputStream *st) const; | |
421 #endif | |
422 }; | |
423 | |
63
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
424 //------------------------------SafePointScalarObjectNode---------------------- |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
425 // A SafePointScalarObjectNode represents the state of a scalarized object |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
426 // at a safepoint. |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
427 |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
428 class SafePointScalarObjectNode: public TypeNode { |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
429 uint _first_index; // First input edge index of a SafePoint node where |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
430 // states of the scalarized object fields are collected. |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
431 uint _n_fields; // Number of non-static fields of the scalarized object. |
74
2a9af0b9cb1c
6674600: (Escape Analysis) Optimize memory graph for instance's fields
kvn
parents:
65
diff
changeset
|
432 DEBUG_ONLY(AllocateNode* _alloc;) |
63
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
433 public: |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
434 SafePointScalarObjectNode(const TypeOopPtr* tp, |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
435 #ifdef ASSERT |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
436 AllocateNode* alloc, |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
437 #endif |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
438 uint first_index, uint n_fields); |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
439 virtual int Opcode() const; |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
440 virtual uint ideal_reg() const; |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
441 virtual const RegMask &in_RegMask(uint) const; |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
442 virtual const RegMask &out_RegMask() const; |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
443 virtual uint match_edge(uint idx) const; |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
444 |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
445 uint first_index() const { return _first_index; } |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
446 uint n_fields() const { return _n_fields; } |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
447 DEBUG_ONLY(AllocateNode* alloc() const { return _alloc; }) |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
448 |
420
a1980da045cc
6462850: generate biased locking code in C2 ideal graph
kvn
parents:
366
diff
changeset
|
449 // SafePointScalarObject should be always pinned to the control edge |
a1980da045cc
6462850: generate biased locking code in C2 ideal graph
kvn
parents:
366
diff
changeset
|
450 // of the SafePoint node for which it was generated. |
a1980da045cc
6462850: generate biased locking code in C2 ideal graph
kvn
parents:
366
diff
changeset
|
451 virtual bool pinned() const; // { return true; } |
a1980da045cc
6462850: generate biased locking code in C2 ideal graph
kvn
parents:
366
diff
changeset
|
452 |
601
523ded093c31
6809798: SafePointScalarObject node placed into incorrect block during GCM
kvn
parents:
460
diff
changeset
|
453 // SafePointScalarObject depends on the SafePoint node |
523ded093c31
6809798: SafePointScalarObject node placed into incorrect block during GCM
kvn
parents:
460
diff
changeset
|
454 // for which it was generated. |
523ded093c31
6809798: SafePointScalarObject node placed into incorrect block during GCM
kvn
parents:
460
diff
changeset
|
455 virtual bool depends_only_on_test() const; // { return false; } |
523ded093c31
6809798: SafePointScalarObject node placed into incorrect block during GCM
kvn
parents:
460
diff
changeset
|
456 |
63
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
457 virtual uint size_of() const { return sizeof(*this); } |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
458 |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
459 // Assumes that "this" is an argument to a safepoint node "s", and that |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
460 // "new_call" is being created to correspond to "s". But the difference |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
461 // between the start index of the jvmstates of "new_call" and "s" is |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
462 // "jvms_adj". Produce and return a SafePointScalarObjectNode that |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
463 // corresponds appropriately to "this" in "new_call". Assumes that |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
464 // "sosn_map" is a map, specific to the translation of "s" to "new_call", |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
465 // mapping old SafePointScalarObjectNodes to new, to avoid multiple copies. |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
466 SafePointScalarObjectNode* clone(int jvms_adj, Dict* sosn_map) const; |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
467 |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
468 #ifndef PRODUCT |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
469 virtual void dump_spec(outputStream *st) const; |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
470 #endif |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
471 }; |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
472 |
1080
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
473 |
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
474 // Simple container for the outgoing projections of a call. Useful |
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
475 // for serious surgery on calls. |
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
476 class CallProjections : public StackObj { |
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
477 public: |
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
478 Node* fallthrough_proj; |
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
479 Node* fallthrough_catchproj; |
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
480 Node* fallthrough_memproj; |
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
481 Node* fallthrough_ioproj; |
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
482 Node* catchall_catchproj; |
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
483 Node* catchall_memproj; |
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
484 Node* catchall_ioproj; |
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
485 Node* resproj; |
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
486 Node* exobj; |
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
487 }; |
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
488 |
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
489 |
0 | 490 //------------------------------CallNode--------------------------------------- |
491 // Call nodes now subsume the function of debug nodes at callsites, so they | |
492 // contain the functionality of a full scope chain of debug nodes. | |
493 class CallNode : public SafePointNode { | |
494 public: | |
495 const TypeFunc *_tf; // Function type | |
496 address _entry_point; // Address of method being called | |
497 float _cnt; // Estimate of number of times called | |
498 | |
499 CallNode(const TypeFunc* tf, address addr, const TypePtr* adr_type) | |
500 : SafePointNode(tf->domain()->cnt(), NULL, adr_type), | |
501 _tf(tf), | |
502 _entry_point(addr), | |
503 _cnt(COUNT_UNKNOWN) | |
504 { | |
505 init_class_id(Class_Call); | |
506 init_flags(Flag_is_Call); | |
507 } | |
508 | |
509 const TypeFunc* tf() const { return _tf; } | |
510 const address entry_point() const { return _entry_point; } | |
511 const float cnt() const { return _cnt; } | |
512 | |
513 void set_tf(const TypeFunc* tf) { _tf = tf; } | |
514 void set_entry_point(address p) { _entry_point = p; } | |
515 void set_cnt(float c) { _cnt = c; } | |
516 | |
517 virtual const Type *bottom_type() const; | |
518 virtual const Type *Value( PhaseTransform *phase ) const; | |
519 virtual Node *Identity( PhaseTransform *phase ) { return this; } | |
520 virtual uint cmp( const Node &n ) const; | |
521 virtual uint size_of() const = 0; | |
522 virtual void calling_convention( BasicType* sig_bt, VMRegPair *parm_regs, uint argcnt ) const; | |
523 virtual Node *match( const ProjNode *proj, const Matcher *m ); | |
524 virtual uint ideal_reg() const { return NotAMachineReg; } | |
525 // Are we guaranteed that this node is a safepoint? Not true for leaf calls and | |
526 // for some macro nodes whose expansion does not have a safepoint on the fast path. | |
527 virtual bool guaranteed_safepoint() { return true; } | |
528 // For macro nodes, the JVMState gets modified during expansion, so when cloning | |
529 // the node the JVMState must be cloned. | |
530 virtual void clone_jvms() { } // default is not to clone | |
531 | |
65 | 532 // Returns true if the call may modify n |
533 virtual bool may_modify(const TypePtr *addr_t, PhaseTransform *phase); | |
534 // Does this node have a use of n other than in debug information? | |
168
7793bd37a336
6705887: Compressed Oops: generate x64 addressing and implicit null checks with narrow oops
kvn
parents:
113
diff
changeset
|
535 bool has_non_debug_use(Node *n); |
65 | 536 // Returns the unique CheckCastPP of a call |
537 // or result projection is there are several CheckCastPP | |
538 // or returns NULL if there is no one. | |
539 Node *result_cast(); | |
540 | |
1080
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
541 // Collect all the interesting edges from a call for use in |
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
542 // replacing the call by something else. Used by macro expansion |
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
543 // and the late inlining support. |
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
544 void extract_projections(CallProjections* projs, bool separate_io_proj); |
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
545 |
0 | 546 virtual uint match_edge(uint idx) const; |
547 | |
548 #ifndef PRODUCT | |
549 virtual void dump_req() const; | |
550 virtual void dump_spec(outputStream *st) const; | |
551 #endif | |
552 }; | |
553 | |
1080
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
554 |
0 | 555 //------------------------------CallJavaNode----------------------------------- |
556 // Make a static or dynamic subroutine call node using Java calling | |
557 // convention. (The "Java" calling convention is the compiler's calling | |
558 // convention, as opposed to the interpreter's or that of native C.) | |
559 class CallJavaNode : public CallNode { | |
560 protected: | |
561 virtual uint cmp( const Node &n ) const; | |
562 virtual uint size_of() const; // Size is bigger | |
563 | |
564 bool _optimized_virtual; | |
565 ciMethod* _method; // Method being direct called | |
566 public: | |
567 const int _bci; // Byte Code Index of call byte code | |
568 CallJavaNode(const TypeFunc* tf , address addr, ciMethod* method, int bci) | |
569 : CallNode(tf, addr, TypePtr::BOTTOM), | |
570 _method(method), _bci(bci), _optimized_virtual(false) | |
571 { | |
572 init_class_id(Class_CallJava); | |
573 } | |
574 | |
575 virtual int Opcode() const; | |
576 ciMethod* method() const { return _method; } | |
577 void set_method(ciMethod *m) { _method = m; } | |
578 void set_optimized_virtual(bool f) { _optimized_virtual = f; } | |
579 bool is_optimized_virtual() const { return _optimized_virtual; } | |
580 | |
581 #ifndef PRODUCT | |
582 virtual void dump_spec(outputStream *st) const; | |
583 #endif | |
584 }; | |
585 | |
586 //------------------------------CallStaticJavaNode----------------------------- | |
587 // Make a direct subroutine call using Java calling convention (for static | |
588 // calls and optimized virtual calls, plus calls to wrappers for run-time | |
589 // routines); generates static stub. | |
590 class CallStaticJavaNode : public CallJavaNode { | |
591 virtual uint cmp( const Node &n ) const; | |
592 virtual uint size_of() const; // Size is bigger | |
593 public: | |
594 CallStaticJavaNode(const TypeFunc* tf, address addr, ciMethod* method, int bci) | |
595 : CallJavaNode(tf, addr, method, bci), _name(NULL) { | |
596 init_class_id(Class_CallStaticJava); | |
597 } | |
598 CallStaticJavaNode(const TypeFunc* tf, address addr, const char* name, int bci, | |
599 const TypePtr* adr_type) | |
600 : CallJavaNode(tf, addr, NULL, bci), _name(name) { | |
601 init_class_id(Class_CallStaticJava); | |
602 // This node calls a runtime stub, which often has narrow memory effects. | |
603 _adr_type = adr_type; | |
604 } | |
605 const char *_name; // Runtime wrapper name | |
606 | |
607 // If this is an uncommon trap, return the request code, else zero. | |
608 int uncommon_trap_request() const; | |
609 static int extract_uncommon_trap_request(const Node* call); | |
610 | |
611 virtual int Opcode() const; | |
612 #ifndef PRODUCT | |
613 virtual void dump_spec(outputStream *st) const; | |
614 #endif | |
615 }; | |
616 | |
617 //------------------------------CallDynamicJavaNode---------------------------- | |
618 // Make a dispatched call using Java calling convention. | |
619 class CallDynamicJavaNode : public CallJavaNode { | |
620 virtual uint cmp( const Node &n ) const; | |
621 virtual uint size_of() const; // Size is bigger | |
622 public: | |
623 CallDynamicJavaNode( const TypeFunc *tf , address addr, ciMethod* method, int vtable_index, int bci ) : CallJavaNode(tf,addr,method,bci), _vtable_index(vtable_index) { | |
624 init_class_id(Class_CallDynamicJava); | |
625 } | |
626 | |
627 int _vtable_index; | |
628 virtual int Opcode() const; | |
629 #ifndef PRODUCT | |
630 virtual void dump_spec(outputStream *st) const; | |
631 #endif | |
632 }; | |
633 | |
634 //------------------------------CallRuntimeNode-------------------------------- | |
635 // Make a direct subroutine call node into compiled C++ code. | |
636 class CallRuntimeNode : public CallNode { | |
637 virtual uint cmp( const Node &n ) const; | |
638 virtual uint size_of() const; // Size is bigger | |
639 public: | |
640 CallRuntimeNode(const TypeFunc* tf, address addr, const char* name, | |
641 const TypePtr* adr_type) | |
642 : CallNode(tf, addr, adr_type), | |
643 _name(name) | |
644 { | |
645 init_class_id(Class_CallRuntime); | |
646 } | |
647 | |
648 const char *_name; // Printable name, if _method is NULL | |
649 virtual int Opcode() const; | |
650 virtual void calling_convention( BasicType* sig_bt, VMRegPair *parm_regs, uint argcnt ) const; | |
651 | |
652 #ifndef PRODUCT | |
653 virtual void dump_spec(outputStream *st) const; | |
654 #endif | |
655 }; | |
656 | |
657 //------------------------------CallLeafNode----------------------------------- | |
658 // Make a direct subroutine call node into compiled C++ code, without | |
659 // safepoints | |
660 class CallLeafNode : public CallRuntimeNode { | |
661 public: | |
662 CallLeafNode(const TypeFunc* tf, address addr, const char* name, | |
663 const TypePtr* adr_type) | |
664 : CallRuntimeNode(tf, addr, name, adr_type) | |
665 { | |
666 init_class_id(Class_CallLeaf); | |
667 } | |
668 virtual int Opcode() const; | |
669 virtual bool guaranteed_safepoint() { return false; } | |
670 #ifndef PRODUCT | |
671 virtual void dump_spec(outputStream *st) const; | |
672 #endif | |
673 }; | |
674 | |
675 //------------------------------CallLeafNoFPNode------------------------------- | |
676 // CallLeafNode, not using floating point or using it in the same manner as | |
677 // the generated code | |
678 class CallLeafNoFPNode : public CallLeafNode { | |
679 public: | |
680 CallLeafNoFPNode(const TypeFunc* tf, address addr, const char* name, | |
681 const TypePtr* adr_type) | |
682 : CallLeafNode(tf, addr, name, adr_type) | |
683 { | |
684 } | |
685 virtual int Opcode() const; | |
686 }; | |
687 | |
688 | |
689 //------------------------------Allocate--------------------------------------- | |
690 // High-level memory allocation | |
691 // | |
692 // AllocateNode and AllocateArrayNode are subclasses of CallNode because they will | |
693 // get expanded into a code sequence containing a call. Unlike other CallNodes, | |
694 // they have 2 memory projections and 2 i_o projections (which are distinguished by | |
695 // the _is_io_use flag in the projection.) This is needed when expanding the node in | |
696 // order to differentiate the uses of the projection on the normal control path from | |
697 // those on the exception return path. | |
698 // | |
699 class AllocateNode : public CallNode { | |
700 public: | |
701 enum { | |
702 // Output: | |
703 RawAddress = TypeFunc::Parms, // the newly-allocated raw address | |
704 // Inputs: | |
705 AllocSize = TypeFunc::Parms, // size (in bytes) of the new object | |
706 KlassNode, // type (maybe dynamic) of the obj. | |
707 InitialTest, // slow-path test (may be constant) | |
708 ALength, // array length (or TOP if none) | |
709 ParmLimit | |
710 }; | |
711 | |
712 static const TypeFunc* alloc_type() { | |
713 const Type** fields = TypeTuple::fields(ParmLimit - TypeFunc::Parms); | |
714 fields[AllocSize] = TypeInt::POS; | |
715 fields[KlassNode] = TypeInstPtr::NOTNULL; | |
716 fields[InitialTest] = TypeInt::BOOL; | |
717 fields[ALength] = TypeInt::INT; // length (can be a bad length) | |
718 | |
719 const TypeTuple *domain = TypeTuple::make(ParmLimit, fields); | |
720 | |
721 // create result type (range) | |
722 fields = TypeTuple::fields(1); | |
723 fields[TypeFunc::Parms+0] = TypeRawPtr::NOTNULL; // Returned oop | |
724 | |
725 const TypeTuple *range = TypeTuple::make(TypeFunc::Parms+1, fields); | |
726 | |
727 return TypeFunc::make(domain, range); | |
728 } | |
729 | |
39
76256d272075
6667612: (Escape Analysis) disable loop cloning if it has a scalar replaceable allocation
kvn
parents:
33
diff
changeset
|
730 bool _is_scalar_replaceable; // Result of Escape Analysis |
76256d272075
6667612: (Escape Analysis) disable loop cloning if it has a scalar replaceable allocation
kvn
parents:
33
diff
changeset
|
731 |
0 | 732 virtual uint size_of() const; // Size is bigger |
733 AllocateNode(Compile* C, const TypeFunc *atype, Node *ctrl, Node *mem, Node *abio, | |
734 Node *size, Node *klass_node, Node *initial_test); | |
735 // Expansion modifies the JVMState, so we need to clone it | |
736 virtual void clone_jvms() { | |
737 set_jvms(jvms()->clone_deep(Compile::current())); | |
738 } | |
739 virtual int Opcode() const; | |
740 virtual uint ideal_reg() const { return Op_RegP; } | |
741 virtual bool guaranteed_safepoint() { return false; } | |
742 | |
65 | 743 // allocations do not modify their arguments |
744 virtual bool may_modify(const TypePtr *addr_t, PhaseTransform *phase) { return false;} | |
745 | |
0 | 746 // Pattern-match a possible usage of AllocateNode. |
747 // Return null if no allocation is recognized. | |
748 // The operand is the pointer produced by the (possible) allocation. | |
749 // It must be a projection of the Allocate or its subsequent CastPP. | |
750 // (Note: This function is defined in file graphKit.cpp, near | |
751 // GraphKit::new_instance/new_array, whose output it recognizes.) | |
752 // The 'ptr' may not have an offset unless the 'offset' argument is given. | |
753 static AllocateNode* Ideal_allocation(Node* ptr, PhaseTransform* phase); | |
754 | |
755 // Fancy version which uses AddPNode::Ideal_base_and_offset to strip | |
756 // an offset, which is reported back to the caller. | |
757 // (Note: AllocateNode::Ideal_allocation is defined in graphKit.cpp.) | |
758 static AllocateNode* Ideal_allocation(Node* ptr, PhaseTransform* phase, | |
759 intptr_t& offset); | |
760 | |
761 // Dig the klass operand out of a (possible) allocation site. | |
762 static Node* Ideal_klass(Node* ptr, PhaseTransform* phase) { | |
763 AllocateNode* allo = Ideal_allocation(ptr, phase); | |
764 return (allo == NULL) ? NULL : allo->in(KlassNode); | |
765 } | |
766 | |
767 // Conservatively small estimate of offset of first non-header byte. | |
768 int minimum_header_size() { | |
113
ba764ed4b6f2
6420645: Create a vm that uses compressed oops for up to 32gb heapsizes
coleenp
parents:
74
diff
changeset
|
769 return is_AllocateArray() ? arrayOopDesc::base_offset_in_bytes(T_BYTE) : |
ba764ed4b6f2
6420645: Create a vm that uses compressed oops for up to 32gb heapsizes
coleenp
parents:
74
diff
changeset
|
770 instanceOopDesc::base_offset_in_bytes(); |
0 | 771 } |
772 | |
773 // Return the corresponding initialization barrier (or null if none). | |
774 // Walks out edges to find it... | |
775 // (Note: Both InitializeNode::allocation and AllocateNode::initialization | |
776 // are defined in graphKit.cpp, which sets up the bidirectional relation.) | |
777 InitializeNode* initialization(); | |
778 | |
779 // Convenience for initialization->maybe_set_complete(phase) | |
780 bool maybe_set_complete(PhaseGVN* phase); | |
781 }; | |
782 | |
783 //------------------------------AllocateArray--------------------------------- | |
784 // | |
785 // High-level array allocation | |
786 // | |
787 class AllocateArrayNode : public AllocateNode { | |
788 public: | |
789 AllocateArrayNode(Compile* C, const TypeFunc *atype, Node *ctrl, Node *mem, Node *abio, | |
790 Node* size, Node* klass_node, Node* initial_test, | |
791 Node* count_val | |
792 ) | |
793 : AllocateNode(C, atype, ctrl, mem, abio, size, klass_node, | |
794 initial_test) | |
795 { | |
796 init_class_id(Class_AllocateArray); | |
797 set_req(AllocateNode::ALength, count_val); | |
798 } | |
799 virtual int Opcode() const; | |
800 virtual uint size_of() const; // Size is bigger | |
704
ad8c635e757e
6823453: DeoptimizeALot causes fastdebug server jvm to fail with assert(false,"unscheduable graph")
kvn
parents:
601
diff
changeset
|
801 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape); |
0 | 802 |
366
8261ee795323
6711100: 64bit fastdebug server vm crashes with assert(_base == Int,"Not an Int")
rasbold
parents:
196
diff
changeset
|
803 // Dig the length operand out of a array allocation site. |
8261ee795323
6711100: 64bit fastdebug server vm crashes with assert(_base == Int,"Not an Int")
rasbold
parents:
196
diff
changeset
|
804 Node* Ideal_length() { |
8261ee795323
6711100: 64bit fastdebug server vm crashes with assert(_base == Int,"Not an Int")
rasbold
parents:
196
diff
changeset
|
805 return in(AllocateNode::ALength); |
8261ee795323
6711100: 64bit fastdebug server vm crashes with assert(_base == Int,"Not an Int")
rasbold
parents:
196
diff
changeset
|
806 } |
8261ee795323
6711100: 64bit fastdebug server vm crashes with assert(_base == Int,"Not an Int")
rasbold
parents:
196
diff
changeset
|
807 |
8261ee795323
6711100: 64bit fastdebug server vm crashes with assert(_base == Int,"Not an Int")
rasbold
parents:
196
diff
changeset
|
808 // Dig the length operand out of a array allocation site and narrow the |
8261ee795323
6711100: 64bit fastdebug server vm crashes with assert(_base == Int,"Not an Int")
rasbold
parents:
196
diff
changeset
|
809 // type with a CastII, if necesssary |
8261ee795323
6711100: 64bit fastdebug server vm crashes with assert(_base == Int,"Not an Int")
rasbold
parents:
196
diff
changeset
|
810 Node* make_ideal_length(const TypeOopPtr* ary_type, PhaseTransform *phase, bool can_create = true); |
8261ee795323
6711100: 64bit fastdebug server vm crashes with assert(_base == Int,"Not an Int")
rasbold
parents:
196
diff
changeset
|
811 |
0 | 812 // Pattern-match a possible usage of AllocateArrayNode. |
813 // Return null if no allocation is recognized. | |
814 static AllocateArrayNode* Ideal_array_allocation(Node* ptr, PhaseTransform* phase) { | |
815 AllocateNode* allo = Ideal_allocation(ptr, phase); | |
816 return (allo == NULL || !allo->is_AllocateArray()) | |
817 ? NULL : allo->as_AllocateArray(); | |
818 } | |
819 }; | |
820 | |
821 //------------------------------AbstractLockNode----------------------------------- | |
822 class AbstractLockNode: public CallNode { | |
823 private: | |
460
424f9bfe6b96
6775880: EA +DeoptimizeALot: assert(mon_info->owner()->is_locked(),"object must be locked now")
kvn
parents:
420
diff
changeset
|
824 bool _eliminate; // indicates this lock can be safely eliminated |
424f9bfe6b96
6775880: EA +DeoptimizeALot: assert(mon_info->owner()->is_locked(),"object must be locked now")
kvn
parents:
420
diff
changeset
|
825 bool _coarsened; // indicates this lock was coarsened |
0 | 826 #ifndef PRODUCT |
827 NamedCounter* _counter; | |
828 #endif | |
829 | |
830 protected: | |
831 // helper functions for lock elimination | |
832 // | |
833 | |
834 bool find_matching_unlock(const Node* ctrl, LockNode* lock, | |
835 GrowableArray<AbstractLockNode*> &lock_ops); | |
836 bool find_lock_and_unlock_through_if(Node* node, LockNode* lock, | |
837 GrowableArray<AbstractLockNode*> &lock_ops); | |
838 bool find_unlocks_for_region(const RegionNode* region, LockNode* lock, | |
839 GrowableArray<AbstractLockNode*> &lock_ops); | |
840 LockNode *find_matching_lock(UnlockNode* unlock); | |
841 | |
842 | |
843 public: | |
844 AbstractLockNode(const TypeFunc *tf) | |
845 : CallNode(tf, NULL, TypeRawPtr::BOTTOM), | |
460
424f9bfe6b96
6775880: EA +DeoptimizeALot: assert(mon_info->owner()->is_locked(),"object must be locked now")
kvn
parents:
420
diff
changeset
|
846 _coarsened(false), |
0 | 847 _eliminate(false) |
848 { | |
849 #ifndef PRODUCT | |
850 _counter = NULL; | |
851 #endif | |
852 } | |
853 virtual int Opcode() const = 0; | |
854 Node * obj_node() const {return in(TypeFunc::Parms + 0); } | |
855 Node * box_node() const {return in(TypeFunc::Parms + 1); } | |
856 Node * fastlock_node() const {return in(TypeFunc::Parms + 2); } | |
857 const Type *sub(const Type *t1, const Type *t2) const { return TypeInt::CC;} | |
858 | |
859 virtual uint size_of() const { return sizeof(*this); } | |
860 | |
861 bool is_eliminated() {return _eliminate; } | |
862 // mark node as eliminated and update the counter if there is one | |
863 void set_eliminated(); | |
864 | |
460
424f9bfe6b96
6775880: EA +DeoptimizeALot: assert(mon_info->owner()->is_locked(),"object must be locked now")
kvn
parents:
420
diff
changeset
|
865 bool is_coarsened() { return _coarsened; } |
424f9bfe6b96
6775880: EA +DeoptimizeALot: assert(mon_info->owner()->is_locked(),"object must be locked now")
kvn
parents:
420
diff
changeset
|
866 void set_coarsened() { _coarsened = true; } |
424f9bfe6b96
6775880: EA +DeoptimizeALot: assert(mon_info->owner()->is_locked(),"object must be locked now")
kvn
parents:
420
diff
changeset
|
867 |
65 | 868 // locking does not modify its arguments |
869 virtual bool may_modify(const TypePtr *addr_t, PhaseTransform *phase){ return false;} | |
870 | |
0 | 871 #ifndef PRODUCT |
872 void create_lock_counter(JVMState* s); | |
873 NamedCounter* counter() const { return _counter; } | |
874 #endif | |
875 }; | |
876 | |
877 //------------------------------Lock--------------------------------------- | |
878 // High-level lock operation | |
879 // | |
880 // This is a subclass of CallNode because it is a macro node which gets expanded | |
881 // into a code sequence containing a call. This node takes 3 "parameters": | |
882 // 0 - object to lock | |
883 // 1 - a BoxLockNode | |
884 // 2 - a FastLockNode | |
885 // | |
886 class LockNode : public AbstractLockNode { | |
887 public: | |
888 | |
889 static const TypeFunc *lock_type() { | |
890 // create input type (domain) | |
891 const Type **fields = TypeTuple::fields(3); | |
892 fields[TypeFunc::Parms+0] = TypeInstPtr::NOTNULL; // Object to be Locked | |
893 fields[TypeFunc::Parms+1] = TypeRawPtr::BOTTOM; // Address of stack location for lock | |
894 fields[TypeFunc::Parms+2] = TypeInt::BOOL; // FastLock | |
895 const TypeTuple *domain = TypeTuple::make(TypeFunc::Parms+3,fields); | |
896 | |
897 // create result type (range) | |
898 fields = TypeTuple::fields(0); | |
899 | |
900 const TypeTuple *range = TypeTuple::make(TypeFunc::Parms+0,fields); | |
901 | |
902 return TypeFunc::make(domain,range); | |
903 } | |
904 | |
905 virtual int Opcode() const; | |
906 virtual uint size_of() const; // Size is bigger | |
907 LockNode(Compile* C, const TypeFunc *tf) : AbstractLockNode( tf ) { | |
908 init_class_id(Class_Lock); | |
909 init_flags(Flag_is_macro); | |
910 C->add_macro_node(this); | |
911 } | |
912 virtual bool guaranteed_safepoint() { return false; } | |
913 | |
914 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape); | |
915 // Expansion modifies the JVMState, so we need to clone it | |
916 virtual void clone_jvms() { | |
917 set_jvms(jvms()->clone_deep(Compile::current())); | |
918 } | |
919 }; | |
920 | |
921 //------------------------------Unlock--------------------------------------- | |
922 // High-level unlock operation | |
923 class UnlockNode : public AbstractLockNode { | |
924 public: | |
925 virtual int Opcode() const; | |
926 virtual uint size_of() const; // Size is bigger | |
927 UnlockNode(Compile* C, const TypeFunc *tf) : AbstractLockNode( tf ) { | |
928 init_class_id(Class_Unlock); | |
929 init_flags(Flag_is_macro); | |
930 C->add_macro_node(this); | |
931 } | |
932 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape); | |
933 // unlock is never a safepoint | |
934 virtual bool guaranteed_safepoint() { return false; } | |
935 }; |