Mercurial > hg > truffle
annotate src/share/vm/opto/callnode.hpp @ 9126:bc26f978b0ce
HotSpotResolvedObjectType: implement hasFinalizeSubclass() correctly
don't use the (wrong) cached value, but ask the runtime on each request.
Fixes regression on xml.* benchmarks @ specjvm2008. The problem was:
After the constructor of Object was deoptimized due to an assumption violation,
it was recompiled again after some time. However, on recompilation, the value
of hasFinalizeSubclass for the class was not updated and it was compiled again
with a, now wrong, assumption, which then triggers deoptimization again.
This was repeated until it hit the recompilation limit (defined by
PerMethodRecompilationCutoff), and therefore only executed by the interpreter
from now on, causing the performance regression.
author | Bernhard Urban <bernhard.urban@jku.at> |
---|---|
date | Mon, 15 Apr 2013 19:54:58 +0200 |
parents | a7114d3d712e |
children | 6f3fd5150b67 |
rev | line source |
---|---|
0 | 1 /* |
5948
ee138854b3a6
7147744: CTW: assert(false) failed: infinite EA connection graph build
kvn
parents:
4777
diff
changeset
|
2 * Copyright (c) 1997, 2012, Oracle and/or its affiliates. All rights reserved. |
0 | 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. |
4 * | |
5 * This code is free software; you can redistribute it and/or modify it | |
6 * under the terms of the GNU General Public License version 2 only, as | |
7 * published by the Free Software Foundation. | |
8 * | |
9 * This code is distributed in the hope that it will be useful, but WITHOUT | |
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
12 * version 2 for more details (a copy is included in the LICENSE file that | |
13 * accompanied this code). | |
14 * | |
15 * You should have received a copy of the GNU General Public License version | |
16 * 2 along with this work; if not, write to the Free Software Foundation, | |
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. | |
18 * | |
1552
c18cbe5936b8
6941466: Oracle rebranding changes for Hotspot repositories
trims
parents:
1137
diff
changeset
|
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA |
c18cbe5936b8
6941466: Oracle rebranding changes for Hotspot repositories
trims
parents:
1137
diff
changeset
|
20 * or visit www.oracle.com if you need additional information or have any |
c18cbe5936b8
6941466: Oracle rebranding changes for Hotspot repositories
trims
parents:
1137
diff
changeset
|
21 * questions. |
0 | 22 * |
23 */ | |
24 | |
1972 | 25 #ifndef SHARE_VM_OPTO_CALLNODE_HPP |
26 #define SHARE_VM_OPTO_CALLNODE_HPP | |
27 | |
28 #include "opto/connode.hpp" | |
29 #include "opto/mulnode.hpp" | |
30 #include "opto/multnode.hpp" | |
31 #include "opto/opcodes.hpp" | |
32 #include "opto/phaseX.hpp" | |
33 #include "opto/type.hpp" | |
34 | |
0 | 35 // Portions of code courtesy of Clifford Click |
36 | |
37 // Optimization - Graph Style | |
38 | |
39 class Chaitin; | |
40 class NamedCounter; | |
41 class MultiNode; | |
42 class SafePointNode; | |
43 class CallNode; | |
44 class CallJavaNode; | |
45 class CallStaticJavaNode; | |
46 class CallDynamicJavaNode; | |
47 class CallRuntimeNode; | |
48 class CallLeafNode; | |
49 class CallLeafNoFPNode; | |
50 class AllocateNode; | |
33 | 51 class AllocateArrayNode; |
0 | 52 class LockNode; |
53 class UnlockNode; | |
54 class JVMState; | |
55 class OopMap; | |
56 class State; | |
57 class StartNode; | |
58 class MachCallNode; | |
59 class FastLockNode; | |
60 | |
61 //------------------------------StartNode-------------------------------------- | |
62 // The method start node | |
63 class StartNode : public MultiNode { | |
64 virtual uint cmp( const Node &n ) const; | |
65 virtual uint size_of() const; // Size is bigger | |
66 public: | |
67 const TypeTuple *_domain; | |
68 StartNode( Node *root, const TypeTuple *domain ) : MultiNode(2), _domain(domain) { | |
69 init_class_id(Class_Start); | |
70 init_req(0,this); | |
71 init_req(1,root); | |
72 } | |
73 virtual int Opcode() const; | |
74 virtual bool pinned() const { return true; }; | |
75 virtual const Type *bottom_type() const; | |
76 virtual const TypePtr *adr_type() const { return TypePtr::BOTTOM; } | |
77 virtual const Type *Value( PhaseTransform *phase ) const; | |
78 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape); | |
79 virtual void calling_convention( BasicType* sig_bt, VMRegPair *parm_reg, uint length ) const; | |
80 virtual const RegMask &in_RegMask(uint) const; | |
81 virtual Node *match( const ProjNode *proj, const Matcher *m ); | |
82 virtual uint ideal_reg() const { return 0; } | |
83 #ifndef PRODUCT | |
84 virtual void dump_spec(outputStream *st) const; | |
85 #endif | |
86 }; | |
87 | |
88 //------------------------------StartOSRNode----------------------------------- | |
89 // The method start node for on stack replacement code | |
90 class StartOSRNode : public StartNode { | |
91 public: | |
92 StartOSRNode( Node *root, const TypeTuple *domain ) : StartNode(root, domain) {} | |
93 virtual int Opcode() const; | |
94 static const TypeTuple *osr_domain(); | |
95 }; | |
96 | |
97 | |
98 //------------------------------ParmNode--------------------------------------- | |
99 // Incoming parameters | |
100 class ParmNode : public ProjNode { | |
101 static const char * const names[TypeFunc::Parms+1]; | |
102 public: | |
33 | 103 ParmNode( StartNode *src, uint con ) : ProjNode(src,con) { |
104 init_class_id(Class_Parm); | |
105 } | |
0 | 106 virtual int Opcode() const; |
107 virtual bool is_CFG() const { return (_con == TypeFunc::Control); } | |
108 virtual uint ideal_reg() const; | |
109 #ifndef PRODUCT | |
110 virtual void dump_spec(outputStream *st) const; | |
111 #endif | |
112 }; | |
113 | |
114 | |
115 //------------------------------ReturnNode------------------------------------- | |
116 // Return from subroutine node | |
117 class ReturnNode : public Node { | |
118 public: | |
119 ReturnNode( uint edges, Node *cntrl, Node *i_o, Node *memory, Node *retadr, Node *frameptr ); | |
120 virtual int Opcode() const; | |
121 virtual bool is_CFG() const { return true; } | |
122 virtual uint hash() const { return NO_HASH; } // CFG nodes do not hash | |
123 virtual bool depends_only_on_test() const { return false; } | |
124 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape); | |
125 virtual const Type *Value( PhaseTransform *phase ) const; | |
126 virtual uint ideal_reg() const { return NotAMachineReg; } | |
127 virtual uint match_edge(uint idx) const; | |
128 #ifndef PRODUCT | |
7636
a7114d3d712e
8005055: pass outputStream to more opto debug routines
kvn
parents:
7473
diff
changeset
|
129 virtual void dump_req(outputStream *st = tty) const; |
0 | 130 #endif |
131 }; | |
132 | |
133 | |
134 //------------------------------RethrowNode------------------------------------ | |
135 // Rethrow of exception at call site. Ends a procedure before rethrowing; | |
136 // ends the current basic block like a ReturnNode. Restores registers and | |
137 // unwinds stack. Rethrow happens in the caller's method. | |
138 class RethrowNode : public Node { | |
139 public: | |
140 RethrowNode( Node *cntrl, Node *i_o, Node *memory, Node *frameptr, Node *ret_adr, Node *exception ); | |
141 virtual int Opcode() const; | |
142 virtual bool is_CFG() const { return true; } | |
143 virtual uint hash() const { return NO_HASH; } // CFG nodes do not hash | |
144 virtual bool depends_only_on_test() const { return false; } | |
145 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape); | |
146 virtual const Type *Value( PhaseTransform *phase ) const; | |
147 virtual uint match_edge(uint idx) const; | |
148 virtual uint ideal_reg() const { return NotAMachineReg; } | |
149 #ifndef PRODUCT | |
7636
a7114d3d712e
8005055: pass outputStream to more opto debug routines
kvn
parents:
7473
diff
changeset
|
150 virtual void dump_req(outputStream *st = tty) const; |
0 | 151 #endif |
152 }; | |
153 | |
154 | |
155 //------------------------------TailCallNode----------------------------------- | |
156 // Pop stack frame and jump indirect | |
157 class TailCallNode : public ReturnNode { | |
158 public: | |
159 TailCallNode( Node *cntrl, Node *i_o, Node *memory, Node *frameptr, Node *retadr, Node *target, Node *moop ) | |
160 : ReturnNode( TypeFunc::Parms+2, cntrl, i_o, memory, frameptr, retadr ) { | |
161 init_req(TypeFunc::Parms, target); | |
162 init_req(TypeFunc::Parms+1, moop); | |
163 } | |
164 | |
165 virtual int Opcode() const; | |
166 virtual uint match_edge(uint idx) const; | |
167 }; | |
168 | |
169 //------------------------------TailJumpNode----------------------------------- | |
170 // Pop stack frame and jump indirect | |
171 class TailJumpNode : public ReturnNode { | |
172 public: | |
173 TailJumpNode( Node *cntrl, Node *i_o, Node *memory, Node *frameptr, Node *target, Node *ex_oop) | |
174 : ReturnNode(TypeFunc::Parms+2, cntrl, i_o, memory, frameptr, Compile::current()->top()) { | |
175 init_req(TypeFunc::Parms, target); | |
176 init_req(TypeFunc::Parms+1, ex_oop); | |
177 } | |
178 | |
179 virtual int Opcode() const; | |
180 virtual uint match_edge(uint idx) const; | |
181 }; | |
182 | |
183 //-------------------------------JVMState------------------------------------- | |
184 // A linked list of JVMState nodes captures the whole interpreter state, | |
185 // plus GC roots, for all active calls at some call site in this compilation | |
186 // unit. (If there is no inlining, then the list has exactly one link.) | |
187 // This provides a way to map the optimized program back into the interpreter, | |
188 // or to let the GC mark the stack. | |
189 class JVMState : public ResourceObj { | |
3939 | 190 friend class VMStructs; |
900
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
191 public: |
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
192 typedef enum { |
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
193 Reexecute_Undefined = -1, // not defined -- will be translated into false later |
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
194 Reexecute_False = 0, // false -- do not reexecute |
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
195 Reexecute_True = 1 // true -- reexecute the bytecode |
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
196 } ReexecuteState; //Reexecute State |
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
197 |
0 | 198 private: |
199 JVMState* _caller; // List pointer for forming scope chains | |
6266
1d7922586cf6
7023639: JSR 292 method handle invocation needs a fast path for compiled code
twisti
parents:
5948
diff
changeset
|
200 uint _depth; // One more than caller depth, or one. |
0 | 201 uint _locoff; // Offset to locals in input edge mapping |
202 uint _stkoff; // Offset to stack in input edge mapping | |
203 uint _monoff; // Offset to monitors in input edge mapping | |
63
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
204 uint _scloff; // Offset to fields of scalar objs in input edge mapping |
0 | 205 uint _endoff; // Offset to end of input edge mapping |
206 uint _sp; // Jave Expression Stack Pointer for this state | |
207 int _bci; // Byte Code Index of this JVM point | |
900
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
208 ReexecuteState _reexecute; // Whether this bytecode need to be re-executed |
0 | 209 ciMethod* _method; // Method Pointer |
210 SafePointNode* _map; // Map node associated with this scope | |
211 public: | |
212 friend class Compile; | |
900
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
213 friend class PreserveReexecuteState; |
0 | 214 |
215 // Because JVMState objects live over the entire lifetime of the | |
216 // Compile object, they are allocated into the comp_arena, which | |
217 // does not get resource marked or reset during the compile process | |
218 void *operator new( size_t x, Compile* C ) { return C->comp_arena()->Amalloc(x); } | |
219 void operator delete( void * ) { } // fast deallocation | |
220 | |
221 // Create a new JVMState, ready for abstract interpretation. | |
222 JVMState(ciMethod* method, JVMState* caller); | |
223 JVMState(int stack_size); // root state; has a null method | |
224 | |
225 // Access functions for the JVM | |
6266
1d7922586cf6
7023639: JSR 292 method handle invocation needs a fast path for compiled code
twisti
parents:
5948
diff
changeset
|
226 // ... --|--- loc ---|--- stk ---|--- arg ---|--- mon ---|--- scl ---| |
1d7922586cf6
7023639: JSR 292 method handle invocation needs a fast path for compiled code
twisti
parents:
5948
diff
changeset
|
227 // \ locoff \ stkoff \ argoff \ monoff \ scloff \ endoff |
0 | 228 uint locoff() const { return _locoff; } |
229 uint stkoff() const { return _stkoff; } | |
230 uint argoff() const { return _stkoff + _sp; } | |
231 uint monoff() const { return _monoff; } | |
63
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
232 uint scloff() const { return _scloff; } |
0 | 233 uint endoff() const { return _endoff; } |
234 uint oopoff() const { return debug_end(); } | |
235 | |
6266
1d7922586cf6
7023639: JSR 292 method handle invocation needs a fast path for compiled code
twisti
parents:
5948
diff
changeset
|
236 int loc_size() const { return stkoff() - locoff(); } |
1d7922586cf6
7023639: JSR 292 method handle invocation needs a fast path for compiled code
twisti
parents:
5948
diff
changeset
|
237 int stk_size() const { return monoff() - stkoff(); } |
1d7922586cf6
7023639: JSR 292 method handle invocation needs a fast path for compiled code
twisti
parents:
5948
diff
changeset
|
238 int arg_size() const { return monoff() - argoff(); } |
1d7922586cf6
7023639: JSR 292 method handle invocation needs a fast path for compiled code
twisti
parents:
5948
diff
changeset
|
239 int mon_size() const { return scloff() - monoff(); } |
1d7922586cf6
7023639: JSR 292 method handle invocation needs a fast path for compiled code
twisti
parents:
5948
diff
changeset
|
240 int scl_size() const { return endoff() - scloff(); } |
0 | 241 |
6266
1d7922586cf6
7023639: JSR 292 method handle invocation needs a fast path for compiled code
twisti
parents:
5948
diff
changeset
|
242 bool is_loc(uint i) const { return locoff() <= i && i < stkoff(); } |
1d7922586cf6
7023639: JSR 292 method handle invocation needs a fast path for compiled code
twisti
parents:
5948
diff
changeset
|
243 bool is_stk(uint i) const { return stkoff() <= i && i < monoff(); } |
1d7922586cf6
7023639: JSR 292 method handle invocation needs a fast path for compiled code
twisti
parents:
5948
diff
changeset
|
244 bool is_mon(uint i) const { return monoff() <= i && i < scloff(); } |
1d7922586cf6
7023639: JSR 292 method handle invocation needs a fast path for compiled code
twisti
parents:
5948
diff
changeset
|
245 bool is_scl(uint i) const { return scloff() <= i && i < endoff(); } |
0 | 246 |
900
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
247 uint sp() const { return _sp; } |
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
248 int bci() const { return _bci; } |
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
249 bool should_reexecute() const { return _reexecute==Reexecute_True; } |
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
250 bool is_reexecute_undefined() const { return _reexecute==Reexecute_Undefined; } |
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
251 bool has_method() const { return _method != NULL; } |
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
252 ciMethod* method() const { assert(has_method(), ""); return _method; } |
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
253 JVMState* caller() const { return _caller; } |
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
254 SafePointNode* map() const { return _map; } |
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
255 uint depth() const { return _depth; } |
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
256 uint debug_start() const; // returns locoff of root caller |
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
257 uint debug_end() const; // returns endoff of self |
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
258 uint debug_size() const { |
63
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
259 return loc_size() + sp() + mon_size() + scl_size(); |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
260 } |
0 | 261 uint debug_depth() const; // returns sum of debug_size values at all depths |
262 | |
263 // Returns the JVM state at the desired depth (1 == root). | |
264 JVMState* of_depth(int d) const; | |
265 | |
266 // Tells if two JVM states have the same call chain (depth, methods, & bcis). | |
267 bool same_calls_as(const JVMState* that) const; | |
268 | |
269 // Monitors (monitors are stored as (boxNode, objNode) pairs | |
270 enum { logMonitorEdges = 1 }; | |
271 int nof_monitors() const { return mon_size() >> logMonitorEdges; } | |
272 int monitor_depth() const { return nof_monitors() + (caller() ? caller()->monitor_depth() : 0); } | |
273 int monitor_box_offset(int idx) const { return monoff() + (idx << logMonitorEdges) + 0; } | |
274 int monitor_obj_offset(int idx) const { return monoff() + (idx << logMonitorEdges) + 1; } | |
275 bool is_monitor_box(uint off) const { | |
276 assert(is_mon(off), "should be called only for monitor edge"); | |
277 return (0 == bitfield(off - monoff(), 0, logMonitorEdges)); | |
278 } | |
279 bool is_monitor_use(uint off) const { return (is_mon(off) | |
280 && is_monitor_box(off)) | |
281 || (caller() && caller()->is_monitor_use(off)); } | |
282 | |
283 // Initialization functions for the JVM | |
284 void set_locoff(uint off) { _locoff = off; } | |
285 void set_stkoff(uint off) { _stkoff = off; } | |
286 void set_monoff(uint off) { _monoff = off; } | |
63
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
287 void set_scloff(uint off) { _scloff = off; } |
0 | 288 void set_endoff(uint off) { _endoff = off; } |
63
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
289 void set_offsets(uint off) { |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
290 _locoff = _stkoff = _monoff = _scloff = _endoff = off; |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
291 } |
0 | 292 void set_map(SafePointNode *map) { _map = map; } |
293 void set_sp(uint sp) { _sp = sp; } | |
900
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
294 // _reexecute is initialized to "undefined" for a new bci |
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
295 void set_bci(int bci) {if(_bci != bci)_reexecute=Reexecute_Undefined; _bci = bci; } |
9987d9d5eb0e
6833129: specjvm98 fails with NullPointerException in the compiler with -XX:DeoptimizeALot
cfang
parents:
704
diff
changeset
|
296 void set_should_reexecute(bool reexec) {_reexecute = reexec ? Reexecute_True : Reexecute_False;} |
0 | 297 |
298 // Miscellaneous utility functions | |
299 JVMState* clone_deep(Compile* C) const; // recursively clones caller chain | |
300 JVMState* clone_shallow(Compile* C) const; // retains uncloned caller | |
301 | |
302 #ifndef PRODUCT | |
303 void format(PhaseRegAlloc *regalloc, const Node *n, outputStream* st) const; | |
304 void dump_spec(outputStream *st) const; | |
305 void dump_on(outputStream* st) const; | |
306 void dump() const { | |
307 dump_on(tty); | |
308 } | |
309 #endif | |
310 }; | |
311 | |
312 //------------------------------SafePointNode---------------------------------- | |
313 // A SafePointNode is a subclass of a MultiNode for convenience (and | |
314 // potential code sharing) only - conceptually it is independent of | |
315 // the Node semantics. | |
316 class SafePointNode : public MultiNode { | |
317 virtual uint cmp( const Node &n ) const; | |
318 virtual uint size_of() const; // Size is bigger | |
319 | |
320 public: | |
321 SafePointNode(uint edges, JVMState* jvms, | |
322 // A plain safepoint advertises no memory effects (NULL): | |
323 const TypePtr* adr_type = NULL) | |
324 : MultiNode( edges ), | |
325 _jvms(jvms), | |
326 _oop_map(NULL), | |
327 _adr_type(adr_type) | |
328 { | |
329 init_class_id(Class_SafePoint); | |
330 } | |
331 | |
332 OopMap* _oop_map; // Array of OopMap info (8-bit char) for GC | |
333 JVMState* const _jvms; // Pointer to list of JVM State objects | |
334 const TypePtr* _adr_type; // What type of memory does this node produce? | |
335 | |
336 // Many calls take *all* of memory as input, | |
337 // but some produce a limited subset of that memory as output. | |
338 // The adr_type reports the call's behavior as a store, not a load. | |
339 | |
340 virtual JVMState* jvms() const { return _jvms; } | |
341 void set_jvms(JVMState* s) { | |
342 *(JVMState**)&_jvms = s; // override const attribute in the accessor | |
343 } | |
344 OopMap *oop_map() const { return _oop_map; } | |
345 void set_oop_map(OopMap *om) { _oop_map = om; } | |
346 | |
7194
beebba0acc11
7172640: C2: instrinsic implementations in LibraryCallKit should use argument() instead of pop()
twisti
parents:
6266
diff
changeset
|
347 private: |
beebba0acc11
7172640: C2: instrinsic implementations in LibraryCallKit should use argument() instead of pop()
twisti
parents:
6266
diff
changeset
|
348 void verify_input(JVMState* jvms, uint idx) const { |
beebba0acc11
7172640: C2: instrinsic implementations in LibraryCallKit should use argument() instead of pop()
twisti
parents:
6266
diff
changeset
|
349 assert(verify_jvms(jvms), "jvms must match"); |
beebba0acc11
7172640: C2: instrinsic implementations in LibraryCallKit should use argument() instead of pop()
twisti
parents:
6266
diff
changeset
|
350 Node* n = in(idx); |
beebba0acc11
7172640: C2: instrinsic implementations in LibraryCallKit should use argument() instead of pop()
twisti
parents:
6266
diff
changeset
|
351 assert((!n->bottom_type()->isa_long() && !n->bottom_type()->isa_double()) || |
beebba0acc11
7172640: C2: instrinsic implementations in LibraryCallKit should use argument() instead of pop()
twisti
parents:
6266
diff
changeset
|
352 in(idx + 1)->is_top(), "2nd half of long/double"); |
beebba0acc11
7172640: C2: instrinsic implementations in LibraryCallKit should use argument() instead of pop()
twisti
parents:
6266
diff
changeset
|
353 } |
beebba0acc11
7172640: C2: instrinsic implementations in LibraryCallKit should use argument() instead of pop()
twisti
parents:
6266
diff
changeset
|
354 |
beebba0acc11
7172640: C2: instrinsic implementations in LibraryCallKit should use argument() instead of pop()
twisti
parents:
6266
diff
changeset
|
355 public: |
0 | 356 // Functionality from old debug nodes which has changed |
357 Node *local(JVMState* jvms, uint idx) const { | |
7194
beebba0acc11
7172640: C2: instrinsic implementations in LibraryCallKit should use argument() instead of pop()
twisti
parents:
6266
diff
changeset
|
358 verify_input(jvms, jvms->locoff() + idx); |
0 | 359 return in(jvms->locoff() + idx); |
360 } | |
361 Node *stack(JVMState* jvms, uint idx) const { | |
7194
beebba0acc11
7172640: C2: instrinsic implementations in LibraryCallKit should use argument() instead of pop()
twisti
parents:
6266
diff
changeset
|
362 verify_input(jvms, jvms->stkoff() + idx); |
0 | 363 return in(jvms->stkoff() + idx); |
364 } | |
365 Node *argument(JVMState* jvms, uint idx) const { | |
7194
beebba0acc11
7172640: C2: instrinsic implementations in LibraryCallKit should use argument() instead of pop()
twisti
parents:
6266
diff
changeset
|
366 verify_input(jvms, jvms->argoff() + idx); |
0 | 367 return in(jvms->argoff() + idx); |
368 } | |
369 Node *monitor_box(JVMState* jvms, uint idx) const { | |
370 assert(verify_jvms(jvms), "jvms must match"); | |
371 return in(jvms->monitor_box_offset(idx)); | |
372 } | |
373 Node *monitor_obj(JVMState* jvms, uint idx) const { | |
374 assert(verify_jvms(jvms), "jvms must match"); | |
375 return in(jvms->monitor_obj_offset(idx)); | |
376 } | |
377 | |
378 void set_local(JVMState* jvms, uint idx, Node *c); | |
379 | |
380 void set_stack(JVMState* jvms, uint idx, Node *c) { | |
381 assert(verify_jvms(jvms), "jvms must match"); | |
382 set_req(jvms->stkoff() + idx, c); | |
383 } | |
384 void set_argument(JVMState* jvms, uint idx, Node *c) { | |
385 assert(verify_jvms(jvms), "jvms must match"); | |
386 set_req(jvms->argoff() + idx, c); | |
387 } | |
388 void ensure_stack(JVMState* jvms, uint stk_size) { | |
389 assert(verify_jvms(jvms), "jvms must match"); | |
390 int grow_by = (int)stk_size - (int)jvms->stk_size(); | |
391 if (grow_by > 0) grow_stack(jvms, grow_by); | |
392 } | |
393 void grow_stack(JVMState* jvms, uint grow_by); | |
394 // Handle monitor stack | |
395 void push_monitor( const FastLockNode *lock ); | |
396 void pop_monitor (); | |
397 Node *peek_monitor_box() const; | |
398 Node *peek_monitor_obj() const; | |
399 | |
400 // Access functions for the JVM | |
401 Node *control () const { return in(TypeFunc::Control ); } | |
402 Node *i_o () const { return in(TypeFunc::I_O ); } | |
403 Node *memory () const { return in(TypeFunc::Memory ); } | |
404 Node *returnadr() const { return in(TypeFunc::ReturnAdr); } | |
405 Node *frameptr () const { return in(TypeFunc::FramePtr ); } | |
406 | |
407 void set_control ( Node *c ) { set_req(TypeFunc::Control,c); } | |
408 void set_i_o ( Node *c ) { set_req(TypeFunc::I_O ,c); } | |
409 void set_memory ( Node *c ) { set_req(TypeFunc::Memory ,c); } | |
410 | |
411 MergeMemNode* merged_memory() const { | |
412 return in(TypeFunc::Memory)->as_MergeMem(); | |
413 } | |
414 | |
415 // The parser marks useless maps as dead when it's done with them: | |
416 bool is_killed() { return in(TypeFunc::Control) == NULL; } | |
417 | |
418 // Exception states bubbling out of subgraphs such as inlined calls | |
419 // are recorded here. (There might be more than one, hence the "next".) | |
420 // This feature is used only for safepoints which serve as "maps" | |
421 // for JVM states during parsing, intrinsic expansion, etc. | |
422 SafePointNode* next_exception() const; | |
423 void set_next_exception(SafePointNode* n); | |
424 bool has_exceptions() const { return next_exception() != NULL; } | |
425 | |
426 // Standard Node stuff | |
427 virtual int Opcode() const; | |
428 virtual bool pinned() const { return true; } | |
429 virtual const Type *Value( PhaseTransform *phase ) const; | |
430 virtual const Type *bottom_type() const { return Type::CONTROL; } | |
431 virtual const TypePtr *adr_type() const { return _adr_type; } | |
432 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape); | |
433 virtual Node *Identity( PhaseTransform *phase ); | |
434 virtual uint ideal_reg() const { return 0; } | |
435 virtual const RegMask &in_RegMask(uint) const; | |
436 virtual const RegMask &out_RegMask() const; | |
437 virtual uint match_edge(uint idx) const; | |
438 | |
439 static bool needs_polling_address_input(); | |
440 | |
441 #ifndef PRODUCT | |
442 virtual void dump_spec(outputStream *st) const; | |
443 #endif | |
444 }; | |
445 | |
63
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
446 //------------------------------SafePointScalarObjectNode---------------------- |
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
// A SafePointScalarObjectNode represents the state of a scalarized object
// at a safepoint.

class SafePointScalarObjectNode: public TypeNode {
  uint _first_index; // First input edge index of a SafePoint node where
                     // states of the scalarized object fields are collected.
  uint _n_fields;    // Number of non-static fields of the scalarized object.
  DEBUG_ONLY(AllocateNode* _alloc;)  // Debug-only back-link to the originating allocation.

  // Not GVN-hashable: each scalarized object is tied to one safepoint.
  virtual uint hash() const ; // { return NO_HASH; }
  virtual uint cmp( const Node &n ) const;

public:
  SafePointScalarObjectNode(const TypeOopPtr* tp,
#ifdef ASSERT
                            AllocateNode* alloc,
#endif
                            uint first_index, uint n_fields);
  virtual int Opcode() const;
  virtual uint ideal_reg() const;
  virtual const RegMask &in_RegMask(uint) const;
  virtual const RegMask &out_RegMask() const;
  virtual uint match_edge(uint idx) const;

  uint first_index() const { return _first_index; }
  uint n_fields() const { return _n_fields; }

#ifdef ASSERT
  AllocateNode* alloc() const { return _alloc; }
#endif

  virtual uint size_of() const { return sizeof(*this); }

  // Assumes that "this" is an argument to a safepoint node "s", and that
  // "new_call" is being created to correspond to "s".  But the difference
  // between the start index of the jvmstates of "new_call" and "s" is
  // "jvms_adj".  Produce and return a SafePointScalarObjectNode that
  // corresponds appropriately to "this" in "new_call".  Assumes that
  // "sosn_map" is a map, specific to the translation of "s" to "new_call",
  // mapping old SafePointScalarObjectNodes to new, to avoid multiple copies.
  SafePointScalarObjectNode* clone(int jvms_adj, Dict* sosn_map) const;

#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
#endif
};
eac007780a58
6671807: (Escape Analysis) Add new ideal node to represent the state of a scalarized object at a safepoint
kvn
parents:
39
diff
changeset
|
493 |
1080
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
494 |
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
495 // Simple container for the outgoing projections of a call. Useful |
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
496 // for serious surgery on calls. |
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
497 class CallProjections : public StackObj { |
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
498 public: |
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
499 Node* fallthrough_proj; |
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
500 Node* fallthrough_catchproj; |
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
501 Node* fallthrough_memproj; |
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
502 Node* fallthrough_ioproj; |
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
503 Node* catchall_catchproj; |
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
504 Node* catchall_memproj; |
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
505 Node* catchall_ioproj; |
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
506 Node* resproj; |
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
507 Node* exobj; |
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
508 }; |
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
509 |
7473 | 510 class CallGenerator; |
1080
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
511 |
0 | 512 //------------------------------CallNode--------------------------------------- |
513 // Call nodes now subsume the function of debug nodes at callsites, so they | |
514 // contain the functionality of a full scope chain of debug nodes. | |
515 class CallNode : public SafePointNode { | |
3939 | 516 friend class VMStructs; |
0 | 517 public: |
518 const TypeFunc *_tf; // Function type | |
519 address _entry_point; // Address of method being called | |
520 float _cnt; // Estimate of number of times called | |
7473 | 521 CallGenerator* _generator; // corresponding CallGenerator for some late inline calls |
0 | 522 |
523 CallNode(const TypeFunc* tf, address addr, const TypePtr* adr_type) | |
524 : SafePointNode(tf->domain()->cnt(), NULL, adr_type), | |
525 _tf(tf), | |
526 _entry_point(addr), | |
7473 | 527 _cnt(COUNT_UNKNOWN), |
528 _generator(NULL) | |
0 | 529 { |
530 init_class_id(Class_Call); | |
531 } | |
532 | |
7473 | 533 const TypeFunc* tf() const { return _tf; } |
534 const address entry_point() const { return _entry_point; } | |
535 const float cnt() const { return _cnt; } | |
536 CallGenerator* generator() const { return _generator; } | |
0 | 537 |
7473 | 538 void set_tf(const TypeFunc* tf) { _tf = tf; } |
539 void set_entry_point(address p) { _entry_point = p; } | |
540 void set_cnt(float c) { _cnt = c; } | |
541 void set_generator(CallGenerator* cg) { _generator = cg; } | |
0 | 542 |
543 virtual const Type *bottom_type() const; | |
544 virtual const Type *Value( PhaseTransform *phase ) const; | |
7473 | 545 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape); |
0 | 546 virtual Node *Identity( PhaseTransform *phase ) { return this; } |
547 virtual uint cmp( const Node &n ) const; | |
548 virtual uint size_of() const = 0; | |
549 virtual void calling_convention( BasicType* sig_bt, VMRegPair *parm_regs, uint argcnt ) const; | |
550 virtual Node *match( const ProjNode *proj, const Matcher *m ); | |
551 virtual uint ideal_reg() const { return NotAMachineReg; } | |
552 // Are we guaranteed that this node is a safepoint? Not true for leaf calls and | |
553 // for some macro nodes whose expansion does not have a safepoint on the fast path. | |
554 virtual bool guaranteed_safepoint() { return true; } | |
555 // For macro nodes, the JVMState gets modified during expansion, so when cloning | |
556 // the node the JVMState must be cloned. | |
557 virtual void clone_jvms() { } // default is not to clone | |
558 | |
65 | 559 // Returns true if the call may modify n |
560 virtual bool may_modify(const TypePtr *addr_t, PhaseTransform *phase); | |
561 // Does this node have a use of n other than in debug information? | |
168
7793bd37a336
6705887: Compressed Oops: generate x64 addressing and implicit null checks with narrow oops
kvn
parents:
113
diff
changeset
|
562 bool has_non_debug_use(Node *n); |
65 | 563 // Returns the unique CheckCastPP of a call |
564 // or result projection is there are several CheckCastPP | |
565 // or returns NULL if there is no one. | |
566 Node *result_cast(); | |
5948
ee138854b3a6
7147744: CTW: assert(false) failed: infinite EA connection graph build
kvn
parents:
4777
diff
changeset
|
567 // Does this node returns pointer? |
ee138854b3a6
7147744: CTW: assert(false) failed: infinite EA connection graph build
kvn
parents:
4777
diff
changeset
|
568 bool returns_pointer() const { |
ee138854b3a6
7147744: CTW: assert(false) failed: infinite EA connection graph build
kvn
parents:
4777
diff
changeset
|
569 const TypeTuple *r = tf()->range(); |
ee138854b3a6
7147744: CTW: assert(false) failed: infinite EA connection graph build
kvn
parents:
4777
diff
changeset
|
570 return (r->cnt() > TypeFunc::Parms && |
ee138854b3a6
7147744: CTW: assert(false) failed: infinite EA connection graph build
kvn
parents:
4777
diff
changeset
|
571 r->field_at(TypeFunc::Parms)->isa_ptr()); |
ee138854b3a6
7147744: CTW: assert(false) failed: infinite EA connection graph build
kvn
parents:
4777
diff
changeset
|
572 } |
65 | 573 |
1080
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
574 // Collect all the interesting edges from a call for use in |
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
575 // replacing the call by something else. Used by macro expansion |
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
576 // and the late inlining support. |
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
577 void extract_projections(CallProjections* projs, bool separate_io_proj); |
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
578 |
0 | 579 virtual uint match_edge(uint idx) const; |
580 | |
581 #ifndef PRODUCT | |
7636
a7114d3d712e
8005055: pass outputStream to more opto debug routines
kvn
parents:
7473
diff
changeset
|
582 virtual void dump_req(outputStream *st = tty) const; |
0 | 583 virtual void dump_spec(outputStream *st) const; |
584 #endif | |
585 }; | |
586 | |
1080
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
903
diff
changeset
|
587 |
0 | 588 //------------------------------CallJavaNode----------------------------------- |
589 // Make a static or dynamic subroutine call node using Java calling | |
590 // convention. (The "Java" calling convention is the compiler's calling | |
591 // convention, as opposed to the interpreter's or that of native C.) | |
592 class CallJavaNode : public CallNode { | |
3939 | 593 friend class VMStructs; |
0 | 594 protected: |
595 virtual uint cmp( const Node &n ) const; | |
596 virtual uint size_of() const; // Size is bigger | |
597 | |
598 bool _optimized_virtual; | |
1137
97125851f396
6829187: compiler optimizations required for JSR 292
twisti
parents:
1080
diff
changeset
|
599 bool _method_handle_invoke; |
0 | 600 ciMethod* _method; // Method being direct called |
601 public: | |
602 const int _bci; // Byte Code Index of call byte code | |
603 CallJavaNode(const TypeFunc* tf , address addr, ciMethod* method, int bci) | |
604 : CallNode(tf, addr, TypePtr::BOTTOM), | |
1137
97125851f396
6829187: compiler optimizations required for JSR 292
twisti
parents:
1080
diff
changeset
|
605 _method(method), _bci(bci), |
97125851f396
6829187: compiler optimizations required for JSR 292
twisti
parents:
1080
diff
changeset
|
606 _optimized_virtual(false), |
97125851f396
6829187: compiler optimizations required for JSR 292
twisti
parents:
1080
diff
changeset
|
607 _method_handle_invoke(false) |
0 | 608 { |
609 init_class_id(Class_CallJava); | |
610 } | |
611 | |
612 virtual int Opcode() const; | |
613 ciMethod* method() const { return _method; } | |
614 void set_method(ciMethod *m) { _method = m; } | |
615 void set_optimized_virtual(bool f) { _optimized_virtual = f; } | |
616 bool is_optimized_virtual() const { return _optimized_virtual; } | |
1137
97125851f396
6829187: compiler optimizations required for JSR 292
twisti
parents:
1080
diff
changeset
|
617 void set_method_handle_invoke(bool f) { _method_handle_invoke = f; } |
97125851f396
6829187: compiler optimizations required for JSR 292
twisti
parents:
1080
diff
changeset
|
618 bool is_method_handle_invoke() const { return _method_handle_invoke; } |
0 | 619 |
620 #ifndef PRODUCT | |
621 virtual void dump_spec(outputStream *st) const; | |
622 #endif | |
623 }; | |
624 | |
//------------------------------CallStaticJavaNode-----------------------------
// Make a direct subroutine call using Java calling convention (for static
// calls and optimized virtual calls, plus calls to wrappers for run-time
// routines); generates static stub.
class CallStaticJavaNode : public CallJavaNode {
  virtual uint cmp( const Node &n ) const;
  virtual uint size_of() const; // Size is bigger
public:
  // Direct call to a known Java method.
  CallStaticJavaNode(const TypeFunc* tf, address addr, ciMethod* method, int bci)
    : CallJavaNode(tf, addr, method, bci), _name(NULL) {
    init_class_id(Class_CallStaticJava);
  }
  // Call to a named runtime wrapper (no ciMethod); memory effects may be
  // narrowed via adr_type.
  CallStaticJavaNode(const TypeFunc* tf, address addr, const char* name, int bci,
                     const TypePtr* adr_type)
    : CallJavaNode(tf, addr, NULL, bci), _name(name) {
    init_class_id(Class_CallStaticJava);
    // This node calls a runtime stub, which often has narrow memory effects.
    _adr_type = adr_type;
  }
  const char *_name;      // Runtime wrapper name

  // If this is an uncommon trap, return the request code, else zero.
  int uncommon_trap_request() const;
  static int extract_uncommon_trap_request(const Node* call);

  virtual int Opcode() const;
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
#endif
};
655 | |
//------------------------------CallDynamicJavaNode----------------------------
// Make a dispatched call using Java calling convention.
class CallDynamicJavaNode : public CallJavaNode {
  virtual uint cmp( const Node &n ) const;
  virtual uint size_of() const; // Size is bigger
public:
  CallDynamicJavaNode( const TypeFunc *tf , address addr, ciMethod* method, int vtable_index, int bci ) : CallJavaNode(tf,addr,method,bci), _vtable_index(vtable_index) {
    init_class_id(Class_CallDynamicJava);
  }

  int _vtable_index;  // vtable slot used for the virtual dispatch
  virtual int Opcode() const;
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
#endif
};
672 | |
//------------------------------CallRuntimeNode--------------------------------
// Make a direct subroutine call node into compiled C++ code.
class CallRuntimeNode : public CallNode {
  virtual uint cmp( const Node &n ) const;
  virtual uint size_of() const; // Size is bigger
public:
  CallRuntimeNode(const TypeFunc* tf, address addr, const char* name,
                  const TypePtr* adr_type)
    : CallNode(tf, addr, adr_type),
      _name(name)
  {
    init_class_id(Class_CallRuntime);
  }

  const char *_name;            // Printable name, if _method is NULL
  virtual int Opcode() const;
  // Runtime calls use the C calling convention, not the Java one.
  virtual void calling_convention( BasicType* sig_bt, VMRegPair *parm_regs, uint argcnt ) const;

#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
#endif
};
695 | |
//------------------------------CallLeafNode-----------------------------------
// Make a direct subroutine call node into compiled C++ code, without
// safepoints
class CallLeafNode : public CallRuntimeNode {
public:
  CallLeafNode(const TypeFunc* tf, address addr, const char* name,
               const TypePtr* adr_type)
    : CallRuntimeNode(tf, addr, name, adr_type)
  {
    init_class_id(Class_CallLeaf);
  }
  virtual int Opcode() const;
  // Leaf calls never stop at a safepoint.
  virtual bool        guaranteed_safepoint()  { return false; }
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
#endif
};
713 | |
//------------------------------CallLeafNoFPNode-------------------------------
// CallLeafNode, not using floating point or using it in the same manner as
// the generated code
class CallLeafNoFPNode : public CallLeafNode {
public:
  CallLeafNoFPNode(const TypeFunc* tf, address addr, const char* name,
                   const TypePtr* adr_type)
    : CallLeafNode(tf, addr, name, adr_type)
  {
  }
  virtual int Opcode() const;
};
726 | |
727 | |
//------------------------------Allocate---------------------------------------
// High-level memory allocation
//
//  AllocateNode and AllocateArrayNode are subclasses of CallNode because they will
//  get expanded into a code sequence containing a call.  Unlike other CallNodes,
//  they have 2 memory projections and 2 i_o projections (which are distinguished by
//  the _is_io_use flag in the projection.)  This is needed when expanding the node in
//  order to differentiate the uses of the projection on the normal control path from
//  those on the exception return path.
//
class AllocateNode : public CallNode {
public:
  enum {
    // Output:
    RawAddress  = TypeFunc::Parms,    // the newly-allocated raw address
    // Inputs:
    AllocSize   = TypeFunc::Parms,    // size (in bytes) of the new object
    KlassNode,                        // type (maybe dynamic) of the obj.
    InitialTest,                      // slow-path test (may be constant)
    ALength,                          // array length (or TOP if none)
    ParmLimit
  };

  // Build the TypeFunc shared by all allocation call sites.
  static const TypeFunc* alloc_type() {
    const Type** fields = TypeTuple::fields(ParmLimit - TypeFunc::Parms);
    fields[AllocSize]   = TypeInt::POS;
    fields[KlassNode]   = TypeInstPtr::NOTNULL;
    fields[InitialTest] = TypeInt::BOOL;
    fields[ALength]     = TypeInt::INT;  // length (can be a bad length)

    const TypeTuple *domain = TypeTuple::make(ParmLimit, fields);

    // create result type (range)
    fields = TypeTuple::fields(1);
    fields[TypeFunc::Parms+0] = TypeRawPtr::NOTNULL; // Returned oop

    const TypeTuple *range = TypeTuple::make(TypeFunc::Parms+1, fields);

    return TypeFunc::make(domain, range);
  }

  bool _is_scalar_replaceable;  // Result of Escape Analysis

  virtual uint size_of() const; // Size is bigger
  AllocateNode(Compile* C, const TypeFunc *atype, Node *ctrl, Node *mem, Node *abio,
               Node *size, Node *klass_node, Node *initial_test);
  // Expansion modifies the JVMState, so we need to clone it
  virtual void  clone_jvms() {
    set_jvms(jvms()->clone_deep(Compile::current()));
  }
  virtual int Opcode() const;
  virtual uint ideal_reg() const { return Op_RegP; }
  virtual bool        guaranteed_safepoint()  { return false; }

  // allocations do not modify their arguments
  virtual bool        may_modify(const TypePtr *addr_t, PhaseTransform *phase) { return false;}

  // Pattern-match a possible usage of AllocateNode.
  // Return null if no allocation is recognized.
  // The operand is the pointer produced by the (possible) allocation.
  // It must be a projection of the Allocate or its subsequent CastPP.
  // (Note:  This function is defined in file graphKit.cpp, near
  // GraphKit::new_instance/new_array, whose output it recognizes.)
  // The 'ptr' may not have an offset unless the 'offset' argument is given.
  static AllocateNode* Ideal_allocation(Node* ptr, PhaseTransform* phase);

  // Fancy version which uses AddPNode::Ideal_base_and_offset to strip
  // an offset, which is reported back to the caller.
  // (Note:  AllocateNode::Ideal_allocation is defined in graphKit.cpp.)
  static AllocateNode* Ideal_allocation(Node* ptr, PhaseTransform* phase,
                                        intptr_t& offset);

  // Dig the klass operand out of a (possible) allocation site.
  static Node* Ideal_klass(Node* ptr, PhaseTransform* phase) {
    AllocateNode* allo = Ideal_allocation(ptr, phase);
    return (allo == NULL) ? NULL : allo->in(KlassNode);
  }

  // Conservatively small estimate of offset of first non-header byte.
  int minimum_header_size() {
    return is_AllocateArray() ? arrayOopDesc::base_offset_in_bytes(T_BYTE) :
                                instanceOopDesc::base_offset_in_bytes();
  }

  // Return the corresponding initialization barrier (or null if none).
  // Walks out edges to find it...
  // (Note: Both InitializeNode::allocation and AllocateNode::initialization
  // are defined in graphKit.cpp, which sets up the bidirectional relation.)
  InitializeNode* initialization();

  // Return the corresponding storestore barrier (or null if none).
  // Walks out edges to find it...
  MemBarStoreStoreNode* storestore();

  // Convenience for initialization->maybe_set_complete(phase)
  bool maybe_set_complete(PhaseGVN* phase);
};
825 | |
//------------------------------AllocateArray---------------------------------
//
// High-level array allocation
//
class AllocateArrayNode : public AllocateNode {
public:
  AllocateArrayNode(Compile* C, const TypeFunc *atype, Node *ctrl, Node *mem, Node *abio,
                    Node* size, Node* klass_node, Node* initial_test,
                    Node* count_val
                    )
    : AllocateNode(C, atype, ctrl, mem, abio, size, klass_node,
                   initial_test)
  {
    init_class_id(Class_AllocateArray);
    // The array length occupies the otherwise-TOP ALength input slot.
    set_req(AllocateNode::ALength,        count_val);
  }
  virtual int Opcode() const;
  virtual uint size_of() const; // Size is bigger
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);

  // Dig the length operand out of a array allocation site.
  Node* Ideal_length() {
    return in(AllocateNode::ALength);
  }

  // Dig the length operand out of a array allocation site and narrow the
  // type with a CastII, if necesssary
  Node* make_ideal_length(const TypeOopPtr* ary_type, PhaseTransform *phase, bool can_create = true);

  // Pattern-match a possible usage of AllocateArrayNode.
  // Return null if no allocation is recognized.
  static AllocateArrayNode* Ideal_array_allocation(Node* ptr, PhaseTransform* phase) {
    AllocateNode* allo = Ideal_allocation(ptr, phase);
    return (allo == NULL || !allo->is_AllocateArray())
           ? NULL : allo->as_AllocateArray();
  }
};
863 | |
//------------------------------AbstractLockNode-----------------------------------
// Common base of LockNode and UnlockNode; tracks whether (and why) the
// lock operation was eliminated by the optimizer.
class AbstractLockNode: public CallNode {
private:
  enum {
    Regular = 0,  // Normal lock
    NonEscObj,    // Lock is used for non escaping object
    Coarsened,    // Lock was coarsened
    Nested        // Nested lock
  } _kind;
#ifndef PRODUCT
  NamedCounter* _counter;
#endif

protected:
  // helper functions for lock elimination
  //

  bool find_matching_unlock(const Node* ctrl, LockNode* lock,
                            GrowableArray<AbstractLockNode*> &lock_ops);
  bool find_lock_and_unlock_through_if(Node* node, LockNode* lock,
                                       GrowableArray<AbstractLockNode*> &lock_ops);
  bool find_unlocks_for_region(const RegionNode* region, LockNode* lock,
                               GrowableArray<AbstractLockNode*> &lock_ops);
  LockNode *find_matching_lock(UnlockNode* unlock);

  // Update the counter to indicate that this lock was eliminated.
  void set_eliminated_lock_counter() PRODUCT_RETURN;

public:
  AbstractLockNode(const TypeFunc *tf)
    : CallNode(tf, NULL, TypeRawPtr::BOTTOM),
      _kind(Regular)
  {
#ifndef PRODUCT
    _counter = NULL;
#endif
  }
  virtual int Opcode() const = 0;
  // Inputs follow the 3-parameter layout documented on LockNode.
  Node *   obj_node() const       {return in(TypeFunc::Parms + 0); }
  Node *   box_node() const       {return in(TypeFunc::Parms + 1); }
  Node *   fastlock_node() const  {return in(TypeFunc::Parms + 2); }
  void     set_box_node(Node* box) { set_req(TypeFunc::Parms + 1, box); }

  const Type *sub(const Type *t1, const Type *t2) const { return TypeInt::CC;}

  virtual uint size_of() const { return sizeof(*this); }

  bool is_eliminated()  const { return (_kind != Regular); }
  bool is_non_esc_obj() const { return (_kind == NonEscObj); }
  bool is_coarsened()   const { return (_kind == Coarsened); }
  bool is_nested()      const { return (_kind == Nested); }

  void set_non_esc_obj() { _kind = NonEscObj; set_eliminated_lock_counter(); }
  void set_coarsened()   { _kind = Coarsened; set_eliminated_lock_counter(); }
  void set_nested()      { _kind = Nested; set_eliminated_lock_counter(); }

  // locking does not modify its arguments
  virtual bool may_modify(const TypePtr *addr_t, PhaseTransform *phase){ return false;}

#ifndef PRODUCT
  void create_lock_counter(JVMState* s);
  NamedCounter* counter() const { return _counter; }
#endif
};
928 | |
//------------------------------Lock---------------------------------------
// High-level lock operation
//
// This is a subclass of CallNode because it is a macro node which gets expanded
// into a code sequence containing a call.  This node takes 3 "parameters":
//    0  -  object to lock
//    1 -   a BoxLockNode
//    2 -   a FastLockNode
//
class LockNode : public AbstractLockNode {
public:

  // Build the TypeFunc shared by all Lock nodes (3 inputs, no result).
  static const TypeFunc *lock_type() {
    // create input type (domain)
    const Type **fields = TypeTuple::fields(3);
    fields[TypeFunc::Parms+0] = TypeInstPtr::NOTNULL;  // Object to be Locked
    fields[TypeFunc::Parms+1] = TypeRawPtr::BOTTOM;    // Address of stack location for lock
    fields[TypeFunc::Parms+2] = TypeInt::BOOL;         // FastLock
    const TypeTuple *domain = TypeTuple::make(TypeFunc::Parms+3,fields);

    // create result type (range)
    fields = TypeTuple::fields(0);

    const TypeTuple *range = TypeTuple::make(TypeFunc::Parms+0,fields);

    return TypeFunc::make(domain,range);
  }

  virtual int Opcode() const;
  virtual uint size_of() const; // Size is bigger
  LockNode(Compile* C, const TypeFunc *tf) : AbstractLockNode( tf ) {
    init_class_id(Class_Lock);
    init_flags(Flag_is_macro);
    C->add_macro_node(this);
  }
  virtual bool        guaranteed_safepoint()  { return false; }

  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  // Expansion modifies the JVMState, so we need to clone it
  virtual void  clone_jvms() {
    set_jvms(jvms()->clone_deep(Compile::current()));
  }

  bool is_nested_lock_region(); // Is this Lock nested?
};
974 | |
975 //------------------------------Unlock--------------------------------------- | |
976 // High-level unlock operation | |
977 class UnlockNode : public AbstractLockNode { | |
978 public: | |
979 virtual int Opcode() const; | |
980 virtual uint size_of() const; // Size is bigger | |
981 UnlockNode(Compile* C, const TypeFunc *tf) : AbstractLockNode( tf ) { | |
982 init_class_id(Class_Unlock); | |
983 init_flags(Flag_is_macro); | |
984 C->add_macro_node(this); | |
985 } | |
986 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape); | |
987 // unlock is never a safepoint | |
988 virtual bool guaranteed_safepoint() { return false; } | |
989 }; | |
1972 | 990 |
991 #endif // SHARE_VM_OPTO_CALLNODE_HPP |