Mercurial > hg > truffle
annotate src/share/vm/opto/compile.hpp @ 4710:41406797186b
7113012: G1: rename not-fully-young GCs as "mixed"
Summary: Renamed partially-young GCs as mixed and fully-young GCs as young. Change all external output that includes those terms (GC log and GC ergo log) as well as any comments, fields, methods, etc. The changeset also includes very minor code tidying up (added some curly brackets).
Reviewed-by: johnc, brutisso
author | tonyp |
---|---|
date | Fri, 16 Dec 2011 02:14:27 -0500 |
parents | 6729bbc1fcd6 |
children | b40ac3579043 |
rev | line source |
---|---|
0 | 1 /* |
2376
c7f3d0b4570f
7017732: move static fields into Class to prepare for perm gen removal
never
parents:
2008
diff
changeset
|
2 * Copyright (c) 1997, 2011, Oracle and/or its affiliates. All rights reserved. |
0 | 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. |
4 * | |
5 * This code is free software; you can redistribute it and/or modify it | |
6 * under the terms of the GNU General Public License version 2 only, as | |
7 * published by the Free Software Foundation. | |
8 * | |
9 * This code is distributed in the hope that it will be useful, but WITHOUT | |
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
12 * version 2 for more details (a copy is included in the LICENSE file that | |
13 * accompanied this code). | |
14 * | |
15 * You should have received a copy of the GNU General Public License version | |
16 * 2 along with this work; if not, write to the Free Software Foundation, | |
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. | |
18 * | |
1552
c18cbe5936b8
6941466: Oracle rebranding changes for Hotspot repositories
trims
parents:
1397
diff
changeset
|
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA |
c18cbe5936b8
6941466: Oracle rebranding changes for Hotspot repositories
trims
parents:
1397
diff
changeset
|
20 * or visit www.oracle.com if you need additional information or have any |
c18cbe5936b8
6941466: Oracle rebranding changes for Hotspot repositories
trims
parents:
1397
diff
changeset
|
21 * questions. |
0 | 22 * |
23 */ | |
24 | |
1972 | 25 #ifndef SHARE_VM_OPTO_COMPILE_HPP |
26 #define SHARE_VM_OPTO_COMPILE_HPP | |
27 | |
28 #include "asm/codeBuffer.hpp" | |
29 #include "ci/compilerInterface.hpp" | |
30 #include "code/debugInfoRec.hpp" | |
31 #include "code/exceptionHandlerTable.hpp" | |
32 #include "compiler/compilerOracle.hpp" | |
33 #include "libadt/dict.hpp" | |
34 #include "libadt/port.hpp" | |
35 #include "libadt/vectset.hpp" | |
36 #include "memory/resourceArea.hpp" | |
37 #include "opto/idealGraphPrinter.hpp" | |
38 #include "opto/phase.hpp" | |
39 #include "opto/regmask.hpp" | |
40 #include "runtime/deoptimization.hpp" | |
41 #include "runtime/vmThread.hpp" | |
42 | |
0 | 43 class Block; |
44 class Bundle; | |
45 class C2Compiler; | |
46 class CallGenerator; | |
47 class ConnectionGraph; | |
48 class InlineTree; | |
49 class Int_Array; | |
50 class Matcher; | |
2008 | 51 class MachConstantNode; |
52 class MachConstantBaseNode; | |
0 | 53 class MachNode; |
2008 | 54 class MachOper; |
38
b789bcaf2dd9
6667610: (Escape Analysis) retry compilation without EA if it fails
kvn
parents:
0
diff
changeset
|
55 class MachSafePointNode; |
0 | 56 class Node; |
57 class Node_Array; | |
58 class Node_Notes; | |
59 class OptoReg; | |
60 class PhaseCFG; | |
61 class PhaseGVN; | |
1172 | 62 class PhaseIterGVN; |
0 | 63 class PhaseRegAlloc; |
64 class PhaseCCP; | |
65 class PhaseCCP_DCE; | |
66 class RootNode; | |
67 class relocInfo; | |
68 class Scope; | |
69 class StartNode; | |
70 class SafePointNode; | |
71 class JVMState; | |
72 class TypeData; | |
73 class TypePtr; | |
74 class TypeFunc; | |
75 class Unique_Node_List; | |
76 class nmethod; | |
77 class WarmCallInfo; | |
78 | |
79 //------------------------------Compile---------------------------------------- | |
80 // This class defines a top-level Compiler invocation. | |
81 | |
82 class Compile : public Phase { | |
3939 | 83 friend class VMStructs; |
84 | |
0 | 85 public: |
86 // Fixed alias indexes. (See also MergeMemNode.) | |
87 enum { | |
88 AliasIdxTop = 1, // pseudo-index, aliases to nothing (used as sentinel value) | |
89 AliasIdxBot = 2, // pseudo-index, aliases to everything | |
90 AliasIdxRaw = 3 // hard-wired index for TypeRawPtr::BOTTOM | |
91 }; | |
92 | |
93 // Variant of TraceTime(NULL, &_t_accumulator, TimeCompiler); | |
94 // Integrated with logging. If logging is turned on, and dolog is true, | |
95 // then brackets are put into the log, with time stamps and node counts. | |
96 // (The time collection itself is always conditionalized on TimeCompiler.) | |
97 class TracePhase : public TraceTime { | |
98 private: | |
99 Compile* C; | |
100 CompileLog* _log; | |
101 public: | |
102 TracePhase(const char* name, elapsedTimer* accumulator, bool dolog); | |
103 ~TracePhase(); | |
104 }; | |
105 | |
106 // Information per category of alias (memory slice) | |
107 class AliasType { | |
108 private: | |
109 friend class Compile; | |
110 | |
111 int _index; // unique index, used with MergeMemNode | |
112 const TypePtr* _adr_type; // normalized address type | |
113 ciField* _field; // relevant instance field, or null if none | |
114 bool _is_rewritable; // false if the memory is write-once only | |
115 int _general_index; // if this is type is an instance, the general | |
116 // type that this is an instance of | |
117 | |
118 void Init(int i, const TypePtr* at); | |
119 | |
120 public: | |
121 int index() const { return _index; } | |
122 const TypePtr* adr_type() const { return _adr_type; } | |
123 ciField* field() const { return _field; } | |
124 bool is_rewritable() const { return _is_rewritable; } | |
125 bool is_volatile() const { return (_field ? _field->is_volatile() : false); } | |
126 int general_index() const { return (_general_index != 0) ? _general_index : _index; } | |
127 | |
128 void set_rewritable(bool z) { _is_rewritable = z; } | |
129 void set_field(ciField* f) { | |
130 assert(!_field,""); | |
131 _field = f; | |
132 if (f->is_final()) _is_rewritable = false; | |
133 } | |
134 | |
135 void print_on(outputStream* st) PRODUCT_RETURN; | |
136 }; | |
137 | |
138 enum { | |
139 logAliasCacheSize = 6, | |
140 AliasCacheSize = (1<<logAliasCacheSize) | |
141 }; | |
142 struct AliasCacheEntry { const TypePtr* _adr_type; int _index; }; // simple duple type | |
143 enum { | |
144 trapHistLength = methodDataOopDesc::_trap_hist_limit | |
145 }; | |
146 | |
2008 | 147 // Constant entry of the constant table. |
148 class Constant { | |
149 private: | |
150 BasicType _type; | |
151 jvalue _value; | |
152 int _offset; // offset of this constant (in bytes) relative to the constant table base. | |
4114
6729bbc1fcd6
7003454: order constants in constant table by number of references in code
twisti
parents:
3939
diff
changeset
|
153 float _freq; |
2008 | 154 bool _can_be_reused; // true (default) if the value can be shared with other users. |
155 | |
156 public: | |
4114
6729bbc1fcd6
7003454: order constants in constant table by number of references in code
twisti
parents:
3939
diff
changeset
|
157 Constant() : _type(T_ILLEGAL), _offset(-1), _freq(0.0f), _can_be_reused(true) { _value.l = 0; } |
6729bbc1fcd6
7003454: order constants in constant table by number of references in code
twisti
parents:
3939
diff
changeset
|
158 Constant(BasicType type, jvalue value, float freq = 0.0f, bool can_be_reused = true) : |
2008 | 159 _type(type), |
160 _value(value), | |
161 _offset(-1), | |
4114
6729bbc1fcd6
7003454: order constants in constant table by number of references in code
twisti
parents:
3939
diff
changeset
|
162 _freq(freq), |
2008 | 163 _can_be_reused(can_be_reused) |
164 {} | |
165 | |
166 bool operator==(const Constant& other); | |
167 | |
168 BasicType type() const { return _type; } | |
169 | |
170 jlong get_jlong() const { return _value.j; } | |
171 jfloat get_jfloat() const { return _value.f; } | |
172 jdouble get_jdouble() const { return _value.d; } | |
173 jobject get_jobject() const { return _value.l; } | |
174 | |
175 int offset() const { return _offset; } | |
176 void set_offset(int offset) { _offset = offset; } | |
177 | |
4114
6729bbc1fcd6
7003454: order constants in constant table by number of references in code
twisti
parents:
3939
diff
changeset
|
178 float freq() const { return _freq; } |
6729bbc1fcd6
7003454: order constants in constant table by number of references in code
twisti
parents:
3939
diff
changeset
|
179 void inc_freq(float freq) { _freq += freq; } |
6729bbc1fcd6
7003454: order constants in constant table by number of references in code
twisti
parents:
3939
diff
changeset
|
180 |
2008 | 181 bool can_be_reused() const { return _can_be_reused; } |
182 }; | |
183 | |
184 // Constant table. | |
185 class ConstantTable { | |
186 private: | |
187 GrowableArray<Constant> _constants; // Constants of this table. | |
188 int _size; // Size in bytes the emitted constant table takes (including padding). | |
189 int _table_base_offset; // Offset of the table base that gets added to the constant offsets. | |
4114
6729bbc1fcd6
7003454: order constants in constant table by number of references in code
twisti
parents:
3939
diff
changeset
|
190 int _nof_jump_tables; // Number of jump-tables in this constant table. |
6729bbc1fcd6
7003454: order constants in constant table by number of references in code
twisti
parents:
3939
diff
changeset
|
191 |
6729bbc1fcd6
7003454: order constants in constant table by number of references in code
twisti
parents:
3939
diff
changeset
|
192 static int qsort_comparator(Constant* a, Constant* b); |
6729bbc1fcd6
7003454: order constants in constant table by number of references in code
twisti
parents:
3939
diff
changeset
|
193 |
6729bbc1fcd6
7003454: order constants in constant table by number of references in code
twisti
parents:
3939
diff
changeset
|
194 // We use negative frequencies to keep the order of the |
6729bbc1fcd6
7003454: order constants in constant table by number of references in code
twisti
parents:
3939
diff
changeset
|
195 // jump-tables in which they were added. Otherwise we get into |
6729bbc1fcd6
7003454: order constants in constant table by number of references in code
twisti
parents:
3939
diff
changeset
|
196 // trouble with relocation. |
6729bbc1fcd6
7003454: order constants in constant table by number of references in code
twisti
parents:
3939
diff
changeset
|
197 float next_jump_table_freq() { return -1.0f * (++_nof_jump_tables); } |
2008 | 198 |
199 public: | |
200 ConstantTable() : | |
201 _size(-1), | |
4114
6729bbc1fcd6
7003454: order constants in constant table by number of references in code
twisti
parents:
3939
diff
changeset
|
202 _table_base_offset(-1), // We can use -1 here since the constant table is always bigger than 2 bytes (-(size / 2), see MachConstantBaseNode::emit). |
6729bbc1fcd6
7003454: order constants in constant table by number of references in code
twisti
parents:
3939
diff
changeset
|
203 _nof_jump_tables(0) |
2008 | 204 {} |
205 | |
4114
6729bbc1fcd6
7003454: order constants in constant table by number of references in code
twisti
parents:
3939
diff
changeset
|
206 int size() const { assert(_size != -1, "not calculated yet"); return _size; } |
2008 | 207 |
4114
6729bbc1fcd6
7003454: order constants in constant table by number of references in code
twisti
parents:
3939
diff
changeset
|
208 int calculate_table_base_offset() const; // AD specific |
6729bbc1fcd6
7003454: order constants in constant table by number of references in code
twisti
parents:
3939
diff
changeset
|
209 void set_table_base_offset(int x) { assert(_table_base_offset == -1 || x == _table_base_offset, "can't change"); _table_base_offset = x; } |
6729bbc1fcd6
7003454: order constants in constant table by number of references in code
twisti
parents:
3939
diff
changeset
|
210 int table_base_offset() const { assert(_table_base_offset != -1, "not set yet"); return _table_base_offset; } |
2008 | 211 |
212 void emit(CodeBuffer& cb); | |
213 | |
214 // Returns the offset of the last entry (the top) of the constant table. | |
4114
6729bbc1fcd6
7003454: order constants in constant table by number of references in code
twisti
parents:
3939
diff
changeset
|
215 int top_offset() const { assert(_constants.top().offset() != -1, "not bound yet"); return _constants.top().offset(); } |
2008 | 216 |
217 void calculate_offsets_and_size(); | |
218 int find_offset(Constant& con) const; | |
219 | |
220 void add(Constant& con); | |
4114
6729bbc1fcd6
7003454: order constants in constant table by number of references in code
twisti
parents:
3939
diff
changeset
|
221 Constant add(MachConstantNode* n, BasicType type, jvalue value); |
6729bbc1fcd6
7003454: order constants in constant table by number of references in code
twisti
parents:
3939
diff
changeset
|
222 Constant add(MachConstantNode* n, MachOper* oper); |
6729bbc1fcd6
7003454: order constants in constant table by number of references in code
twisti
parents:
3939
diff
changeset
|
223 Constant add(MachConstantNode* n, jfloat f) { |
2008 | 224 jvalue value; value.f = f; |
4114
6729bbc1fcd6
7003454: order constants in constant table by number of references in code
twisti
parents:
3939
diff
changeset
|
225 return add(n, T_FLOAT, value); |
2008 | 226 } |
4114
6729bbc1fcd6
7003454: order constants in constant table by number of references in code
twisti
parents:
3939
diff
changeset
|
227 Constant add(MachConstantNode* n, jdouble d) { |
2008 | 228 jvalue value; value.d = d; |
4114
6729bbc1fcd6
7003454: order constants in constant table by number of references in code
twisti
parents:
3939
diff
changeset
|
229 return add(n, T_DOUBLE, value); |
2008 | 230 } |
231 | |
4114
6729bbc1fcd6
7003454: order constants in constant table by number of references in code
twisti
parents:
3939
diff
changeset
|
232 // Jump-table |
6729bbc1fcd6
7003454: order constants in constant table by number of references in code
twisti
parents:
3939
diff
changeset
|
233 Constant add_jump_table(MachConstantNode* n); |
6729bbc1fcd6
7003454: order constants in constant table by number of references in code
twisti
parents:
3939
diff
changeset
|
234 void fill_jump_table(CodeBuffer& cb, MachConstantNode* n, GrowableArray<Label*> labels) const; |
2008 | 235 }; |
236 | |
0 | 237 private: |
238 // Fixed parameters to this compilation. | |
239 const int _compile_id; | |
240 const bool _save_argument_registers; // save/restore arg regs for trampolines | |
241 const bool _subsume_loads; // Load can be matched as part of a larger op. | |
38
b789bcaf2dd9
6667610: (Escape Analysis) retry compilation without EA if it fails
kvn
parents:
0
diff
changeset
|
242 const bool _do_escape_analysis; // Do escape analysis. |
0 | 243 ciMethod* _method; // The method being compiled. |
244 int _entry_bci; // entry bci for osr methods. | |
245 const TypeFunc* _tf; // My kind of signature | |
246 InlineTree* _ilt; // Ditto (temporary). | |
247 address _stub_function; // VM entry for stub being compiled, or NULL | |
248 const char* _stub_name; // Name of stub or adapter being compiled, or NULL | |
249 address _stub_entry_point; // Compile code entry for generated stub, or NULL | |
250 | |
251 // Control of this compilation. | |
252 int _num_loop_opts; // Number of iterations for doing loop optimiztions | |
253 int _max_inline_size; // Max inline size for this compilation | |
254 int _freq_inline_size; // Max hot method inline size for this compilation | |
255 int _fixed_slots; // count of frame slots not allocated by the register | |
256 // allocator i.e. locks, original deopt pc, etc. | |
257 // For deopt | |
258 int _orig_pc_slot; | |
259 int _orig_pc_slot_offset_in_bytes; | |
260 | |
261 int _major_progress; // Count of something big happening | |
262 bool _has_loops; // True if the method _may_ have some loops | |
263 bool _has_split_ifs; // True if the method _may_ have some split-if | |
264 bool _has_unsafe_access; // True if the method _may_ produce faults in unsafe loads or stores. | |
1080
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
948
diff
changeset
|
265 bool _has_stringbuilder; // True StringBuffers or StringBuilders are allocated |
0 | 266 uint _trap_hist[trapHistLength]; // Cumulative traps |
267 bool _trap_can_recompile; // Have we emitted a recompiling trap? | |
268 uint _decompile_count; // Cumulative decompilation counts. | |
269 bool _do_inlining; // True if we intend to do inlining | |
270 bool _do_scheduling; // True if we intend to do scheduling | |
418 | 271 bool _do_freq_based_layout; // True if we intend to do frequency based block layout |
0 | 272 bool _do_count_invocations; // True if we generate code to count invocations |
273 bool _do_method_data_update; // True if we generate code to update methodDataOops | |
274 int _AliasLevel; // Locally-adjusted version of AliasLevel flag. | |
275 bool _print_assembly; // True if we should dump assembly code for this compilation | |
276 #ifndef PRODUCT | |
277 bool _trace_opto_output; | |
367
194b8e3a2fc4
6384206: Phis which are later unneeded are impairing our ability to inline based on static types
never
parents:
196
diff
changeset
|
278 bool _parsed_irreducible_loop; // True if ciTypeFlow detected irreducible loops during parsing |
0 | 279 #endif |
280 | |
1265 | 281 // JSR 292 |
282 bool _has_method_handle_invokes; // True if this method has MethodHandle invokes. | |
283 | |
0 | 284 // Compilation environment. |
285 Arena _comp_arena; // Arena with lifetime equivalent to Compile | |
286 ciEnv* _env; // CI interface | |
287 CompileLog* _log; // from CompilerThread | |
288 const char* _failure_reason; // for record_failure/failing pattern | |
289 GrowableArray<CallGenerator*>* _intrinsics; // List of intrinsics. | |
290 GrowableArray<Node*>* _macro_nodes; // List of nodes which need to be expanded before matching. | |
1172 | 291 GrowableArray<Node*>* _predicate_opaqs; // List of Opaque1 nodes for the loop predicates. |
0 | 292 ConnectionGraph* _congraph; |
293 #ifndef PRODUCT | |
294 IdealGraphPrinter* _printer; | |
295 #endif | |
296 | |
297 // Node management | |
298 uint _unique; // Counter for unique Node indices | |
299 debug_only(static int _debug_idx;) // Monotonic counter (not reset), use -XX:BreakAtNode=<idx> | |
300 Arena _node_arena; // Arena for new-space Nodes | |
301 Arena _old_arena; // Arena for old-space Nodes, lifetime during xform | |
302 RootNode* _root; // Unique root of compilation, or NULL after bail-out. | |
303 Node* _top; // Unique top node. (Reset by various phases.) | |
304 | |
305 Node* _immutable_memory; // Initial memory state | |
306 | |
307 Node* _recent_alloc_obj; | |
308 Node* _recent_alloc_ctl; | |
309 | |
2008 | 310 // Constant table |
311 ConstantTable _constant_table; // The constant table for this compile. | |
312 MachConstantBaseNode* _mach_constant_base_node; // Constant table base node singleton. | |
313 | |
314 | |
0 | 315 // Blocked array of debugging and profiling information, |
316 // tracked per node. | |
317 enum { _log2_node_notes_block_size = 8, | |
318 _node_notes_block_size = (1<<_log2_node_notes_block_size) | |
319 }; | |
320 GrowableArray<Node_Notes*>* _node_note_array; | |
321 Node_Notes* _default_node_notes; // default notes for new nodes | |
322 | |
323 // After parsing and every bulk phase we hang onto the Root instruction. | |
324 // The RootNode instruction is where the whole program begins. It produces | |
325 // the initial Control and BOTTOM for everybody else. | |
326 | |
327 // Type management | |
328 Arena _Compile_types; // Arena for all types | |
329 Arena* _type_arena; // Alias for _Compile_types except in Initialize_shared() | |
330 Dict* _type_dict; // Intern table | |
331 void* _type_hwm; // Last allocation (see Type::operator new/delete) | |
332 size_t _type_last_size; // Last allocation size (see Type::operator new/delete) | |
333 ciMethod* _last_tf_m; // Cache for | |
334 const TypeFunc* _last_tf; // TypeFunc::make | |
335 AliasType** _alias_types; // List of alias types seen so far. | |
336 int _num_alias_types; // Logical length of _alias_types | |
337 int _max_alias_types; // Physical length of _alias_types | |
338 AliasCacheEntry _alias_cache[AliasCacheSize]; // Gets aliases w/o data structure walking | |
339 | |
340 // Parsing, optimization | |
341 PhaseGVN* _initial_gvn; // Results of parse-time PhaseGVN | |
342 Unique_Node_List* _for_igvn; // Initial work-list for next round of Iterative GVN | |
343 WarmCallInfo* _warm_calls; // Sorted work-list for heat-based inlining. | |
344 | |
1080
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
948
diff
changeset
|
345 GrowableArray<CallGenerator*> _late_inlines; // List of CallGenerators to be revisited after |
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
948
diff
changeset
|
346 // main parsing has finished. |
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
948
diff
changeset
|
347 |
0 | 348 // Matching, CFG layout, allocation, code generation |
349 PhaseCFG* _cfg; // Results of CFG finding | |
350 bool _select_24_bit_instr; // We selected an instruction with a 24-bit result | |
351 bool _in_24_bit_fp_mode; // We are emitting instructions with 24-bit results | |
859
ea3f9723b5cf
6860599: nodes limit could be reached during Output phase
kvn
parents:
418
diff
changeset
|
352 int _java_calls; // Number of java calls in the method |
ea3f9723b5cf
6860599: nodes limit could be reached during Output phase
kvn
parents:
418
diff
changeset
|
353 int _inner_loops; // Number of inner loops in the method |
0 | 354 Matcher* _matcher; // Engine to map ideal to machine instructions |
355 PhaseRegAlloc* _regalloc; // Results of register allocation. | |
356 int _frame_slots; // Size of total frame in stack slots | |
357 CodeOffsets _code_offsets; // Offsets into the code for various interesting entries | |
358 RegMask _FIRST_STACK_mask; // All stack slots usable for spills (depends on frame layout) | |
359 Arena* _indexSet_arena; // control IndexSet allocation within PhaseChaitin | |
360 void* _indexSet_free_block_list; // free list of IndexSet bit blocks | |
361 | |
362 uint _node_bundling_limit; | |
363 Bundle* _node_bundling_base; // Information for instruction bundling | |
364 | |
365 // Instruction bits passed off to the VM | |
366 int _method_size; // Size of nmethod code segment in bytes | |
367 CodeBuffer _code_buffer; // Where the code is assembled | |
368 int _first_block_size; // Size of unvalidated entry point code / OSR poison code | |
369 ExceptionHandlerTable _handler_table; // Table of native-code exception handlers | |
370 ImplicitExceptionTable _inc_table; // Table of implicit null checks in native code | |
371 OopMapSet* _oop_map_set; // Table of oop maps (one for each safepoint location) | |
372 static int _CompiledZap_count; // counter compared against CompileZap[First/Last] | |
373 BufferBlob* _scratch_buffer_blob; // For temporary code buffers. | |
374 relocInfo* _scratch_locs_memory; // For temporary code buffers. | |
2008 | 375 int _scratch_const_size; // For temporary code buffers. |
376 bool _in_scratch_emit_size; // true when in scratch_emit_size. | |
0 | 377 |
378 public: | |
379 // Accessors | |
380 | |
381 // The Compile instance currently active in this (compiler) thread. | |
382 static Compile* current() { | |
383 return (Compile*) ciEnv::current()->compiler_data(); | |
384 } | |
385 | |
386 // ID for this compilation. Useful for setting breakpoints in the debugger. | |
387 int compile_id() const { return _compile_id; } | |
388 | |
389 // Does this compilation allow instructions to subsume loads? User | |
390 // instructions that subsume a load may result in an unschedulable | |
391 // instruction sequence. | |
392 bool subsume_loads() const { return _subsume_loads; } | |
38
b789bcaf2dd9
6667610: (Escape Analysis) retry compilation without EA if it fails
kvn
parents:
0
diff
changeset
|
393 // Do escape analysis. |
b789bcaf2dd9
6667610: (Escape Analysis) retry compilation without EA if it fails
kvn
parents:
0
diff
changeset
|
394 bool do_escape_analysis() const { return _do_escape_analysis; } |
0 | 395 bool save_argument_registers() const { return _save_argument_registers; } |
396 | |
397 | |
398 // Other fixed compilation parameters. | |
399 ciMethod* method() const { return _method; } | |
400 int entry_bci() const { return _entry_bci; } | |
401 bool is_osr_compilation() const { return _entry_bci != InvocationEntryBci; } | |
402 bool is_method_compilation() const { return (_method != NULL && !_method->flags().is_native()); } | |
403 const TypeFunc* tf() const { assert(_tf!=NULL, ""); return _tf; } | |
404 void init_tf(const TypeFunc* tf) { assert(_tf==NULL, ""); _tf = tf; } | |
405 InlineTree* ilt() const { return _ilt; } | |
406 address stub_function() const { return _stub_function; } | |
407 const char* stub_name() const { return _stub_name; } | |
408 address stub_entry_point() const { return _stub_entry_point; } | |
409 | |
410 // Control of this compilation. | |
411 int fixed_slots() const { assert(_fixed_slots >= 0, ""); return _fixed_slots; } | |
412 void set_fixed_slots(int n) { _fixed_slots = n; } | |
413 int major_progress() const { return _major_progress; } | |
414 void set_major_progress() { _major_progress++; } | |
415 void clear_major_progress() { _major_progress = 0; } | |
416 int num_loop_opts() const { return _num_loop_opts; } | |
417 void set_num_loop_opts(int n) { _num_loop_opts = n; } | |
418 int max_inline_size() const { return _max_inline_size; } | |
419 void set_freq_inline_size(int n) { _freq_inline_size = n; } | |
420 int freq_inline_size() const { return _freq_inline_size; } | |
421 void set_max_inline_size(int n) { _max_inline_size = n; } | |
422 bool has_loops() const { return _has_loops; } | |
423 void set_has_loops(bool z) { _has_loops = z; } | |
424 bool has_split_ifs() const { return _has_split_ifs; } | |
425 void set_has_split_ifs(bool z) { _has_split_ifs = z; } | |
426 bool has_unsafe_access() const { return _has_unsafe_access; } | |
427 void set_has_unsafe_access(bool z) { _has_unsafe_access = z; } | |
1080
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
948
diff
changeset
|
428 bool has_stringbuilder() const { return _has_stringbuilder; } |
7c57aead6d3e
6892658: C2 should optimize some stringbuilder patterns
never
parents:
948
diff
changeset
|
429 void set_has_stringbuilder(bool z) { _has_stringbuilder = z; } |
0 | 430 void set_trap_count(uint r, uint c) { assert(r < trapHistLength, "oob"); _trap_hist[r] = c; } |
431 uint trap_count(uint r) const { assert(r < trapHistLength, "oob"); return _trap_hist[r]; } | |
432 bool trap_can_recompile() const { return _trap_can_recompile; } | |
433 void set_trap_can_recompile(bool z) { _trap_can_recompile = z; } | |
434 uint decompile_count() const { return _decompile_count; } | |
435 void set_decompile_count(uint c) { _decompile_count = c; } | |
436 bool allow_range_check_smearing() const; | |
437 bool do_inlining() const { return _do_inlining; } | |
438 void set_do_inlining(bool z) { _do_inlining = z; } | |
439 bool do_scheduling() const { return _do_scheduling; } | |
440 void set_do_scheduling(bool z) { _do_scheduling = z; } | |
418 | 441 bool do_freq_based_layout() const{ return _do_freq_based_layout; } |
442 void set_do_freq_based_layout(bool z){ _do_freq_based_layout = z; } | |
0 | 443 bool do_count_invocations() const{ return _do_count_invocations; } |
444 void set_do_count_invocations(bool z){ _do_count_invocations = z; } | |
445 bool do_method_data_update() const { return _do_method_data_update; } | |
446 void set_do_method_data_update(bool z) { _do_method_data_update = z; } | |
447 int AliasLevel() const { return _AliasLevel; } | |
448 bool print_assembly() const { return _print_assembly; } | |
449 void set_print_assembly(bool z) { _print_assembly = z; } | |
450 // check the CompilerOracle for special behaviours for this compile | |
451 bool method_has_option(const char * option) { | |
452 return method() != NULL && method()->has_option(option); | |
453 } | |
454 #ifndef PRODUCT | |
455 bool trace_opto_output() const { return _trace_opto_output; } | |
367
194b8e3a2fc4
6384206: Phis which are later unneeded are impairing our ability to inline based on static types
never
parents:
196
diff
changeset
|
456 bool parsed_irreducible_loop() const { return _parsed_irreducible_loop; } |
194b8e3a2fc4
6384206: Phis which are later unneeded are impairing our ability to inline based on static types
never
parents:
196
diff
changeset
|
457 void set_parsed_irreducible_loop(bool z) { _parsed_irreducible_loop = z; } |
0 | 458 #endif |
459 | |
1265 | 460 // JSR 292 |
461 bool has_method_handle_invokes() const { return _has_method_handle_invokes; } | |
462 void set_has_method_handle_invokes(bool z) { _has_method_handle_invokes = z; } | |
463 | |
0 | 464 void begin_method() { |
465 #ifndef PRODUCT | |
466 if (_printer) _printer->begin_method(this); | |
467 #endif | |
468 } | |
469 void print_method(const char * name, int level = 1) { | |
470 #ifndef PRODUCT | |
471 if (_printer) _printer->print_method(this, name, level); | |
472 #endif | |
473 } | |
474 void end_method() { | |
475 #ifndef PRODUCT | |
476 if (_printer) _printer->end_method(); | |
477 #endif | |
478 } | |
479 | |
480 int macro_count() { return _macro_nodes->length(); } | |
1172 | 481 int predicate_count() { return _predicate_opaqs->length();} |
0 | 482 Node* macro_node(int idx) { return _macro_nodes->at(idx); } |
1172 | 483 Node* predicate_opaque1_node(int idx) { return _predicate_opaqs->at(idx);} |
0 | 484 ConnectionGraph* congraph() { return _congraph;} |
1634
60a14ad85270
6966411: escape.cpp:450 assert(base->Opcode() == Op_ConP
kvn
parents:
1552
diff
changeset
|
485 void set_congraph(ConnectionGraph* congraph) { _congraph = congraph;} |
0 | 486 void add_macro_node(Node * n) { |
487 //assert(n->is_macro(), "must be a macro node"); | |
488 assert(!_macro_nodes->contains(n), " duplicate entry in expand list"); | |
489 _macro_nodes->append(n); | |
490 } | |
491 void remove_macro_node(Node * n) { | |
492 // this function may be called twice for a node so check | |
493 // that the node is in the array before attempting to remove it | |
494 if (_macro_nodes->contains(n)) | |
495 _macro_nodes->remove(n); | |
1172 | 496 // remove from _predicate_opaqs list also if it is there |
497 if (predicate_count() > 0 && _predicate_opaqs->contains(n)){ | |
498 _predicate_opaqs->remove(n); | |
499 } | |
0 | 500 } |
1172 | 501 void add_predicate_opaq(Node * n) { |
502 assert(!_predicate_opaqs->contains(n), " duplicate entry in predicate opaque1"); | |
503 assert(_macro_nodes->contains(n), "should have already been in macro list"); | |
504 _predicate_opaqs->append(n); | |
505 } | |
506 // remove the opaque nodes that protect the predicates so that the unused checks and | |
507 // uncommon traps will be eliminated from the graph. | |
508 void cleanup_loop_predicates(PhaseIterGVN &igvn); | |
2445 | 509 bool is_predicate_opaq(Node * n) { |
510 return _predicate_opaqs->contains(n); | |
511 } | |
  // Compilation environment.
  Arena*      comp_arena()           { return &_comp_arena; }
  ciEnv*      env() const            { return _env; }
  CompileLog* log() const            { return _log; }
  // True once this compilation has been aborted, either through the
  // environment or via record_failure below.
  bool        failing() const        { return _env->failing() || _failure_reason != NULL; }
  const char* failure_reason()       { return _failure_reason; }
  // Compares by pointer first, then by string content (both may be NULL).
  bool        failure_reason_is(const char* r) { return (r==_failure_reason) || (r!=NULL && _failure_reason!=NULL && strcmp(r, _failure_reason)==0); }

  void record_failure(const char* reason);
  // Abort this compilation and tell the ciEnv not to retry the method,
  // either at this tier only or at all tiers.
  void record_method_not_compilable(const char* reason, bool all_tiers = false) {
    // All bailouts cover "all_tiers" when TieredCompilation is off.
    if (!TieredCompilation) all_tiers = true;
    env()->record_method_not_compilable(reason, all_tiers);
    // Record failure reason.
    record_failure(reason);
  }
  // Convenience wrapper: bail out at every tier.
  void record_method_not_compilable_all_tiers(const char* reason) {
    record_method_not_compilable(reason, true);
  }
532 bool check_node_count(uint margin, const char* reason) { | |
533 if (unique() + margin > (uint)MaxNodeLimit) { | |
534 record_method_not_compilable(reason); | |
535 return true; | |
536 } else { | |
537 return false; | |
538 } | |
539 } | |
  // Node management
  uint        unique() const         { return _unique; }      // count of node ids handed out
  uint        next_unique()          { return _unique++; }    // allocate the next node id
  void        set_unique(uint i)     { _unique = i; }
  // _debug_idx is only maintained in debug builds; returns 0 in product.
  static int  debug_idx()            { return debug_only(_debug_idx)+0; }
  static void set_debug_idx(int i)   { debug_only(_debug_idx = i); }
  Arena*      node_arena()           { return &_node_arena; }
  Arena*      old_arena()            { return &_old_arena; }
  RootNode*   root() const           { return _root; }
  void        set_root(RootNode* r)  { _root = r; }
  StartNode*  start() const;         // (Derived from root.)
  void        init_start(StartNode* s);
  Node*       immutable_memory();

  // Cached control/object projections of the most recent allocation.
  Node*       recent_alloc_ctl() const { return _recent_alloc_ctl; }
  Node*       recent_alloc_obj() const { return _recent_alloc_obj; }
  void        set_recent_alloc(Node* ctl, Node* obj) {
    _recent_alloc_ctl = ctl;
    _recent_alloc_obj = obj;
  }
  // Constant table
  ConstantTable& constant_table() { return _constant_table; }

  // Lazily-created base node for addressing the constant table;
  // has_mach_constant_base_node() reports whether it exists yet.
  MachConstantBaseNode* mach_constant_base_node();
  bool              has_mach_constant_base_node() const { return _mach_constant_base_node != NULL; }

  // Handy undefined Node
  Node*             top() const                 { return _top; }
  // these are used by guys who need to know about creation and transformation of top:
  Node*             cached_top_node()           { return _top; }
  void          set_cached_top_node(Node* tn);

  // Per-node notes (e.g. debug info), stored in a blocked growable array
  // indexed by node idx.
  GrowableArray<Node_Notes*>* node_note_array() const { return _node_note_array; }
  void set_node_note_array(GrowableArray<Node_Notes*>* arr) { _node_note_array = arr; }
  // Notes applied to newly created nodes when no specific notes exist.
  Node_Notes* default_node_notes() const        { return _default_node_notes; }
  void    set_default_node_notes(Node_Notes* n) { _default_node_notes = n; }

  Node_Notes*       node_notes_at(int idx) {
    return locate_node_notes(_node_note_array, idx, false);
  }
  inline bool   set_node_notes_at(int idx, Node_Notes* value);

  // Copy notes from source to dest, if they exist.
  // Overwrite dest only if source provides something.
  // Return true if information was moved.
  bool copy_node_notes_to(Node* dest, Node* source);

  // Workhorse function to sort out the blocked Node_Notes array:
  inline Node_Notes* locate_node_notes(GrowableArray<Node_Notes*>* arr,
                                       int idx, bool can_grow = false);

  void grow_node_notes(GrowableArray<Node_Notes*>* arr, int grow_by);
  // Type management
  Arena*            type_arena()                { return _type_arena; }
  Dict*             type_dict()                 { return _type_dict; }
  void*             type_hwm()                  { return _type_hwm; }
  size_t            type_last_size()            { return _type_last_size; }
  int               num_alias_types()           { return _num_alias_types; }

  void          init_type_arena()               { _type_arena = &_Compile_types; }
  void          set_type_arena(Arena* a)        { _type_arena = a; }
  void          set_type_dict(Dict* d)          { _type_dict = d; }
  void          set_type_hwm(void* p)           { _type_hwm = p; }
  void          set_type_last_size(size_t sz)   { _type_last_size = sz; }

  // One-entry cache of the TypeFunc most recently computed for a method;
  // returns NULL on a cache miss.
  const TypeFunc* last_tf(ciMethod* m) {
    return (m == _last_tf_m) ? _last_tf : NULL;
  }
  void set_last_tf(ciMethod* m, const TypeFunc* tf) {
    assert(m != NULL || tf == NULL, "");
    _last_tf_m = m;
    _last_tf = tf;
  }
  // Alias category accessors.  Categories partition memory for the
  // purposes of memory-dependence analysis.
  AliasType*        alias_type(int idx)         { assert(idx < num_alias_types(), "oob"); return _alias_types[idx]; }
  // Find (or create) the alias category for an address type; an optional
  // field further refines the category.
  AliasType*        alias_type(const TypePtr* adr_type, ciField* field = NULL) { return find_alias_type(adr_type, false, field); }
  bool         have_alias_type(const TypePtr* adr_type);
  AliasType*        alias_type(ciField* field);

  int               get_alias_index(const TypePtr* at)  { return alias_type(at)->index(); }
  const TypePtr*    get_adr_type(uint aidx)             { return alias_type(aidx)->adr_type(); }
  int               get_general_index(uint aidx)        { return alias_type(aidx)->general_index(); }
  // Building nodes
  void              rethrow_exceptions(JVMState* jvms);
  void              return_values(JVMState* jvms);
  JVMState*         build_start_state(StartNode* start, const TypeFunc* tf);

  // Decide how to build a call.
  // The profile factor is a discount to apply to this site's interp. profile.
  CallGenerator*    call_generator(ciMethod* call_method, int vtable_index, bool call_is_virtual, JVMState* jvms, bool allow_inline, float profile_factor);
  // Should inlining of this call be deferred until after parsing?
  bool should_delay_inlining(ciMethod* call_method, JVMState* jvms);

  // Report if there were too many traps at a current method and bci.
  // Report if a trap was recorded, and/or PerMethodTrapLimit was exceeded.
  // If there is no MDO at all, report no trap unless told to assume it.
  bool too_many_traps(ciMethod* method, int bci, Deoptimization::DeoptReason reason);
  // This version, unspecific to a particular bci, asks if
  // PerMethodTrapLimit was exceeded for all inlined methods seen so far.
  bool too_many_traps(Deoptimization::DeoptReason reason,
                      // Privately used parameter for logging:
                      ciMethodData* logmd = NULL);
  // Report if there were too many recompiles at a method and bci.
  bool too_many_recompiles(ciMethod* method, int bci, Deoptimization::DeoptReason reason);
  // Parsing, optimization
  PhaseGVN*         initial_gvn()               { return _initial_gvn; }
  // Worklist of nodes to revisit during iterative GVN.
  Unique_Node_List* for_igvn()                  { return _for_igvn; }
  inline void       record_for_igvn(Node* n);   // Body is after class Unique_Node_List.
  void          set_initial_gvn(PhaseGVN *gvn)  { _initial_gvn = gvn; }
  void          set_for_igvn(Unique_Node_List *for_igvn) { _for_igvn = for_igvn; }

  // Replace n by nn using initial_gvn, calling hash_delete and
  // record_for_igvn as needed.
  void gvn_replace_by(Node* n, Node* nn);


  void          identify_useful_nodes(Unique_Node_List &useful);
  void          remove_useless_nodes (Unique_Node_List &useful);

  // Linked list of "warm" calls pending a heuristic inlining decision.
  WarmCallInfo* warm_calls() const              { return _warm_calls; }
  void          set_warm_calls(WarmCallInfo* l) { _warm_calls = l; }
  WarmCallInfo* pop_warm_call();

  // Record this CallGenerator for inlining at the end of parsing.
  void          add_late_inline(CallGenerator* cg) { _late_inlines.push(cg); }
  // Matching, CFG layout, allocation, code generation
  PhaseCFG*         cfg()                       { return _cfg; }
  bool              select_24_bit_instr() const { return _select_24_bit_instr; }
  bool              in_24_bit_fp_mode() const   { return _in_24_bit_fp_mode; }
  bool              has_java_calls() const      { return _java_calls > 0; }
  int               java_calls() const          { return _java_calls; }
  int               inner_loops() const         { return _inner_loops; }
  Matcher*          matcher()                   { return _matcher; }
  PhaseRegAlloc*    regalloc()                  { return _regalloc; }
  int               frame_slots() const         { return _frame_slots; }
  int               frame_size_in_words() const; // frame_slots in units of the polymorphic 'words'
  RegMask&          FIRST_STACK_mask()          { return _FIRST_STACK_mask; }
  Arena*            indexSet_arena()            { return _indexSet_arena; }
  void*             indexSet_free_block_list()  { return _indexSet_free_block_list; }
  uint              node_bundling_limit()       { return _node_bundling_limit; }
  Bundle*           node_bundling_base()        { return _node_bundling_base; }
  void          set_node_bundling_limit(uint n) { _node_bundling_limit = n; }
  void          set_node_bundling_base(Bundle* b) { _node_bundling_base = b; }
  bool          starts_bundle(const Node *n) const;
  bool          need_stack_bang(int frame_size_in_bytes) const;
  bool          need_register_stack_bang() const;

  void          set_matcher(Matcher* m)         { _matcher = m; }
  //void        set_regalloc(PhaseRegAlloc* ra) { _regalloc = ra; }
  void          set_indexSet_arena(Arena* a)    { _indexSet_arena = a; }
  void          set_indexSet_free_block_list(void* p) { _indexSet_free_block_list = p; }

  // Remember if this compilation changes hardware mode to 24-bit precision
  void set_24_bit_selection_and_mode(bool selection, bool mode) {
    _select_24_bit_instr = selection;
    _in_24_bit_fp_mode   = mode;
  }

  void set_java_calls(int z)  { _java_calls  = z; }
  void set_inner_loops(int z) { _inner_loops = z; }
  // Instruction bits passed off to the VM
  int               code_size()                 { return _method_size; }
  CodeBuffer*       code_buffer()               { return &_code_buffer; }
  int               first_block_size()          { return _first_block_size; }
  void              set_frame_complete(int off) { _code_offsets.set_value(CodeOffsets::Frame_Complete, off); }
  ExceptionHandlerTable*  handler_table()       { return &_handler_table; }
  ImplicitExceptionTable* inc_table()           { return &_inc_table; }
  OopMapSet*        oop_map_set()               { return _oop_map_set; }
  DebugInformationRecorder* debug_info()        { return env()->debug_info(); }
  Dependencies*     dependencies()              { return env()->dependencies(); }
  static int        CompiledZap_count()         { return _CompiledZap_count; }
  // Scratch blob used for trial emission (e.g. to measure instruction sizes).
  BufferBlob*       scratch_buffer_blob()       { return _scratch_buffer_blob; }
  void         init_scratch_buffer_blob(int const_size);
  void        clear_scratch_buffer_blob();
  void          set_scratch_buffer_blob(BufferBlob* b) { _scratch_buffer_blob = b; }
  relocInfo*        scratch_locs_memory()       { return _scratch_locs_memory; }
  void          set_scratch_locs_memory(relocInfo* b)  { _scratch_locs_memory = b; }

  // emit to scratch blob, report resulting size
  uint              scratch_emit_size(const Node* n);
  // Flag set while scratch_emit_size is running.
  void       set_in_scratch_emit_size(bool x)   { _in_scratch_emit_size = x; }
  bool           in_scratch_emit_size() const   { return _in_scratch_emit_size; }

  // Capacities of the scratch buffer's sections.
  enum ScratchBufferBlob {
    MAX_inst_size       = 1024,
    MAX_locs_size       = 128, // number of relocInfo elements
    MAX_const_size      = 128,
    MAX_stubs_size      = 128
  };

  // Major entry point.  Given a Scope, compile the associated method.
  // For normal compilations, entry_bci is InvocationEntryBci.  For on stack
  // replacement, entry_bci indicates the bytecode for which to compile a
  // continuation.
  Compile(ciEnv* ci_env, C2Compiler* compiler, ciMethod* target,
          int entry_bci, bool subsume_loads, bool do_escape_analysis);

  // Second major entry point.  From the TypeFunc signature, generate code
  // to pass arguments from the Java calling convention to the C calling
  // convention.
  Compile(ciEnv* ci_env, const TypeFunc *(*gen)(),
          address stub_function, const char *stub_name,
          int is_fancy_jump, bool pass_tls,
          bool save_arg_registers, bool return_pc);
  // From the TypeFunc signature, generate code to pass arguments
  // from Compiled calling convention to Interpreter's calling convention
  void Generate_Compiled_To_Interpreter_Graph(const TypeFunc *tf, address interpreter_entry);

  // From the TypeFunc signature, generate code to pass arguments
  // from Interpreter's calling convention to Compiler's calling convention
  void Generate_Interpreter_To_Compiled_Graph(const TypeFunc *tf);

  // Are we compiling a method?
  bool has_method() { return method() != NULL; }

  // Maybe print some information about this compile.
  void print_compile_messages();

  // Final graph reshaping, a post-pass after the regular optimizer is done.
  bool final_graph_reshaping();

  // returns true if adr is completely contained in the given alias category
  bool must_alias(const TypePtr* adr, int alias_idx);

  // returns true if adr overlaps with the given alias category
  bool can_alias(const TypePtr* adr, int alias_idx);

  // Driver for converting compiler's IR into machine code bits
  void Output();

  // Accessors for node bundling info.
  Bundle* node_bundling(const Node *n);
  bool valid_bundle_info(const Node *n);

  // Schedule and Bundle the instructions
  void ScheduleAndBundle();

  // Build OopMaps for each GC point
  void BuildOopMaps();

  // Append debug info for the node "local" at safepoint node "sfpt" to the
  // "array".  May also consult and add to "objs", which describes the
  // scalar-replaced objects.
  void FillLocArray( int idx, MachSafePointNode* sfpt,
                     Node *local, GrowableArray<ScopeValue*> *array,
                     GrowableArray<ScopeValue*> *objs );

  // If "objs" contains an ObjectValue whose id is "id", returns it, else NULL.
  static ObjectValue* sv_for_node_id(GrowableArray<ScopeValue*> *objs, int id);
  // Requires that "objs" does not contain an ObjectValue whose id matches
  // that of "sv".  Appends "sv".
  static void set_sv_for_object_node(GrowableArray<ScopeValue*> *objs,
                                     ObjectValue* sv );

  // Process an OopMap Element while emitting nodes
  void Process_OopMap_Node(MachNode *mach, int code_offset);

  // Initialize code buffer
  CodeBuffer* init_buffer(uint* blk_starts);

  // Write out basic block data to code buffer
  void fill_buffer(CodeBuffer* cb, uint* blk_starts);

  // Determine which variable sized branches can be shortened
  void shorten_branches(uint* blk_starts, int& code_size, int& reloc_size, int& stub_size);

  // Compute the size of first NumberOfLoopInstrToAlign instructions
  // at the head of a loop.
  void compute_loop_first_inst_sizes();

  // Compute the information for the exception tables
  void FillExceptionTables(uint cnt, uint *call_returns, uint *inct_starts, Label *blk_labels);

  // Stack slots that may be unused by the calling convention but must
  // otherwise be preserved.  On Intel this includes the return address.
  // On PowerPC it includes the 4 words holding the old TOC & LR glue.
  uint in_preserve_stack_slots();

  // "Top of Stack" slots that may be unused by the calling convention but must
  // otherwise be preserved.
  // On Intel these are not necessary and the value can be zero.
  // On Sparc this describes the words reserved for storing a register window
  // when an interrupt occurs.
  static uint out_preserve_stack_slots();

  // Number of outgoing stack slots killed above the out_preserve_stack_slots
  // for calls to C.  Supports the var-args backing area for register parms.
  uint varargs_C_out_slots_killed() const;

  // Number of Stack Slots consumed by a synchronization entry
  int sync_stack_slots() const;

  // Compute the name of old_SP.  See <arch>.ad for frame layout.
  OptoReg::Name compute_old_SP();

#ifdef ENABLE_ZAP_DEAD_LOCALS
  static bool is_node_getting_a_safepoint(Node*);
  void Insert_zap_nodes();
  Node* call_zap_node(MachSafePointNode* n, int block_no);
#endif

 private:
  // Phase control:
  void Init(int aliaslevel);                     // Prepare for a single compilation
  int  Inline_Warm();                            // Find more inlining work.
  void Finish_Warm();                            // Give up on further inlines.
  void Optimize();                               // Given a graph, optimize it
  void Code_Gen();                               // Generate code from a graph

  // Management of the AliasType table.
  void grow_alias_types();
  AliasCacheEntry* probe_alias_cache(const TypePtr* adr_type);
  const TypePtr *flatten_alias_type(const TypePtr* adr_type) const;
  AliasType* find_alias_type(const TypePtr* adr_type, bool no_create, ciField* field);

  void verify_top(Node*) const PRODUCT_RETURN;

  // Intrinsic setup.
  void           register_library_intrinsics();                    // initializer
  CallGenerator* make_vm_intrinsic(ciMethod* m, bool is_virtual);  // constructor
  int            intrinsic_insertion_index(ciMethod* m, bool is_virtual); // helper
  CallGenerator* find_intrinsic(ciMethod* m, bool is_virtual);     // query fn
  void           register_intrinsic(CallGenerator* cg);            // update fn

#ifndef PRODUCT
  // Per-intrinsic usage statistics (non-product builds only).
  static juint  _intrinsic_hist_count[vmIntrinsics::ID_LIMIT];
  static jubyte _intrinsic_hist_flags[vmIntrinsics::ID_LIMIT];
#endif

 public:

  // Note:  Histogram array size is about 1 Kb.
  enum {                        // flag bits:
    _intrinsic_worked = 1,      // succeeded at least once
    _intrinsic_failed = 2,      // tried it but it failed
    _intrinsic_disabled = 4,    // was requested but disabled (e.g., -XX:-InlineUnsafeOps)
    _intrinsic_virtual = 8,     // was seen in the virtual form (rare)
    _intrinsic_both = 16        // was seen in the non-virtual form (usual)
  };
  // Update histogram.  Return boolean if this is a first-time occurrence.
  static bool gather_intrinsic_statistics(vmIntrinsics::ID id,
                                          bool is_virtual, int flags) PRODUCT_RETURN0;
  static void print_intrinsic_statistics() PRODUCT_RETURN;

  // Graph verification code
  // Walk the node list, verifying that there is a one-to-one
  // correspondence between Use-Def edges and Def-Use edges
  // The option no_dead_code enables stronger checks that the
  // graph is strongly connected from root in both directions.
  void verify_graph_edges(bool no_dead_code = false) PRODUCT_RETURN;

  // Print bytecodes, including the scope inlining tree
  void print_codes();

  // End-of-run dumps.
  static void print_statistics() PRODUCT_RETURN;

  // Dump formatted assembly
  void dump_asm(int *pcs = NULL, uint pc_limit = 0) PRODUCT_RETURN;
  void dump_pc(int *pcs, int pc_limit, Node *n);

  // Verify ADLC assumptions during startup
  static void adlc_verification() PRODUCT_RETURN;

  // Definitions of pd methods
  static void pd_compiler2_init();
};
1972 | 915 |
916 #endif // SHARE_VM_OPTO_COMPILE_HPP |