Mercurial > hg > truffle
comparison src/share/vm/code/compiledIC.hpp @ 6725:da91efe96a93
6964458: Reimplement class meta-data storage to use native memory
Summary: Remove PermGen, allocate meta-data in metaspace linked to class loaders, rewrite GC walking, rewrite and rename metadata to be C++ classes
Reviewed-by: jmasa, stefank, never, coleenp, kvn, brutisso, mgerdin, dholmes, jrose, twisti, roland
Contributed-by: jmasa <jon.masamitsu@oracle.com>, stefank <stefan.karlsson@oracle.com>, mgerdin <mikael.gerdin@oracle.com>, never <tom.rodriguez@oracle.com>
author | coleenp |
---|---|
date | Sat, 01 Sep 2012 13:25:18 -0400 |
parents | 1d1603768966 |
children | a6e09d6dd8e5 |
comparison
equal
deleted
inserted
replaced
6724:36d1d483d5d6 | 6725:da91efe96a93 |
---|---|
1 /* | 1 /* |
2 * Copyright (c) 1997, 2011, Oracle and/or its affiliates. All rights reserved. | 2 * Copyright (c) 1997, 2012, Oracle and/or its affiliates. All rights reserved. |
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. | 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. |
4 * | 4 * |
5 * This code is free software; you can redistribute it and/or modify it | 5 * This code is free software; you can redistribute it and/or modify it |
6 * under the terms of the GNU General Public License version 2 only, as | 6 * under the terms of the GNU General Public License version 2 only, as |
7 * published by the Free Software Foundation. | 7 * published by the Free Software Foundation. |
24 | 24 |
25 #ifndef SHARE_VM_CODE_COMPILEDIC_HPP | 25 #ifndef SHARE_VM_CODE_COMPILEDIC_HPP |
26 #define SHARE_VM_CODE_COMPILEDIC_HPP | 26 #define SHARE_VM_CODE_COMPILEDIC_HPP |
27 | 27 |
28 #include "interpreter/linkResolver.hpp" | 28 #include "interpreter/linkResolver.hpp" |
29 #include "oops/compiledICHolderKlass.hpp" | 29 #include "oops/compiledICHolder.hpp" |
30 #include "oops/compiledICHolderOop.hpp" | |
31 #include "oops/klassOop.hpp" | |
32 #ifdef TARGET_ARCH_x86 | 30 #ifdef TARGET_ARCH_x86 |
33 # include "nativeInst_x86.hpp" | 31 # include "nativeInst_x86.hpp" |
34 #endif | 32 #endif |
35 #ifdef TARGET_ARCH_sparc | 33 #ifdef TARGET_ARCH_sparc |
36 # include "nativeInst_sparc.hpp" | 34 # include "nativeInst_sparc.hpp" |
55 // [1] --<-- Clean -->--- [1] | 53 // [1] --<-- Clean -->--- [1] |
56 // / (null) \ | 54 // / (null) \ |
57 // / \ /-<-\ | 55 // / \ /-<-\ |
58 // / [2] \ / \ | 56 // / [2] \ / \ |
59 // Interpreted ---------> Monomorphic | [3] | 57 // Interpreted ---------> Monomorphic | [3] |
60 // (compiledICHolderOop) (klassOop) | | 58 // (CompiledICHolder*) (Klass*) | |
61 // \ / \ / | 59 // \ / \ / |
62 // [4] \ / [4] \->-/ | 60 // [4] \ / [4] \->-/ |
63 // \->- Megamorphic -<-/ | 61 // \->- Megamorphic -<-/ |
64 // (methodOop) | 62 // (Method*) |
65 // | 63 // |
66 // The text in parentheses () refer to the value of the inline cache receiver (mov instruction) | 64 // The text in parentheses () refer to the value of the inline cache receiver (mov instruction) |
67 // | 65 // |
68 // The numbers in square brackets refer to the kind of transition: | 66 // The numbers in square brackets refer to the kind of transition: |
69 // [1]: Initial fixup. Receiver is found from debug information | 67 // [1]: Initial fixup. Receiver is found from debug information |
70 // [2]: Compilation of a method | 68 // [2]: Compilation of a method |
71 // [3]: Recompilation of a method (note: only entry is changed. The klassOop must stay the same) | 69 // [3]: Recompilation of a method (note: only entry is changed. The Klass* must stay the same) |
72 // [4]: Inline cache miss. We go directly to megamorphic call. | 70 // [4]: Inline cache miss. We go directly to megamorphic call. |
73 // | 71 // |
74 // The class automatically inserts transition stubs (using the InlineCacheBuffer) when an MT-unsafe | 72 // The class automatically inserts transition stubs (using the InlineCacheBuffer) when an MT-unsafe |
75 // transition is made to a stub. | 73 // transition is made to a stub. |
76 // | 74 // |
77 class CompiledIC; | 75 class CompiledIC; |
78 | 76 class ICStub; |
79 class CompiledICInfo { | 77 |
80 friend class CompiledIC; | 78 class CompiledICInfo : public StackObj { |
81 private: | 79 private: |
82 address _entry; // entry point for call | 80 address _entry; // entry point for call |
83 Handle _cached_oop; // Value of cached_oop (either in stub or inline cache) | 81 void* _cached_value; // Value of cached_value (either in stub or inline cache) |
82 bool _is_icholder; // Is the cached value a CompiledICHolder* | |
84 bool _is_optimized; // it is an optimized virtual call (i.e., can be statically bound) | 83 bool _is_optimized; // it is an optimized virtual call (i.e., can be statically bound) |
85 bool _to_interpreter; // Call it to interpreter | 84 bool _to_interpreter; // Call it to interpreter |
85 bool _release_icholder; | |
86 public: | 86 public: |
87 address entry() const { return _entry; } | 87 address entry() const { return _entry; } |
88 Handle cached_oop() const { return _cached_oop; } | 88 Metadata* cached_metadata() const { assert(!_is_icholder, ""); return (Metadata*)_cached_value; } |
89 CompiledICHolder* claim_cached_icholder() { | |
90 assert(_is_icholder, ""); | |
91 assert(_cached_value != NULL, "must be non-NULL"); | |
92 _release_icholder = false; | |
93 CompiledICHolder* icholder = (CompiledICHolder*)_cached_value; | |
94 icholder->claim(); | |
95 return icholder; | |
96 } | |
89 bool is_optimized() const { return _is_optimized; } | 97 bool is_optimized() const { return _is_optimized; } |
98 bool to_interpreter() const { return _to_interpreter; } | |
99 | |
100 void set_compiled_entry(address entry, Klass* klass, bool is_optimized) { | |
101 _entry = entry; | |
102 _cached_value = (void*)klass; | |
103 _to_interpreter = false; | |
104 _is_icholder = false; | |
105 _is_optimized = is_optimized; | |
106 _release_icholder = false; | |
107 } | |
108 | |
109 void set_interpreter_entry(address entry, Method* method) { | |
110 _entry = entry; | |
111 _cached_value = (void*)method; | |
112 _to_interpreter = true; | |
113 _is_icholder = false; | |
114 _is_optimized = true; | |
115 _release_icholder = false; | |
116 } | |
117 | |
118 void set_icholder_entry(address entry, CompiledICHolder* icholder) { | |
119 _entry = entry; | |
120 _cached_value = (void*)icholder; | |
121 _to_interpreter = true; | |
122 _is_icholder = true; | |
123 _is_optimized = false; | |
124 _release_icholder = true; | |
125 } | |
126 | |
127 CompiledICInfo(): _entry(NULL), _cached_value(NULL), _is_icholder(false), | |
128 _to_interpreter(false), _is_optimized(false), _release_icholder(false) { | |
129 } | |
130 ~CompiledICInfo() { | |
131 // In rare cases the info is computed but not used, so release any | |
132 // CompiledICHolder* that was created | |
133 if (_release_icholder) { | |
134 assert(_is_icholder, "must be"); | |
135 CompiledICHolder* icholder = (CompiledICHolder*)_cached_value; | |
136 icholder->claim(); | |
137 delete icholder; | |
138 } | |
139 } | |
90 }; | 140 }; |
91 | 141 |
92 class CompiledIC: public ResourceObj { | 142 class CompiledIC: public ResourceObj { |
93 friend class InlineCacheBuffer; | 143 friend class InlineCacheBuffer; |
94 friend class ICStub; | 144 friend class ICStub; |
95 | 145 |
96 | 146 |
97 private: | 147 private: |
98 NativeCall* _ic_call; // the call instruction | 148 NativeCall* _ic_call; // the call instruction |
99 oop* _oop_addr; // patchable oop cell for this IC | 149 NativeMovConstReg* _value; // patchable value cell for this IC |
100 RelocIterator _oops; // iteration over any and all set-oop instructions | |
101 bool _is_optimized; // an optimized virtual call (i.e., no compiled IC) | 150 bool _is_optimized; // an optimized virtual call (i.e., no compiled IC) |
102 | 151 |
103 CompiledIC(NativeCall* ic_call); | 152 CompiledIC(nmethod* nm, NativeCall* ic_call); |
104 CompiledIC(Relocation* ic_reloc); // Must be of virtual_call_type/opt_virtual_call_type | 153 |
154 static bool is_icholder_entry(address entry); | |
105 | 155 |
106 // low-level inline-cache manipulation. Cannot be accessed directly, since it might not be MT-safe | 156 // low-level inline-cache manipulation. Cannot be accessed directly, since it might not be MT-safe |
107 // to change an inline-cache. These change the underlying inline-cache directly. They *never* make | 157 // to change an inline-cache. These change the underlying inline-cache directly. They *never* make |
108 // changes to a transition stub. | 158 // changes to a transition stub. |
109 void set_ic_destination(address entry_point); | 159 void internal_set_ic_destination(address entry_point, bool is_icstub, void* cache, bool is_icholder); |
110 void set_cached_oop(oop cache); | 160 void set_ic_destination(ICStub* stub); |
161 void set_ic_destination(address entry_point) { | |
162 assert(_is_optimized, "use set_ic_destination_and_value instead"); | |
163 internal_set_ic_destination(entry_point, false, NULL, false); | |
164 } | |
165 // This only for use by ICStubs where the type of the value isn't known | |
166 void set_ic_destination_and_value(address entry_point, void* value) { | |
167 internal_set_ic_destination(entry_point, false, value, is_icholder_entry(entry_point)); | |
168 } | |
169 void set_ic_destination_and_value(address entry_point, Metadata* value) { | |
170 internal_set_ic_destination(entry_point, false, value, false); | |
171 } | |
172 void set_ic_destination_and_value(address entry_point, CompiledICHolder* value) { | |
173 internal_set_ic_destination(entry_point, false, value, true); | |
174 } | |
111 | 175 |
112 // Reads the location of the transition stub. This will fail with an assertion, if no transition stub is | 176 // Reads the location of the transition stub. This will fail with an assertion, if no transition stub is |
113 // associated with the inline cache. | 177 // associated with the inline cache. |
114 address stub_address() const; | 178 address stub_address() const; |
115 bool is_in_transition_state() const; // Use InlineCacheBuffer | 179 bool is_in_transition_state() const; // Use InlineCacheBuffer |
116 | 180 |
117 public: | 181 public: |
118 // conversion (machine PC to CompiledIC*) | 182 // conversion (machine PC to CompiledIC*) |
119 friend CompiledIC* CompiledIC_before(address return_addr); | 183 friend CompiledIC* CompiledIC_before(nmethod* nm, address return_addr); |
120 friend CompiledIC* CompiledIC_at(address call_site); | 184 friend CompiledIC* CompiledIC_at(nmethod* nm, address call_site); |
121 friend CompiledIC* CompiledIC_at(Relocation* call_site); | 185 friend CompiledIC* CompiledIC_at(Relocation* call_site); |
122 | 186 |
123 // Return the cached_oop/destination associated with this inline cache. If the cache currently points | 187 // This is used to release CompiledICHolder*s from nmethods that |
188 // are about to be freed. The callsite might contain other stale | |
189 // values of other kinds so it must be careful. | |
190 static void cleanup_call_site(virtual_call_Relocation* call_site); | |
191 static bool is_icholder_call_site(virtual_call_Relocation* call_site); | |
192 | |
193 // Return the cached_metadata/destination associated with this inline cache. If the cache currently points | |
124 // to a transition stub, it will read the values from the transition stub. | 194 // to a transition stub, it will read the values from the transition stub. |
125 oop cached_oop() const; | 195 void* cached_value() const; |
196 CompiledICHolder* cached_icholder() const { | |
197 assert(is_icholder_call(), "must be"); | |
198 return (CompiledICHolder*) cached_value(); | |
199 } | |
200 Metadata* cached_metadata() const { | |
201 assert(!is_icholder_call(), "must be"); | |
202 return (Metadata*) cached_value(); | |
203 } | |
204 | |
126 address ic_destination() const; | 205 address ic_destination() const; |
127 | 206 |
128 bool is_optimized() const { return _is_optimized; } | 207 bool is_optimized() const { return _is_optimized; } |
129 | 208 |
130 // State | 209 // State |
131 bool is_clean() const; | 210 bool is_clean() const; |
132 bool is_megamorphic() const; | 211 bool is_megamorphic() const; |
133 bool is_call_to_compiled() const; | 212 bool is_call_to_compiled() const; |
134 bool is_call_to_interpreted() const; | 213 bool is_call_to_interpreted() const; |
214 | |
215 bool is_icholder_call() const; | |
135 | 216 |
136 address end_of_call() { return _ic_call->return_address(); } | 217 address end_of_call() { return _ic_call->return_address(); } |
137 | 218 |
138 // MT-safe patching of inline caches. Note: Only safe to call is_xxx when holding the CompiledIC_lock | 219 // MT-safe patching of inline caches. Note: Only safe to call is_xxx when holding the CompiledIC_lock |
139 // so you are guaranteed that no patching takes place. The same goes for verify. | 220 // so you are guaranteed that no patching takes place. The same goes for verify. |
142 // to manipulate the inline cache in MT-unsafe ways. | 223 // to manipulate the inline cache in MT-unsafe ways. |
143 // | 224 // |
144 // They all takes a TRAP argument, since they can cause a GC if the inline-cache buffer is full. | 225 // They all takes a TRAP argument, since they can cause a GC if the inline-cache buffer is full. |
145 // | 226 // |
146 void set_to_clean(); // Can only be called during a safepoint operation | 227 void set_to_clean(); // Can only be called during a safepoint operation |
147 void set_to_monomorphic(const CompiledICInfo& info); | 228 void set_to_monomorphic(CompiledICInfo& info); |
148 void set_to_megamorphic(CallInfo* call_info, Bytecodes::Code bytecode, TRAPS); | 229 void set_to_megamorphic(CallInfo* call_info, Bytecodes::Code bytecode, TRAPS); |
149 | 230 |
150 static void compute_monomorphic_entry(methodHandle method, KlassHandle receiver_klass, | 231 static void compute_monomorphic_entry(methodHandle method, KlassHandle receiver_klass, |
151 bool is_optimized, bool static_bound, CompiledICInfo& info, TRAPS); | 232 bool is_optimized, bool static_bound, CompiledICInfo& info, TRAPS); |
152 | 233 |
157 void print() PRODUCT_RETURN; | 238 void print() PRODUCT_RETURN; |
158 void print_compiled_ic() PRODUCT_RETURN; | 239 void print_compiled_ic() PRODUCT_RETURN; |
159 void verify() PRODUCT_RETURN; | 240 void verify() PRODUCT_RETURN; |
160 }; | 241 }; |
161 | 242 |
162 inline CompiledIC* CompiledIC_before(address return_addr) { | 243 inline CompiledIC* CompiledIC_before(nmethod* nm, address return_addr) { |
163 CompiledIC* c_ic = new CompiledIC(nativeCall_before(return_addr)); | 244 CompiledIC* c_ic = new CompiledIC(nm, nativeCall_before(return_addr)); |
164 c_ic->verify(); | 245 c_ic->verify(); |
165 return c_ic; | 246 return c_ic; |
166 } | 247 } |
167 | 248 |
168 inline CompiledIC* CompiledIC_at(address call_site) { | 249 inline CompiledIC* CompiledIC_at(nmethod* nm, address call_site) { |
169 CompiledIC* c_ic = new CompiledIC(nativeCall_at(call_site)); | 250 CompiledIC* c_ic = new CompiledIC(nm, nativeCall_at(call_site)); |
170 c_ic->verify(); | 251 c_ic->verify(); |
171 return c_ic; | 252 return c_ic; |
172 } | 253 } |
173 | 254 |
174 inline CompiledIC* CompiledIC_at(Relocation* call_site) { | 255 inline CompiledIC* CompiledIC_at(Relocation* call_site) { |
175 CompiledIC* c_ic = new CompiledIC(call_site); | 256 assert(call_site->type() == relocInfo::virtual_call_type || |
257 call_site->type() == relocInfo::opt_virtual_call_type, "wrong reloc. info"); | |
258 CompiledIC* c_ic = new CompiledIC(call_site->code(), nativeCall_at(call_site->addr())); | |
176 c_ic->verify(); | 259 c_ic->verify(); |
177 return c_ic; | 260 return c_ic; |
178 } | 261 } |
179 | 262 |
180 | 263 |
189 // / \ | 272 // / \ |
190 // compiled code <------------> interpreted code | 273 // compiled code <------------> interpreted code |
191 // | 274 // |
192 // Clean: Calls directly to runtime method for fixup | 275 // Clean: Calls directly to runtime method for fixup |
193 // Compiled code: Calls directly to compiled code | 276 // Compiled code: Calls directly to compiled code |
194 // Interpreted code: Calls to stub that set methodOop reference | 277 // Interpreted code: Calls to stub that set Method* reference |
195 // | 278 // |
196 // | 279 // |
197 class CompiledStaticCall; | 280 class CompiledStaticCall; |
198 | 281 |
199 class StaticCallInfo { | 282 class StaticCallInfo { |