comparison src/share/vm/opto/library_call.cpp @ 6615:09aad8452938

7190310: Inlining WeakReference.get(), and hoisting $referent may lead to non-terminating loops
Summary: In C2 add software membar after load from Reference.referent field to prevent commoning of loads across safepoint since GC can change its value. In C1 always generate Reference.get() intrinsic.
Reviewed-by: roland, twisti, dholmes, johnc
author kvn
date Mon, 20 Aug 2012 09:58:58 -0700
parents 1d7922586cf6
children da91efe96a93
comparing 6614:006050192a5a with 6615:09aad8452938
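The failure mode named in the title can be sketched with a small Java program (a hypothetical reproducer written for illustration, not part of this changeset). Reference.get() is intrinsified by C2; without a barrier after the referent load, the compiler may treat the load as loop-invariant, hoist it out of the loop, and never observe the collector clearing the referent:

    import java.lang.ref.WeakReference;

    public class HoistedReferentLoop {
        public static void main(String[] args) {
            // The referent is immediately unreachable, so the GC may
            // clear the weak reference at any subsequent safepoint.
            WeakReference<Object> ref = new WeakReference<Object>(new Object());
            // Without the membar added by this change, the compiled loop
            // could reuse one hoisted load of referent forever and never
            // terminate. With the membar, each iteration reloads referent.
            while (ref.get() != null) {
                System.gc(); // encourage collection of the unreachable referent
            }
            System.out.println("referent cleared, loop exited");
        }
    }

With this change the intrinsified load is followed by a MemBarCPUOrder node, so reads of the referent field cannot be commoned across a safepoint.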
@@ -169,11 +169,11 @@
   int classify_unsafe_addr(Node* &base, Node* &offset);
   Node* make_unsafe_address(Node* base, Node* offset);
   // Helper for inline_unsafe_access.
   // Generates the guards that check whether the result of
   // Unsafe.getObject should be recorded in an SATB log buffer.
-  void insert_g1_pre_barrier(Node* base_oop, Node* offset, Node* pre_val);
+  void insert_pre_barrier(Node* base_oop, Node* offset, Node* pre_val, int nargs, bool need_mem_bar);
   bool inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile);
   bool inline_unsafe_prefetch(bool is_native_ptr, bool is_store, bool is_static);
   bool inline_unsafe_allocate();
   bool inline_unsafe_copyMemory();
   bool inline_native_currentThread();
@@ -289,10 +289,12 @@
     case vmIntrinsics::_indexOf:
     case vmIntrinsics::_compareTo:
     case vmIntrinsics::_equals:
     case vmIntrinsics::_equalsC:
       break;  // InlineNatives does not control String.compareTo
+    case vmIntrinsics::_Reference_get:
+      break;  // InlineNatives does not control Reference.get
     default:
       return NULL;
     }
   }
 
@@ -359,15 +361,14 @@
   case vmIntrinsics::_numberOfTrailingZeros_l:
     if (!Matcher::match_rule_supported(Op_CountTrailingZerosL)) return NULL;
     break;
 
   case vmIntrinsics::_Reference_get:
-    // It is only when G1 is enabled that we absolutely
-    // need to use the intrinsic version of Reference.get()
-    // so that the value in the referent field, if necessary,
-    // can be registered by the pre-barrier code.
-    if (!UseG1GC) return NULL;
+    // Use the intrinsic version of Reference.get() so that the value in
+    // the referent field can be registered by the G1 pre-barrier code.
+    // Also add memory barrier to prevent commoning reads from this field
+    // across safepoint since GC can change its value.
     break;
 
   default:
     assert(id <= vmIntrinsics::LAST_COMPILER_INLINE, "caller responsibility");
     assert(id != vmIntrinsics::_Object_init && id != vmIntrinsics::_invoke, "enum out of order?");
@@ -2193,18 +2194,21 @@
 
 //----------------------------inline_unsafe_access----------------------------
 
 const static BasicType T_ADDRESS_HOLDER = T_LONG;
 
-// Helper that guards and inserts a G1 pre-barrier.
-void LibraryCallKit::insert_g1_pre_barrier(Node* base_oop, Node* offset, Node* pre_val) {
-  assert(UseG1GC, "should not call this otherwise");
-
+// Helper that guards and inserts a pre-barrier.
+void LibraryCallKit::insert_pre_barrier(Node* base_oop, Node* offset,
+                                        Node* pre_val, int nargs, bool need_mem_bar) {
   // We could be accessing the referent field of a reference object. If so, when G1
   // is enabled, we need to log the value in the referent field in an SATB buffer.
   // This routine performs some compile time filters and generates suitable
   // runtime filters that guard the pre-barrier code.
+  // Also add memory barrier for non-volatile load from the referent field
+  // to prevent commoning of loads across safepoint.
+  if (!UseG1GC && !need_mem_bar)
+    return;
 
   // Some compile time checks.
 
   // If offset is a constant, is it java_lang_ref_Reference::_reference_offset?
   const TypeX* otype = offset->find_intptr_t_type();
@@ -2222,28 +2226,27 @@
       return;
     }
 
     const TypeInstPtr* itype = btype->isa_instptr();
     if (itype != NULL) {
-      // Can the klass of base_oop be statically determined
-      // to be _not_ a sub-class of Reference?
+      // Can the klass of base_oop be statically determined to be
+      // _not_ a sub-class of Reference and _not_ Object?
       ciKlass* klass = itype->klass();
-      if (klass->is_subtype_of(env()->Reference_klass()) &&
-          !env()->Reference_klass()->is_subtype_of(klass)) {
+      if ( klass->is_loaded() &&
+          !klass->is_subtype_of(env()->Reference_klass()) &&
+          !env()->Object_klass()->is_subtype_of(klass)) {
         return;
       }
     }
   }
 
   // The compile time filters did not reject base_oop/offset so
   // we need to generate the following runtime filters
   //
   // if (offset == java_lang_ref_Reference::_reference_offset) {
-  //   if (base != null) {
-  //     if (instance_of(base, java.lang.ref.Reference)) {
-  //       pre_barrier(_, pre_val, ...);
-  //     }
+  //   if (instance_of(base, java.lang.ref.Reference)) {
+  //     pre_barrier(_, pre_val, ...);
   //   }
   // }
 
   float likely = PROB_LIKELY(0.999);
   float unlikely = PROB_UNLIKELY(0.999);
@@ -2252,23 +2255,23 @@
 #define __ ideal.
 
   Node* referent_off = __ ConX(java_lang_ref_Reference::referent_offset);
 
   __ if_then(offset, BoolTest::eq, referent_off, unlikely); {
-    __ if_then(base_oop, BoolTest::ne, null(), likely); {
-
       // Update graphKit memory and control from IdealKit.
       sync_kit(ideal);
 
       Node* ref_klass_con = makecon(TypeKlassPtr::make(env()->Reference_klass()));
+      _sp += nargs;  // gen_instanceof might do an uncommon trap
       Node* is_instof = gen_instanceof(base_oop, ref_klass_con);
+      _sp -= nargs;
 
       // Update IdealKit memory and control from graphKit.
       __ sync_kit(this);
 
       Node* one = __ ConI(1);
-
+      // is_instof == 0 if base_oop == NULL
       __ if_then(is_instof, BoolTest::eq, one, unlikely); {
 
         // Update graphKit from IdealKit.
         sync_kit(ideal);
 
@@ -2276,16 +2279,19 @@
         pre_barrier(false /* do_load */,
                     __ ctrl(),
                     NULL /* obj */, NULL /* adr */, max_juint /* alias_idx */, NULL /* val */, NULL /* val_type */,
                     pre_val /* pre_val */,
                     T_OBJECT);
-
+        if (need_mem_bar) {
+          // Add memory barrier to prevent commoning reads from this field
+          // across safepoint since GC can change its value.
+          insert_mem_bar(Op_MemBarCPUOrder);
+        }
         // Update IdealKit from graphKit.
         __ sync_kit(this);
 
       } __ end_if(); // _ref_type != ref_none
-    } __ end_if(); // base != NULL
   } __ end_if(); // offset == referent_offset
 
   // Final sync IdealKit and GraphKit.
   final_sync(ideal);
 #undef __
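Read as Java, the runtime filters generated above amount to the following predicate (an illustrative sketch; the class, method, and parameter names are invented here, with referentOffset standing in for java_lang_ref_Reference::_reference_offset). It also shows why the old "base != null" guard could be dropped:

    import java.lang.ref.Reference;

    final class PreBarrierGuards {
        // Hypothetical Java rendering of the generated runtime filters.
        static boolean needsPreBarrier(Object base, long offset, long referentOffset) {
            if (offset != referentOffset) {
                return false; // not touching the referent slot
            }
            // instanceof is false for a null base, which is exactly the
            // "is_instof == 0 if base_oop == NULL" comment above: the
            // separate null check from the old code is redundant.
            return base instanceof Reference;
        }
    }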
@@ -2416,11 +2422,13 @@
 
   // If we are reading the value of the referent field of a Reference
   // object (either by using Unsafe directly or through reflection)
   // then, if G1 is enabled, we need to record the referent in an
   // SATB log buffer using the pre-barrier mechanism.
-  bool need_read_barrier = UseG1GC && !is_native_ptr && !is_store &&
+  // Also we need to add memory barrier to prevent commoning reads
+  // from this field across safepoint since GC can change its value.
+  bool need_read_barrier = !is_native_ptr && !is_store &&
                            offset != top() && heap_base_oop != top();
 
   if (!is_store && type == T_OBJECT) {
     // Attempt to infer a sharper value type from the offset and base type.
     ciKlass* sharpened_klass = NULL;
@@ -2506,11 +2514,11 @@
     case T_FLOAT:
       push(p);
       break;
     case T_OBJECT:
       if (need_read_barrier) {
-        insert_g1_pre_barrier(heap_base_oop, offset, p);
+        insert_pre_barrier(heap_base_oop, offset, p, nargs, !(is_volatile || need_mem_bar));
       }
       push(p);
       break;
     case T_ADDRESS:
       // Cast to an int type.
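For context, this is the kind of read that takes the inline_unsafe_access() path (a sketch against the sun.misc.Unsafe API of this era; the reflective lookup boilerplate is an assumption for illustration, not from the changeset). It is a non-volatile object load whose base and offset the compile time filters cannot reject, so need_read_barrier is true and insert_pre_barrier() wraps it with the runtime filters and, now, the memory barrier:

    import java.lang.ref.WeakReference;
    import java.lang.reflect.Field;
    import sun.misc.Unsafe;

    public class UnsafeReferentRead {
        public static void main(String[] args) throws Exception {
            Field theUnsafe = Unsafe.class.getDeclaredField("theUnsafe");
            theUnsafe.setAccessible(true);
            Unsafe unsafe = (Unsafe) theUnsafe.get(null);

            // Offset of java.lang.ref.Reference.referent -- the value the
            // compile time filters compare against referent_offset.
            long referentOffset = unsafe.objectFieldOffset(
                    java.lang.ref.Reference.class.getDeclaredField("referent"));

            WeakReference<Object> ref = new WeakReference<Object>(new Object());
            // Compiled via inline_unsafe_access(): a plain T_OBJECT load
            // from a possible Reference base at a non-constant offset.
            Object referent = unsafe.getObject(ref, referentOffset);
            System.out.println(referent);
        }
    }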
@@ -5482,8 +5490,12 @@
               control(),
               NULL /* obj */, NULL /* adr */, max_juint /* alias_idx */, NULL /* val */, NULL /* val_type */,
               result /* pre_val */,
               T_OBJECT);
 
+  // Add memory barrier to prevent commoning reads from this field
+  // across safepoint since GC can change its value.
+  insert_mem_bar(Op_MemBarCPUOrder);
+
   push(result);
   return true;
 }