comparison src/share/vm/opto/library_call.cpp @ 3252:92add02409c9
Merge
author:     jmasa
date:       Fri, 08 Apr 2011 14:19:50 -0700
parents:    3d58a4983660 e1162778c1c8
children:   59766fd005ff
--- src/share/vm/opto/library_call.cpp (2437:4f978fb6c81a)
+++ src/share/vm/opto/library_call.cpp (3252:92add02409c9)
@@ -164,10 +164,14 @@
 bool inline_min_max(vmIntrinsics::ID id);
 Node* generate_min_max(vmIntrinsics::ID id, Node* x, Node* y);
 // This returns Type::AnyPtr, RawPtr, or OopPtr.
 int classify_unsafe_addr(Node* &base, Node* &offset);
 Node* make_unsafe_address(Node* base, Node* offset);
+// Helper for inline_unsafe_access.
+// Generates the guards that check whether the result of
+// Unsafe.getObject should be recorded in an SATB log buffer.
+void insert_g1_pre_barrier(Node* base_oop, Node* offset, Node* pre_val);
 bool inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile);
 bool inline_unsafe_prefetch(bool is_native_ptr, bool is_store, bool is_static);
 bool inline_unsafe_allocate();
 bool inline_unsafe_copyMemory();
 bool inline_native_currentThread();
@@ -238,10 +242,12 @@
 bool inline_fp_conversions(vmIntrinsics::ID id);
 bool inline_numberOfLeadingZeros(vmIntrinsics::ID id);
 bool inline_numberOfTrailingZeros(vmIntrinsics::ID id);
 bool inline_bitCount(vmIntrinsics::ID id);
 bool inline_reverseBytes(vmIntrinsics::ID id);
+
+bool inline_reference_get();
 };
 
 
 //---------------------------make_vm_intrinsic----------------------------
 CallGenerator* Compile::make_vm_intrinsic(ciMethod* m, bool is_virtual) {
@@ -334,10 +340,18 @@
 case vmIntrinsics::_bitCount_i:
 case vmIntrinsics::_bitCount_l:
 if (!UsePopCountInstruction) return NULL;
 break;
 
+case vmIntrinsics::_Reference_get:
+// It is only when G1 is enabled that we absolutely
+// need to use the intrinsic version of Reference.get()
+// so that the value in the referent field, if necessary,
+// can be registered by the pre-barrier code.
+if (!UseG1GC) return NULL;
+break;
+
 default:
 assert(id <= vmIntrinsics::LAST_COMPILER_INLINE, "caller responsibility");
 assert(id != vmIntrinsics::_Object_init && id != vmIntrinsics::_invoke, "enum out of order?");
 break;
 }
@@ -385,10 +399,11 @@
 char buf[1000];
 const char* str = vmIntrinsics::short_name_as_C_string(intrinsic_id(), buf, sizeof(buf));
 tty->print_cr("Intrinsic %s", str);
 }
 #endif
+
 if (kit.try_to_inline()) {
 if (PrintIntrinsics || PrintInlining NOT_PRODUCT( || PrintOptoInlining) ) {
 CompileTask::print_inlining(kit.callee(), jvms->depth() - 1, kit.bci(), is_virtual() ? "(intrinsic, virtual)" : "(intrinsic)");
 }
 C->gather_intrinsic_statistics(intrinsic_id(), is_virtual(), Compile::_intrinsic_worked);
@@ -400,14 +415,22 @@
 }
 return kit.transfer_exceptions_into_jvms();
 }
 
 if (PrintIntrinsics) {
-tty->print("Did not inline intrinsic %s%s at bci:%d in",
+if (jvms->has_method()) {
+// Not a root compile.
+tty->print("Did not inline intrinsic %s%s at bci:%d in",
+vmIntrinsics::name_at(intrinsic_id()),
+(is_virtual() ? " (virtual)" : ""), kit.bci());
+kit.caller()->print_short_name(tty);
+tty->print_cr(" (%d bytes)", kit.caller()->code_size());
+} else {
+// Root compile
+tty->print("Did not generate intrinsic %s%s at bci:%d in",
 vmIntrinsics::name_at(intrinsic_id()),
 (is_virtual() ? " (virtual)" : ""), kit.bci());
-kit.caller()->print_short_name(tty);
-tty->print_cr(" (%d bytes)", kit.caller()->code_size());
+}
 }
 C->gather_intrinsic_statistics(intrinsic_id(), is_virtual(), Compile::_intrinsic_failed);
 return NULL;
 }
@@ -414,11 +437,19 @@
 
 bool LibraryCallKit::try_to_inline() {
 // Handle symbolic names for otherwise undistinguished boolean switches:
 const bool is_store = true;
 const bool is_native_ptr = true;
 const bool is_static = true;
+
+if (!jvms()->has_method()) {
+// Root JVMState has a null method.
+assert(map()->memory()->Opcode() == Op_Parm, "");
+// Insert the memory aliasing node
+set_all_memory(reset_memory());
+}
+assert(merged_memory(), "");
 
 switch (intrinsic_id()) {
 case vmIntrinsics::_hashCode:
 return inline_native_hashcode(intrinsic()->is_virtual(), !is_static);
 case vmIntrinsics::_identityHashCode:
@@ -655,10 +686,13 @@
 case vmIntrinsics::_attemptUpdate:
 return inline_native_AtomicLong_attemptUpdate();
 
 case vmIntrinsics::_getCallerClass:
 return inline_native_Reflection_getCallerClass();
+
+case vmIntrinsics::_Reference_get:
+return inline_reference_get();
 
 default:
 // If you get here, it may be that someone has added a new intrinsic
 // to the list in vmSymbols.hpp without implementing it here.
 #ifndef PRODUCT
@@ -2074,10 +2108,114 @@
 
 //----------------------------inline_unsafe_access----------------------------
 
 const static BasicType T_ADDRESS_HOLDER = T_LONG;
 
+// Helper that guards and inserts a G1 pre-barrier.
+void LibraryCallKit::insert_g1_pre_barrier(Node* base_oop, Node* offset, Node* pre_val) {
+assert(UseG1GC, "should not call this otherwise");
+
+// We could be accessing the referent field of a reference object. If so, when G1
+// is enabled, we need to log the value in the referent field in an SATB buffer.
+// This routine performs some compile time filters and generates suitable
+// runtime filters that guard the pre-barrier code.
+
+// Some compile time checks.
+
+// If offset is a constant, is it java_lang_ref_Reference::_reference_offset?
+const TypeX* otype = offset->find_intptr_t_type();
+if (otype != NULL && otype->is_con() &&
+otype->get_con() != java_lang_ref_Reference::referent_offset) {
+// Constant offset but not the reference_offset so just return
+return;
+}
+
+// We only need to generate the runtime guards for instances.
+const TypeOopPtr* btype = base_oop->bottom_type()->isa_oopptr();
+if (btype != NULL) {
+if (btype->isa_aryptr()) {
+// Array type so nothing to do
+return;
+}
+
+const TypeInstPtr* itype = btype->isa_instptr();
+if (itype != NULL) {
+// Can the klass of base_oop be statically determined
+// to be _not_ a sub-class of Reference?
+ciKlass* klass = itype->klass();
+if (klass->is_subtype_of(env()->Reference_klass()) &&
+!env()->Reference_klass()->is_subtype_of(klass)) {
+return;
+}
+}
+}
+
+// The compile time filters did not reject base_oop/offset so
+// we need to generate the following runtime filters
+//
+// if (offset == java_lang_ref_Reference::_reference_offset) {
+// if (base != null) {
+// if (klass(base)->reference_type() != REF_NONE)) {
+// pre_barrier(_, pre_val, ...);
+// }
+// }
+// }
+
+float likely = PROB_LIKELY(0.999);
+float unlikely = PROB_UNLIKELY(0.999);
+
+IdealKit ideal(gvn(), control(), merged_memory());
+#define __ ideal.
+
+const int reference_type_offset = instanceKlass::reference_type_offset_in_bytes() +
+sizeof(oopDesc);
+
+Node* referent_off = __ ConI(java_lang_ref_Reference::referent_offset);
+
+__ if_then(offset, BoolTest::eq, referent_off, unlikely); {
+__ if_then(base_oop, BoolTest::ne, null(), likely); {
+
+// Update graphKit memory and control from IdealKit.
+set_all_memory(__ merged_memory());
+set_control(__ ctrl());
+
+Node* ref_klass_con = makecon(TypeKlassPtr::make(env()->Reference_klass()));
+Node* is_instof = gen_instanceof(base_oop, ref_klass_con);
+
+// Update IdealKit memory and control from graphKit.
+__ set_all_memory(merged_memory());
+__ set_ctrl(control());
+
+Node* one = __ ConI(1);
+
+__ if_then(is_instof, BoolTest::eq, one, unlikely); {
+
+// Update graphKit from IdeakKit.
+set_all_memory(__ merged_memory());
+set_control(__ ctrl());
+
+// Use the pre-barrier to record the value in the referent field
+pre_barrier(false /* do_load */,
+__ ctrl(),
+NULL /* obj */, NULL /* adr */, -1 /* alias_idx */, NULL /* val */, NULL /* val_type */,
+pre_val /* pre_val */,
+T_OBJECT);
+
+// Update IdealKit from graphKit.
+__ set_all_memory(merged_memory());
+__ set_ctrl(control());
+
+} __ end_if(); // _ref_type != ref_none
+} __ end_if(); // base != NULL
+} __ end_if(); // offset == referent_offset
+
+// Final sync IdealKit and GraphKit.
+sync_kit(ideal);
+#undef __
+}
+
+
 // Interpret Unsafe.fieldOffset cookies correctly:
 extern jlong Unsafe_field_offset_to_byte_offset(jlong field_offset);
 
 bool LibraryCallKit::inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile) {
 if (callee()->is_static()) return false; // caller must have the capability!
@@ -2150,13 +2288,15 @@
 }
 
 // Build address expression. See the code in inline_unsafe_prefetch.
 Node *adr;
 Node *heap_base_oop = top();
+Node* offset = top();
+
 if (!is_native_ptr) {
 // The offset is a value produced by Unsafe.staticFieldOffset or Unsafe.objectFieldOffset
-Node* offset = pop_pair();
+offset = pop_pair();
 // The base is either a Java object or a value produced by Unsafe.staticFieldBase
 Node* base = pop();
 // We currently rely on the cookies produced by Unsafe.xxxFieldOffset
 // to be plain byte offsets, which are also the same as those accepted
 // by oopDesc::field_base.
@@ -2193,10 +2333,17 @@
 // the barriers get omitted and the unsafe reference begins to "pollute"
 // the alias analysis of the rest of the graph, either Compile::can_alias
 // or Compile::must_alias will throw a diagnostic assert.)
 bool need_mem_bar = (alias_type->adr_type() == TypeOopPtr::BOTTOM);
 
+// If we are reading the value of the referent field of a Reference
+// object (either by using Unsafe directly or through reflection)
+// then, if G1 is enabled, we need to record the referent in an
+// SATB log buffer using the pre-barrier mechanism.
+bool need_read_barrier = UseG1GC && !is_native_ptr && !is_store &&
+offset != top() && heap_base_oop != top();
+
 if (!is_store && type == T_OBJECT) {
 // Attempt to infer a sharper value type from the offset and base type.
 ciKlass* sharpened_klass = NULL;
 
 // See if it is an instance field, with an object type.
@@ -2276,12 +2423,17 @@
 case T_CHAR:
 case T_BYTE:
 case T_SHORT:
 case T_INT:
 case T_FLOAT:
+push(p);
+break;
 case T_OBJECT:
-push( p );
+if (need_read_barrier) {
+insert_g1_pre_barrier(heap_base_oop, offset, p);
+}
+push(p);
 break;
 case T_ADDRESS:
 // Cast to an int type.
 p = _gvn.transform( new (C, 2) CastP2XNode(NULL,p) );
 p = ConvX2L(p);
@@ -2534,11 +2686,14 @@
 cas = _gvn.transform(new (C, 5) CompareAndSwapLNode(control(), mem, adr, newval, oldval));
 break;
 case T_OBJECT:
 // reference stores need a store barrier.
 // (They don't if CAS fails, but it isn't worth checking.)
-pre_barrier(control(), base, adr, alias_idx, newval, value_type->make_oopptr(), T_OBJECT);
+pre_barrier(true /* do_load*/,
+control(), base, adr, alias_idx, newval, value_type->make_oopptr(),
+NULL /* pre_val*/,
+T_OBJECT);
 #ifdef _LP64
 if (adr->bottom_type()->is_ptr_to_narrowoop()) {
 Node *newval_enc = _gvn.transform(new (C, 2) EncodePNode(newval, newval->bottom_type()->make_narrowoop()));
 Node *oldval_enc = _gvn.transform(new (C, 2) EncodePNode(oldval, oldval->bottom_type()->make_narrowoop()));
 cas = _gvn.transform(new (C, 5) CompareAndSwapNNode(control(), mem, adr,
@@ -5310,5 +5465,46 @@
 make_runtime_call(RC_LEAF|RC_NO_FP,
 OptoRuntime::fast_arraycopy_Type(),
 copyfunc_addr, copyfunc_name, adr_type,
 src_start, dest_start, copy_length XTOP);
 }
+
+//----------------------------inline_reference_get----------------------------
+
+bool LibraryCallKit::inline_reference_get() {
+const int nargs = 1; // self
+
+guarantee(java_lang_ref_Reference::referent_offset > 0,
+"should have already been set");
+
+int referent_offset = java_lang_ref_Reference::referent_offset;
+
+// Restore the stack and pop off the argument
+_sp += nargs;
+Node *reference_obj = pop();
+
+// Null check on self without removing any arguments.
+_sp += nargs;
+reference_obj = do_null_check(reference_obj, T_OBJECT);
+_sp -= nargs;;
+
+if (stopped()) return true;
+
+Node *adr = basic_plus_adr(reference_obj, reference_obj, referent_offset);
+
+ciInstanceKlass* klass = env()->Object_klass();
+const TypeOopPtr* object_type = TypeOopPtr::make_from_klass(klass);
+
+Node* no_ctrl = NULL;
+Node *result = make_load(no_ctrl, adr, object_type, T_OBJECT);
+
+// Use the pre-barrier to record the value in the referent field
+pre_barrier(false /* do_load */,
+control(),
+NULL /* obj */, NULL /* adr */, -1 /* alias_idx */, NULL /* val */, NULL /* val_type */,
+result /* pre_val */,
+T_OBJECT);
+
+push(result);
+return true;
+}
+
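For context on what the new code intrinsifies: the change teaches C2 to treat java.lang.ref.Reference.get() specially under G1, because the load of the referent field must be fed to G1's SATB pre-barrier (via the new inline_reference_get and insert_g1_pre_barrier paths above). A minimal Java sketch of the call shape affected; the class and usage below are illustrative only and are not part of this changeset:

    import java.lang.ref.WeakReference;

    public class ReferentReadDemo {
        public static void main(String[] args) {
            Object payload = new Object();
            WeakReference<Object> ref = new WeakReference<>(payload);

            // Reference.get() is essentially a load of the referent field.
            // Under G1's SATB concurrent marking, the loaded referent must be
            // logged by a pre-barrier so that an object kept reachable only
            // through this read is not missed by the concurrent marker.
            Object o = ref.get();
            System.out.println("referent still reachable: " + (o != null));
        }
    }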