comparison src/share/vm/opto/graphKit.cpp @ 14422:2b8e28fdf503

Merge
author kvn
date Tue, 05 Nov 2013 17:38:04 -0800
parents b2ee5dc63353
children 096c224171c4 de6a9e811145 2113136690bc
comparing 14421:3068270ba476 (base) with 14422:2b8e28fdf503
@@ -637,10 +637,11 @@
   debug_only(kit->verify_map());
   _kit = kit;
   _map = kit->map();   // preserve the map
   _sp  = kit->sp();
   kit->set_map(clone_map ? kit->clone_map() : NULL);
+  Compile::current()->inc_preserve_jvm_state();
 #ifdef ASSERT
   _bci = kit->bci();
   Parse* parser = kit->is_Parse();
   int block = (parser == NULL || parser->block() == NULL) ? -1 : parser->block()->rpo();
   _block = block;
@@ -654,10 +655,11 @@
   int block = (parser == NULL || parser->block() == NULL) ? -1 : parser->block()->rpo();
   assert(block == _block, "block must not shift");
 #endif
   kit->set_map(_map);
   kit->set_sp(_sp);
+  Compile::current()->dec_preserve_jvm_state();
 }
 
 
 //-----------------------------BuildCutout-------------------------------------
 BuildCutout::BuildCutout(GraphKit* kit, Node* p, float prob, float cnt)
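The inc/dec pair added above turns PreserveJVMState into a scoped guard around a compile-wide counter, which replace_in_map() (next hunk) consults before touching caller maps. A minimal sketch of the pattern, assuming the Compile accessors named in this changeset (the guard class itself is illustrative, not from the file):

    // Sketch only: RAII counter guard; mirrors what the constructor and
    // destructor edits above do, with inc/dec kept balanced by scoping.
    class PreserveJVMStateGuard {
      Compile* _c;
     public:
      PreserveJVMStateGuard() : _c(Compile::current()) { _c->inc_preserve_jvm_state(); }
      ~PreserveJVMStateGuard()                         { _c->dec_preserve_jvm_state(); }
    };
    // While any guard is live, has_preserve_jvm_state() is true and
    // replace_in_map() must not rewrite parent JVM states (they are
    // shallow copies and would go stale).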
@@ -1371,21 +1373,74 @@
 }
 
 
 //--------------------------replace_in_map-------------------------------------
 void GraphKit::replace_in_map(Node* old, Node* neww) {
-  this->map()->replace_edge(old, neww);
+  if (old == neww) {
+    return;
+  }
+
+  map()->replace_edge(old, neww);
 
   // Note: This operation potentially replaces any edge
   // on the map. This includes locals, stack, and monitors
   // of the current (innermost) JVM state.
 
-  // We can consider replacing in caller maps.
-  // The idea would be that an inlined function's null checks
-  // can be shared with the entire inlining tree.
-  // The expense of doing this is that the PreserveJVMState class
-  // would have to preserve caller states too, with a deep copy.
+  if (!ReplaceInParentMaps) {
+    return;
+  }
+
+  // PreserveJVMState doesn't do a deep copy so we can't modify
+  // parents
+  if (Compile::current()->has_preserve_jvm_state()) {
+    return;
+  }
+
+  Parse* parser = is_Parse();
+  bool progress = true;
+  Node* ctrl = map()->in(0);
+  // Follow the chain of parsers and see whether the update can be
+  // done in the map of callers. We can do the replace for a caller if
+  // the current control post dominates the control of a caller.
+  while (parser != NULL && parser->caller() != NULL && progress) {
+    progress = false;
+    Node* parent_map = parser->caller()->map();
+    assert(parser->exits().map()->jvms()->depth() == parser->caller()->depth(), "map mismatch");
+
+    Node* parent_ctrl = parent_map->in(0);
+
+    while (parent_ctrl->is_Region()) {
+      Node* n = parent_ctrl->as_Region()->is_copy();
+      if (n == NULL) {
+        break;
+      }
+      parent_ctrl = n;
+    }
+
+    for (;;) {
+      if (ctrl == parent_ctrl) {
+        // update the map of the exits which is the one that will be
+        // used when compilation resumes after inlining
+        parser->exits().map()->replace_edge(old, neww);
+        progress = true;
+        break;
+      }
+      if (ctrl->is_Proj() && ctrl->as_Proj()->is_uncommon_trap_if_pattern(Deoptimization::Reason_none)) {
+        ctrl = ctrl->in(0)->in(0);
+      } else if (ctrl->is_Region()) {
+        Node* n = ctrl->as_Region()->is_copy();
+        if (n == NULL) {
+          break;
+        }
+        ctrl = n;
+      } else {
+        break;
+      }
+    }
+
+    parser = parser->parent_parser();
+  }
 }
 
 
 //=============================================================================
 //--------------------------------memory---------------------------------------
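The caller walk above hinges on one question: does the current control post-dominate a caller's control once "transparent" nodes are skipped? A condensed restatement of that test, for clarity only (not a separate function in the file; helper behavior as used in the hunk):

    // Sketch: climb `ctrl` through trap-guarding If projections and
    // single-input ("copy") Regions; reaching parent_ctrl means every
    // surviving path from the caller runs through us, so the caller's
    // exit map may safely see the replacement too.
    static bool post_dominates_sketch(Node* ctrl, Node* parent_ctrl) {
      for (;;) {
        if (ctrl == parent_ctrl)  return true;
        if (ctrl->is_Proj() &&
            ctrl->as_Proj()->is_uncommon_trap_if_pattern(Deoptimization::Reason_none)) {
          ctrl = ctrl->in(0)->in(0);             // other branch only traps
        } else if (ctrl->is_Region() && ctrl->as_Region()->is_copy() != NULL) {
          ctrl = ctrl->as_Region()->is_copy();   // degenerate merge: skip
        } else {
          return false;                          // genuine merge: give up
        }
      }
    }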
@@ -1497,10 +1552,29 @@
   case BarrierSet::Other:
   default      :
     ShouldNotReachHere();
 
   }
+}
+
+bool GraphKit::can_move_pre_barrier() const {
+  BarrierSet* bs = Universe::heap()->barrier_set();
+  switch (bs->kind()) {
+    case BarrierSet::G1SATBCT:
+    case BarrierSet::G1SATBCTLogging:
+      return true; // Can move it if no safepoint
+
+    case BarrierSet::CardTableModRef:
+    case BarrierSet::CardTableExtension:
+    case BarrierSet::ModRef:
+      return true; // There is no pre-barrier
+
+    case BarrierSet::Other:
+    default      :
+      ShouldNotReachHere();
+  }
+  return false;
 }
 
 void GraphKit::post_barrier(Node* ctl,
                             Node* store,
                             Node* obj,
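A hedged sketch of the intended use of can_move_pre_barrier(): a caller that knows the old value only after an atomic operation can defer the SATB pre-barrier past it, provided no safepoint intervenes. The caller below is illustrative, not code from this file; only the pre_barrier() call shape is taken from GraphKit:

    // Sketch: deferring the pre-barrier until after a compare-and-swap
    // (hypothetical caller; old_val stands for the CAS result node).
    if (kit->can_move_pre_barrier()) {
      // No safepoint between the CAS and here: emit the barrier afterwards,
      // handing it the already-loaded old value (so do_load is false).
      kit->pre_barrier(false /* do_load */, kit->control(), NULL, NULL,
                       max_juint, NULL, NULL, old_val /* pre_val */, T_OBJECT);
    } else {
      // Conservative: emit the barrier first and let it load the old value.
    }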
@@ -2018,10 +2092,108 @@
       // If any parameters are doubles, they must be rounded before
       // the call, dstore_rounding does gvn.transform
       Node *arg = argument(j);
       arg = dstore_rounding(arg);
       set_argument(j, arg);
+    }
+  }
+}
+
+/**
+ * Record profiling data exact_kls for Node n with the type system so
+ * that it can propagate it (speculation)
+ *
+ * @param n          node that the type applies to
+ * @param exact_kls  type from profiling
+ *
+ * @return           node with improved type
+ */
+Node* GraphKit::record_profile_for_speculation(Node* n, ciKlass* exact_kls) {
+  const TypeOopPtr* current_type = _gvn.type(n)->isa_oopptr();
+  assert(UseTypeSpeculation, "type speculation must be on");
+  if (exact_kls != NULL &&
+      // nothing to improve if type is already exact
+      (current_type == NULL ||
+       (!current_type->klass_is_exact() &&
+        (current_type->speculative() == NULL ||
+         !current_type->speculative()->klass_is_exact())))) {
+    const TypeKlassPtr* tklass = TypeKlassPtr::make(exact_kls);
+    const TypeOopPtr* xtype = tklass->as_instance_type();
+    assert(xtype->klass_is_exact(), "Should be exact");
+
+    // Build a type with a speculative type (what we think we know
+    // about the type but will need a guard when we use it)
+    const TypeOopPtr* spec_type = TypeOopPtr::make(TypePtr::BotPTR, Type::OffsetBot, TypeOopPtr::InstanceBot, xtype);
+    // We're changing the type, we need a new cast node to carry the
+    // new type. The new type depends on the control: what profiling
+    // tells us is only valid from here as far as we can tell.
+    Node* cast = new(C) CastPPNode(n, spec_type);
+    cast->init_req(0, control());
+    cast = _gvn.transform(cast);
+    replace_in_map(n, cast);
+    n = cast;
+  }
+  return n;
+}
+
+/**
+ * Record profiling data from receiver profiling at an invoke with the
+ * type system so that it can propagate it (speculation)
+ *
+ * @param n  receiver node
+ *
+ * @return   node with improved type
+ */
+Node* GraphKit::record_profiled_receiver_for_speculation(Node* n) {
+  if (!UseTypeSpeculation) {
+    return n;
+  }
+  ciKlass* exact_kls = profile_has_unique_klass();
+  return record_profile_for_speculation(n, exact_kls);
+}
+
+/**
+ * Record profiling data from argument profiling at an invoke with the
+ * type system so that it can propagate it (speculation)
+ *
+ * @param dest_method  target method for the call
+ * @param bc           what invoke bytecode is this?
+ */
+void GraphKit::record_profiled_arguments_for_speculation(ciMethod* dest_method, Bytecodes::Code bc) {
+  if (!UseTypeSpeculation) {
+    return;
+  }
+  const TypeFunc* tf    = TypeFunc::make(dest_method);
+  int             nargs = tf->_domain->_cnt - TypeFunc::Parms;
+  int skip = Bytecodes::has_receiver(bc) ? 1 : 0;
+  for (int j = skip, i = 0; j < nargs && i < TypeProfileArgsLimit; j++) {
+    const Type *targ = tf->_domain->field_at(j + TypeFunc::Parms);
+    if (targ->basic_type() == T_OBJECT || targ->basic_type() == T_ARRAY) {
+      ciKlass* better_type = method()->argument_profiled_type(bci(), i);
+      if (better_type != NULL) {
+        record_profile_for_speculation(argument(j), better_type);
+      }
+      i++;
+    }
+  }
+}
+
+/**
+ * Record profiling data from parameter profiling at an invoke with
+ * the type system so that it can propagate it (speculation)
+ */
+void GraphKit::record_profiled_parameters_for_speculation() {
+  if (!UseTypeSpeculation) {
+    return;
+  }
+  for (int i = 0, j = 0; i < method()->arg_size() ; i++) {
+    if (_gvn.type(local(i))->isa_oopptr()) {
+      ciKlass* better_type = method()->parameter_profiled_type(j);
+      if (better_type != NULL) {
+        record_profile_for_speculation(local(i), better_type);
+      }
+      j++;
     }
   }
 }
 
 void GraphKit::round_double_result(ciMethod* dest_method) {
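record_profile_for_speculation() is the producer side of type speculation: the profiled klass rides along as the speculative component of an oop type, attached via a control-dependent CastPP. A condensed sketch of producing and later consuming such a type (restating the code above; `profiled_kls` and `n` are stand-in names):

    // Sketch: attach a speculative klass to value `n` (simplified).
    const TypeKlassPtr* tklass = TypeKlassPtr::make(profiled_kls);
    const TypeOopPtr*   xtype  = tklass->as_instance_type();   // exact type
    const TypeOopPtr*   spec   = TypeOopPtr::make(TypePtr::BotPTR, Type::OffsetBot,
                                                  TypeOopPtr::InstanceBot, xtype);
    Node* cast = new(C) CastPPNode(n, spec);
    cast->init_req(0, control());     // profile only valid from here on
    cast = _gvn.transform(cast);
    // ...and read it back wherever a sharper type would pay off:
    ciKlass* k = _gvn.type(cast)->is_oopptr()->speculative_type();  // profiled_kls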
@@ -2101,11 +2273,11 @@
 
 //------------------------------null_check_oop---------------------------------
 // Null check oop.  Set null-path control into Region in slot 3.
 // Make a cast-not-nullness use the other not-null control.  Return cast.
 Node* GraphKit::null_check_oop(Node* value, Node* *null_control,
-                               bool never_see_null) {
+                               bool never_see_null, bool safe_for_replace) {
   // Initial NULL check taken path
   (*null_control) = top();
   Node* cast = null_check_common(value, T_OBJECT, false, null_control);
 
   // Generate uncommon_trap:
@@ -2118,10 +2290,13 @@
     set_control(*null_control);
     replace_in_map(value, null());
     uncommon_trap(Deoptimization::Reason_null_check,
                   Deoptimization::Action_make_not_entrant);
     (*null_control) = top();    // NULL path is dead
+  }
+  if ((*null_control) == top() && safe_for_replace) {
+    replace_in_map(value, cast);
   }
 
   // Cast away null-ness on the result
   return cast;
 }
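The new safe_for_replace flag gates the map update: replacing `value` with the not-null cast is only done when the caller has vouched that rewriting the map (and, via replace_in_map(), possibly caller maps) is legal at this bytecode. A usage sketch, mirroring gen_checkcast later in this file (values hypothetical):

    // Sketch: null-check an incoming oop and, when the null path is dead,
    // let all downstream uses see the sharpened not-null value via the map.
    Node* null_ctl = top();
    Node* not_null_obj = null_check_oop(obj, &null_ctl,
                                        /*never_see_null*/  true,
                                        /*safe_for_replace*/ true);
    // If profiling was right, null_ctl stays top(): obj has been replaced
    // by the cast, so later null checks on it fold away for free.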
@@ -2556,30 +2731,30 @@
 
 //------------------------maybe_cast_profiled_receiver-------------------------
 // If the profile has seen exactly one type, narrow to exactly that type.
 // Subsequent type checks will always fold up.
 Node* GraphKit::maybe_cast_profiled_receiver(Node* not_null_obj,
-                                             ciProfileData* data,
-                                             ciKlass* require_klass) {
+                                             ciKlass* require_klass,
+                                             ciKlass* spec_klass,
+                                             bool safe_for_replace) {
   if (!UseTypeProfile || !TypeProfileCasts) return NULL;
-  if (data == NULL)  return NULL;
 
   // Make sure we haven't already deoptimized from this tactic.
   if (too_many_traps(Deoptimization::Reason_class_check))
     return NULL;
 
   // (No, this isn't a call, but it's enough like a virtual call
   // to use the same ciMethod accessor to get the profile info...)
-  ciCallProfile profile = method()->call_profile_at_bci(bci());
-  if (profile.count() >= 0 &&         // no cast failures here
-      profile.has_receiver(0) &&
-      profile.morphism() == 1) {
-    ciKlass* exact_kls = profile.receiver(0);
+  // If we have a speculative type use it instead of profiling (which
+  // may not help us)
+  ciKlass* exact_kls = spec_klass == NULL ? profile_has_unique_klass() : spec_klass;
+  if (exact_kls != NULL) {// no cast failures here
     if (require_klass == NULL ||
         static_subtype_check(require_klass, exact_kls) == SSC_always_true) {
-      // If we narrow the type to match what the type profile sees,
-      // we can then remove the rest of the cast.
+      // If we narrow the type to match what the type profile sees or
+      // the speculative type, we can then remove the rest of the
+      // cast.
       // This is a win, even if the exact_kls is very specific,
       // because downstream operations, such as method calls,
       // will often benefit from the sharper type.
       Node* exact_obj = not_null_obj; // will get updated in place...
       Node* slow_ctl = type_check_receiver(exact_obj, exact_kls, 1.0,
@@ -2587,24 +2762,74 @@
       { PreserveJVMState pjvms(this);
         set_control(slow_ctl);
         uncommon_trap(Deoptimization::Reason_class_check,
                       Deoptimization::Action_maybe_recompile);
       }
+      if (safe_for_replace) {
+        replace_in_map(not_null_obj, exact_obj);
+      }
+      return exact_obj;
+    }
+    // assert(ssc == SSC_always_true)... except maybe the profile lied to us.
+  }
+
+  return NULL;
+}
+
+/**
+ * Cast obj to type and emit guard unless we had too many traps here
+ * already
+ *
+ * @param obj       node being casted
+ * @param type      type to cast the node to
+ * @param not_null  true if we know node cannot be null
+ */
+Node* GraphKit::maybe_cast_profiled_obj(Node* obj,
+                                        ciKlass* type,
+                                        bool not_null) {
+  // type == NULL if profiling tells us this object is always null
+  if (type != NULL) {
+    if (!too_many_traps(Deoptimization::Reason_null_check) &&
+        !too_many_traps(Deoptimization::Reason_class_check)) {
+      Node* not_null_obj = NULL;
+      // not_null is true if we know the object is not null and
+      // there's no need for a null check
+      if (!not_null) {
+        Node* null_ctl = top();
+        not_null_obj = null_check_oop(obj, &null_ctl, true, true);
+        assert(null_ctl->is_top(), "no null control here");
+      } else {
+        not_null_obj = obj;
+      }
+
+      Node* exact_obj = not_null_obj;
+      ciKlass* exact_kls = type;
+      Node* slow_ctl = type_check_receiver(exact_obj, exact_kls, 1.0,
+                                           &exact_obj);
+      {
+        PreserveJVMState pjvms(this);
+        set_control(slow_ctl);
+        uncommon_trap(Deoptimization::Reason_class_check,
+                      Deoptimization::Action_maybe_recompile);
+      }
       replace_in_map(not_null_obj, exact_obj);
-      return exact_obj;
+      obj = exact_obj;
     }
-    // assert(ssc == SSC_always_true)... except maybe the profile lied to us.
-  }
-
-  return NULL;
-}
-
+  } else {
+    if (!too_many_traps(Deoptimization::Reason_null_assert)) {
+      Node* exact_obj = null_assert(obj);
+      replace_in_map(obj, exact_obj);
+      obj = exact_obj;
+    }
+  }
+  return obj;
+}
 
 //-------------------------------gen_instanceof--------------------------------
 // Generate an instance-of idiom.  Used by both the instance-of bytecode
 // and the reflective instance-of call.
-Node* GraphKit::gen_instanceof(Node* obj, Node* superklass) {
+Node* GraphKit::gen_instanceof(Node* obj, Node* superklass, bool safe_for_replace) {
   kill_dead_locals();   // Benefit all the uncommon traps
   assert( !stopped(), "dead parse path should be checked in callers" );
   assert(!TypePtr::NULL_PTR->higher_equal(_gvn.type(superklass)->is_klassptr()),
          "must check for not-null not-dead klass in callers");
 
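maybe_cast_profiled_obj() is the consumer of speculative types away from call sites: given a klass the type system believes in (or NULL, meaning profiling says "always null"), it emits the matching guard plus uncommon trap and returns the sharpened value. A hedged usage sketch (the caller shown is illustrative, not code from this hunk):

    // Sketch: sharpen `obj` from its speculative type before using it.
    const TypeOopPtr* t = _gvn.type(obj)->isa_oopptr();
    ciKlass* spec_type = (t == NULL) ? NULL : t->speculative_type();
    if (spec_type != NULL) {
      obj = maybe_cast_profiled_obj(obj, spec_type, /*not_null*/ false);
      // The fast path now sees obj with exact klass spec_type; if the
      // speculation was wrong, a Reason_class_check trap re-enters the
      // interpreter and too_many_traps() disables the tactic on recompile.
    }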
@@ -2621,11 +2846,11 @@
   bool never_see_null = (ProfileDynamicTypes  // aggressive use of profile
                          && seems_never_null(obj, data));
 
   // Null check; get casted pointer; set region slot 3
   Node* null_ctl = top();
-  Node* not_null_obj = null_check_oop(obj, &null_ctl, never_see_null);
+  Node* not_null_obj = null_check_oop(obj, &null_ctl, never_see_null, safe_for_replace);
 
   // If not_null_obj is dead, only null-path is taken
   if (stopped()) {              // Doing instance-of on a NULL?
     set_control(null_ctl);
     return intcon(0);
@@ -2638,18 +2863,41 @@
   assert(_null_path == PATH_LIMIT-1, "delete last");
   region->del_req(_null_path);
   phi   ->del_req(_null_path);
   }
 
-  if (ProfileDynamicTypes && data != NULL) {
-    Node* cast_obj = maybe_cast_profiled_receiver(not_null_obj, data, NULL);
-    if (stopped()) {            // Profile disagrees with this path.
-      set_control(null_ctl);    // Null is the only remaining possibility.
-      return intcon(0);
-    }
-    if (cast_obj != NULL)
-      not_null_obj = cast_obj;
+  // Do we know the type check always succeeds?
+  bool known_statically = false;
+  if (_gvn.type(superklass)->singleton()) {
+    ciKlass* superk = _gvn.type(superklass)->is_klassptr()->klass();
+    ciKlass* subk = _gvn.type(obj)->is_oopptr()->klass();
+    if (subk != NULL && subk->is_loaded()) {
+      int static_res = static_subtype_check(superk, subk);
+      known_statically = (static_res == SSC_always_true || static_res == SSC_always_false);
+    }
+  }
+
+  if (known_statically && UseTypeSpeculation) {
+    // If we know the type check always succeeds then we don't use the
+    // profiling data at this bytecode. Don't lose it, feed it to the
+    // type system as a speculative type.
+    not_null_obj = record_profiled_receiver_for_speculation(not_null_obj);
+  } else {
+    const TypeOopPtr* obj_type = _gvn.type(obj)->is_oopptr();
+    // We may not have profiling here or it may not help us. If we
+    // have a speculative type use it to perform an exact cast.
+    ciKlass* spec_obj_type = obj_type->speculative_type();
+    if (spec_obj_type != NULL || (ProfileDynamicTypes && data != NULL)) {
+      Node* cast_obj = maybe_cast_profiled_receiver(not_null_obj, NULL, spec_obj_type, safe_for_replace);
+      if (stopped()) {            // Profile disagrees with this path.
+        set_control(null_ctl);    // Null is the only remaining possibility.
+        return intcon(0);
+      }
+      if (cast_obj != NULL) {
+        not_null_obj = cast_obj;
+      }
+    }
   }
 
   // Load the object's klass
   Node* obj_klass = load_object_klass(not_null_obj);
 
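Two outcomes matter in the hunk above: when the subtype relation is statically decidable the runtime test folds anyway and the profile is merely banked as a speculative type; otherwise a unique profiled or speculative klass can pay for a trap-guarded exact cast. A distilled restatement (sketch only; names as in the hunk):

    // Sketch: how gen_instanceof now routes profile data (simplified).
    if (known_statically && UseTypeSpeculation) {
      // Check folds at compile time; keep the profile for later consumers.
      not_null_obj = record_profiled_receiver_for_speculation(not_null_obj);
    } else if (spec_obj_type != NULL || (ProfileDynamicTypes && data != NULL)) {
      Node* cast_obj = maybe_cast_profiled_receiver(not_null_obj, NULL,
                                                    spec_obj_type, safe_for_replace);
      if (cast_obj != NULL)  not_null_obj = cast_obj;  // NULL: not worth a guard
    }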
@@ -2692,14 +2940,17 @@
   if (tk->singleton()) {
     const TypeOopPtr* objtp = _gvn.type(obj)->isa_oopptr();
     if (objtp != NULL && objtp->klass() != NULL) {
       switch (static_subtype_check(tk->klass(), objtp->klass())) {
       case SSC_always_true:
-        return obj;
+        // If we know the type check always succeeds then we don't use
+        // the profiling data at this bytecode. Don't lose it, feed it
+        // to the type system as a speculative type.
+        return record_profiled_receiver_for_speculation(obj);
       case SSC_always_false:
         // It needs a null check because a null will *pass* the cast check.
         // A non-null value will always produce an exception.
         return null_assert(obj);
       }
     }
   }
 
@@ -2706,11 +2957,13 @@
   ciProfileData* data = NULL;
+  bool safe_for_replace = false;
   if (failure_control == NULL) {        // use MDO in regular case only
     assert(java_bc() == Bytecodes::_aastore ||
            java_bc() == Bytecodes::_checkcast,
            "interpreter profiles type checks only for these BCs");
     data = method()->method_data()->bci_to_data(bci());
+    safe_for_replace = true;
   }
 
   // Make the merge point
   enum { _obj_path = 1, _null_path, PATH_LIMIT };
   RegionNode* region = new (C) RegionNode(PATH_LIMIT);
@@ -2721,11 +2974,11 @@
   bool never_see_null = ((failure_control == NULL)  // regular case only
                          && seems_never_null(obj, data));
 
   // Null check; get casted pointer; set region slot 3
   Node* null_ctl = top();
-  Node* not_null_obj = null_check_oop(obj, &null_ctl, never_see_null);
+  Node* not_null_obj = null_check_oop(obj, &null_ctl, never_see_null, safe_for_replace);
 
   // If not_null_obj is dead, only null-path is taken
   if (stopped()) {              // Doing instance-of on a NULL?
     set_control(null_ctl);
     return null();
@@ -2739,16 +2992,21 @@
   region->del_req(_null_path);
   phi   ->del_req(_null_path);
   }
 
   Node* cast_obj = NULL;
-  if (data != NULL &&
-      // Counter has never been decremented (due to cast failure).
-      // ...This is a reasonable thing to expect.  It is true of
-      // all casts inserted by javac to implement generic types.
-      data->as_CounterData()->count() >= 0) {
-    cast_obj = maybe_cast_profiled_receiver(not_null_obj, data, tk->klass());
+  const TypeOopPtr* obj_type = _gvn.type(obj)->is_oopptr();
+  // We may not have profiling here or it may not help us. If we have
+  // a speculative type use it to perform an exact cast.
+  ciKlass* spec_obj_type = obj_type->speculative_type();
+  if (spec_obj_type != NULL ||
+      (data != NULL &&
+       // Counter has never been decremented (due to cast failure).
+       // ...This is a reasonable thing to expect.  It is true of
+       // all casts inserted by javac to implement generic types.
+       data->as_CounterData()->count() >= 0)) {
+    cast_obj = maybe_cast_profiled_receiver(not_null_obj, tk->klass(), spec_obj_type, safe_for_replace);
     if (cast_obj != NULL) {
       if (failure_control != NULL) // failure is now impossible
         (*failure_control) = top();
       // adjust the type of the phi to the exact klass:
       phi->raise_bottom_type(_gvn.type(cast_obj)->meet(TypePtr::NULL_PTR));
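The counter test above encodes failure history: per the comment, the count is decremented when a cast actually fails, so a negative value means this checkcast has already seen failures and is not worth speculating on. Restated as a sketch (names from the hunk):

    // Sketch: when gen_checkcast may try the trap-guarded exact cast.
    bool no_failures_seen = (data != NULL &&
                             data->as_CounterData()->count() >= 0);
    bool try_exact_cast   = (spec_obj_type != NULL) || no_failures_seen;
    // Speculative types carry their own trap budget (too_many_traps inside
    // maybe_cast_profiled_receiver), so they may be tried even without
    // usable local profile data.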
@@ -3549,10 +3807,12 @@
     assert(pre_val == NULL, "loaded already?");
     assert(val_type != NULL, "need a type");
   } else {
     // In this case both val_type and alias_idx are unused.
     assert(pre_val != NULL, "must be loaded already");
+    // Nothing to be done if pre_val is null.
+    if (pre_val->bottom_type() == TypePtr::NULL_PTR) return;
     assert(pre_val->bottom_type()->basic_type() == T_OBJECT, "or we shouldn't be here");
   }
   assert(bt == T_OBJECT, "or we shouldn't be here");
 
   IdealKit ideal(this, true);
@@ -3585,19 +3845,19 @@
 
   // Now some of the values
   Node* marking = __ load(__ ctrl(), marking_adr, TypeInt::INT, active_type, Compile::AliasIdxRaw);
 
   // if (!marking)
-  __ if_then(marking, BoolTest::ne, zero); {
+  __ if_then(marking, BoolTest::ne, zero, unlikely); {
     BasicType index_bt = TypeX_X->basic_type();
     assert(sizeof(size_t) == type2aelembytes(index_bt), "Loading G1 PtrQueue::_index with wrong size.");
     Node* index   = __ load(__ ctrl(), index_adr, TypeX_X, index_bt, Compile::AliasIdxRaw);
 
     if (do_load) {
       // load original value
       // alias_idx correct??
-      pre_val = __ load(no_ctrl, adr, val_type, bt, alias_idx);
+      pre_val = __ load(__ ctrl(), adr, val_type, bt, alias_idx);
     }
 
     // if (pre_val != NULL)
     __ if_then(pre_val, BoolTest::ne, null()); {
       Node* buffer  = __ load(__ ctrl(), buffer_adr, TypeRawPtr::NOTNULL, T_ADDRESS, Compile::AliasIdxRaw);
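For orientation, the IdealKit code above emits control flow equivalent to the following C-like pseudocode. This is a sketch only, not VM source; the field names are simplified stand-ins for the per-thread SATB queue slots addressed via marking_adr, index_adr and buffer_adr:

    // Sketch of the emitted SATB pre-barrier fast path.
    if (thread->satb_queue_active != 0) {      // "marking" is on
      oop pre_val = *adr;                      // previous value (do_load case)
      if (pre_val != NULL) {
        if (thread->satb_index != 0) {         // room left in local buffer?
          thread->satb_index -= sizeof(oop);
          *(oop*)(thread->satb_buffer + thread->satb_index) = pre_val;
        } else {
          g1_wb_pre(pre_val, thread);          // runtime slow path
        }
      }
    }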
@@ -3690,11 +3950,12 @@
   Node* tls = __ thread(); // ThreadLocalStorage
 
   Node* no_base = __ top();
   float likely  = PROB_LIKELY(0.999);
   float unlikely  = PROB_UNLIKELY(0.999);
-  Node* zero = __ ConI(0);
+  Node* young_card = __ ConI((jint)G1SATBCardTableModRefBS::g1_young_card_val());
+  Node* dirty_card = __ ConI((jint)CardTableModRefBS::dirty_card_val());
   Node* zeroX = __ ConX(0);
 
   // Get the alias_index for raw card-mark memory
   const TypePtr* card_type = TypeRawPtr::BOTTOM;
 
@@ -3746,12 +4007,20 @@
   // Ok must mark the card if not already dirty
 
   // load the original value of the card
   Node* card_val = __ load(__ ctrl(), card_adr, TypeInt::INT, T_BYTE, Compile::AliasIdxRaw);
 
-  __ if_then(card_val, BoolTest::ne, zero); {
-    g1_mark_card(ideal, card_adr, oop_store, alias_idx, index, index_adr, buffer, tf);
+  __ if_then(card_val, BoolTest::ne, young_card); {
+    sync_kit(ideal);
+    // Use Op_MemBarVolatile to achieve the effect of a StoreLoad barrier.
+    insert_mem_bar(Op_MemBarVolatile, oop_store);
+    __ sync_kit(this);
+
+    Node* card_val_reload = __ load(__ ctrl(), card_adr, TypeInt::INT, T_BYTE, Compile::AliasIdxRaw);
+    __ if_then(card_val_reload, BoolTest::ne, dirty_card); {
+      g1_mark_card(ideal, card_adr, oop_store, alias_idx, index, index_adr, buffer, tf);
+    } __ end_if();
   } __ end_if();
     } __ end_if();
   } __ end_if();
   } else {
     // Object.clone() intrinsic uses this path.
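The reload-after-membar pattern above exists because concurrent refinement can clean a card while the mutator's oop store is still in its store buffer; re-checking the card only after a StoreLoad barrier closes that window. A sketch of the protocol the code emits, in plain C-like pseudocode (not VM source; card values as named in this hunk, helper names hypothetical):

    // Sketch: G1 post-barrier for *field = new_val.
    jbyte* card = card_table_base + ((uintptr_t)field >> card_shift);
    if (*card != g1_young_card_val()) {    // young regions never need marking
      storeload_membar();                  // Op_MemBarVolatile: publish the oop
                                           // store before re-reading the card
      if (*card != dirty_card_val()) {     // re-read: refinement may have run
        *card = dirty_card_val();          // g1_mark_card(): dirty + enqueue
        enqueue_card(card);
      }
    }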
@@ -3802,12 +4071,17 @@
   const TypePtr* value_field_type = string_type->add_offset(value_offset);
   const TypeAryPtr*  value_type = TypeAryPtr::make(TypePtr::NotNull,
                                                    TypeAry::make(TypeInt::CHAR,TypeInt::POS),
                                                    ciTypeArrayKlass::make(T_CHAR), true, 0);
   int value_field_idx = C->get_alias_index(value_field_type);
-  return make_load(ctrl, basic_plus_adr(str, str, value_offset),
+  Node* load = make_load(ctrl, basic_plus_adr(str, str, value_offset),
                    value_type, T_OBJECT, value_field_idx);
+  // String.value field is known to be @Stable.
+  if (UseImplicitStableValues) {
+    load = cast_array_to_stable(load, value_type);
+  }
+  return load;
 }
 
 void GraphKit::store_String_offset(Node* ctrl, Node* str, Node* value) {
   int offset_offset = java_lang_String::offset_offset_in_bytes();
   const TypeInstPtr* string_type = TypeInstPtr::make(TypePtr::NotNull, C->env()->String_klass(),
@@ -3821,16 +4095,13 @@
 void GraphKit::store_String_value(Node* ctrl, Node* str, Node* value) {
   int value_offset = java_lang_String::value_offset_in_bytes();
   const TypeInstPtr* string_type = TypeInstPtr::make(TypePtr::NotNull, C->env()->String_klass(),
                                                      false, NULL, 0);
   const TypePtr* value_field_type = string_type->add_offset(value_offset);
-  const TypeAryPtr*  value_type = TypeAryPtr::make(TypePtr::NotNull,
-                                                   TypeAry::make(TypeInt::CHAR,TypeInt::POS),
-                                                   ciTypeArrayKlass::make(T_CHAR), true, 0);
-  int value_field_idx = C->get_alias_index(value_field_type);
-  store_to_memory(ctrl, basic_plus_adr(str, value_offset),
-                  value, T_OBJECT, value_field_idx);
+
+  store_oop_to_object(ctrl, str, basic_plus_adr(str, value_offset), value_field_type,
+                      value, TypeAryPtr::CHARS, T_OBJECT);
 }
 
 void GraphKit::store_String_length(Node* ctrl, Node* str, Node* value) {
   int count_offset = java_lang_String::count_offset_in_bytes();
   const TypeInstPtr* string_type = TypeInstPtr::make(TypePtr::NotNull, C->env()->String_klass(),
@@ -3838,5 +4109,11 @@
   const TypePtr* count_field_type = string_type->add_offset(count_offset);
   int count_field_idx = C->get_alias_index(count_field_type);
   store_to_memory(ctrl, basic_plus_adr(str, count_offset),
                   value, T_INT, count_field_idx);
 }
+
+Node* GraphKit::cast_array_to_stable(Node* ary, const TypeAryPtr* ary_type) {
+  // Reify the property as a CastPP node in Ideal graph to comply with monotonicity
+  // assumption of CCP analysis.
+  return _gvn.transform(new(C) CastPPNode(ary, ary_type->cast_to_stable(true)));
+}
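cast_array_to_stable() makes the stability of String.value visible to the optimizer as a node in the graph, so constant folding of loads from constant strings stays monotone under CCP. A usage sketch (hypothetical caller, mirroring load_String_value above):

    // Sketch: loads from a constant String's stable value array can fold.
    Node* value = load_String_value(control(), str);  // already cast to stable
    const TypeAryPtr* t = _gvn.type(value)->isa_aryptr();
    assert(t == NULL || t->is_stable(), "stability is reified in the type");
    // If `str` is a compile-time constant, a later load from `value` at a
    // constant index can be replaced by the character itself, which is what
    // @Stable promises for String.value under UseImplicitStableValues.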