comparison: src/share/vm/opto/graphKit.cpp @ 1746:4b29a725c43c

6912064: type profiles need to be exploited more for dynamic language support
Reviewed-by: kvn

author:    jrose
date:      Fri, 20 Aug 2010 23:40:30 -0700
parents:   4311f23817fd
children:  3e8fbc61cee8
comparing: 1730:f55c4f82ab9d with 1746:4b29a725c43c
--- a/src/share/vm/opto/graphKit.cpp
+++ b/src/share/vm/opto/graphKit.cpp
@@ -2449,8 +2449,76 @@
 
   return fail;
 }
 
 
+//------------------------------seems_never_null-------------------------------
+// Use null_seen information if it is available from the profile.
+// If we see an unexpected null at a type check we record it and force a
+// recompile; the offending check will be recompiled to handle NULLs.
+// If we see several offending BCIs, then all checks in the
+// method will be recompiled.
+bool GraphKit::seems_never_null(Node* obj, ciProfileData* data) {
+  if (UncommonNullCast               // Cutout for this technique
+      && obj != null()               // And not the -Xcomp stupid case?
+      && !too_many_traps(Deoptimization::Reason_null_check)
+      ) {
+    if (data == NULL)
+      // Edge case: no mature data. Be optimistic here.
+      return true;
+    // If the profile has not seen a null, assume it won't happen.
+    assert(java_bc() == Bytecodes::_checkcast ||
+           java_bc() == Bytecodes::_instanceof ||
+           java_bc() == Bytecodes::_aastore, "MDO must collect null_seen bit here");
+    return !data->as_BitData()->null_seen();
+  }
+  return false;
+}
+
+//------------------------maybe_cast_profiled_receiver-------------------------
+// If the profile has seen exactly one type, narrow to exactly that type.
+// Subsequent type checks will always fold up.
+Node* GraphKit::maybe_cast_profiled_receiver(Node* not_null_obj,
+                                             ciProfileData* data,
+                                             ciKlass* require_klass) {
+  if (!UseTypeProfile || !TypeProfileCasts) return NULL;
+  if (data == NULL) return NULL;
+
+  // Make sure we haven't already deoptimized from this tactic.
+  if (too_many_traps(Deoptimization::Reason_class_check))
+    return NULL;
+
+  // (No, this isn't a call, but it's enough like a virtual call
+  // to use the same ciMethod accessor to get the profile info...)
+  ciCallProfile profile = method()->call_profile_at_bci(bci());
+  if (profile.count() >= 0 &&          // no cast failures here
+      profile.has_receiver(0) &&
+      profile.morphism() == 1) {
+    ciKlass* exact_kls = profile.receiver(0);
+    if (require_klass == NULL ||
+        static_subtype_check(require_klass, exact_kls) == SSC_always_true) {
+      // If we narrow the type to match what the type profile sees,
+      // we can then remove the rest of the cast.
+      // This is a win, even if the exact_kls is very specific,
+      // because downstream operations, such as method calls,
+      // will often benefit from the sharper type.
+      Node* exact_obj = not_null_obj;  // will get updated in place...
+      Node* slow_ctl = type_check_receiver(exact_obj, exact_kls, 1.0,
+                                           &exact_obj);
+      { PreserveJVMState pjvms(this);
+        set_control(slow_ctl);
+        uncommon_trap(Deoptimization::Reason_class_check,
+                      Deoptimization::Action_maybe_recompile);
+      }
+      replace_in_map(not_null_obj, exact_obj);
+      return exact_obj;
+    }
+    // assert(ssc == SSC_always_true)... except maybe the profile lied to us.
+  }
+
+  return NULL;
+}
+
+
 //-------------------------------gen_instanceof--------------------------------
 // Generate an instance-of idiom.  Used by both the instance-of bytecode
 // and the reflective instance-of call.
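[note] Both new helpers follow the same speculate-and-trap pattern: trust the profile until the per-site trap budget is exhausted. Below is a minimal, hedged model of the seems_never_null decision outside HotSpot; BitData, kTrapLimit, and the harness are illustrative stand-ins, not the real ciProfileData / trap-count machinery.

    // Hedged model of seems_never_null: trust the interpreter's null_seen bit
    // unless this site has already deoptimized too often. All names here are
    // illustrative stand-ins for the HotSpot equivalents.
    #include <cstdio>

    struct BitData { bool null_seen; };   // stand-in for the MDO's null_seen bit
    constexpr int kTrapLimit = 2;         // stand-in for the per-BCI trap cutoff

    bool seems_never_null_model(const BitData* data, int traps_taken) {
      if (traps_taken >= kTrapLimit) return false;  // too_many_traps(): give up
      if (data == nullptr) return true;             // no mature data: be optimistic
      return !data->null_seen;                      // profile saw no null: speculate
    }

    int main() {
      BitData d{false};
      printf("%d\n", seems_never_null_model(&d, 0));      // 1: elide the null path
      printf("%d\n", seems_never_null_model(nullptr, 0)); // 1: optimistic default
      d.null_seen = true;
      printf("%d\n", seems_never_null_model(&d, 0));      // 0: keep the null path
      return 0;
    }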
@@ -2457,28 +2525,52 @@
-Node* GraphKit::gen_instanceof( Node *subobj, Node* superklass ) {
-  C->set_has_split_ifs(true); // Has chance for split-if optimization
+Node* GraphKit::gen_instanceof(Node* obj, Node* superklass) {
+  kill_dead_locals();           // Benefit all the uncommon traps
   assert( !stopped(), "dead parse path should be checked in callers" );
   assert(!TypePtr::NULL_PTR->higher_equal(_gvn.type(superklass)->is_klassptr()),
          "must check for not-null not-dead klass in callers");
 
   // Make the merge point
   enum { _obj_path = 1, _fail_path, _null_path, PATH_LIMIT };
   RegionNode* region = new(C, PATH_LIMIT) RegionNode(PATH_LIMIT);
   Node*       phi    = new(C, PATH_LIMIT) PhiNode(region, TypeInt::BOOL);
   C->set_has_split_ifs(true); // Has chance for split-if optimization
 
+  ciProfileData* data = NULL;
+  if (java_bc() == Bytecodes::_instanceof) {  // Only for the bytecode
+    data = method()->method_data()->bci_to_data(bci());
+  }
+  bool never_see_null = (ProfileDynamicTypes  // aggressive use of profile
+                         && seems_never_null(obj, data));
+
   // Null check; get casted pointer; set region slot 3
   Node* null_ctl = top();
-  Node* not_null_obj = null_check_oop(subobj, &null_ctl);
+  Node* not_null_obj = null_check_oop(obj, &null_ctl, never_see_null);
 
   // If not_null_obj is dead, only null-path is taken
   if (stopped()) {              // Doing instance-of on a NULL?
     set_control(null_ctl);
     return intcon(0);
   }
   region->init_req(_null_path, null_ctl);
   phi   ->init_req(_null_path, intcon(0)); // Set null path value
+  if (null_ctl == top()) {
+    // Do this eagerly, so that pattern matches like is_diamond_phi
+    // will work even during parsing.
+    assert(_null_path == PATH_LIMIT-1, "delete last");
+    region->del_req(_null_path);
+    phi   ->del_req(_null_path);
+  }
+
+  if (ProfileDynamicTypes && data != NULL) {
+    Node* cast_obj = maybe_cast_profiled_receiver(not_null_obj, data, NULL);
+    if (stopped()) {            // Profile disagrees with this path.
+      set_control(null_ctl);    // Null is the only remaining possibility.
+      return intcon(0);
+    }
+    if (cast_obj != NULL)
+      not_null_obj = cast_obj;
+  }
 
   // Load the object's klass
   Node* obj_klass = load_object_klass(not_null_obj);
 
   // Generate the subtype check
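[note] The net effect on a hot, well-profiled instanceof site: the null branch becomes an uncommon trap and the profiled receiver type turns the subtype walk into a single exact-class guard. A hedged C++ model of the resulting fast path follows; RTTI stands in for the klass-pointer compare, and Base, Leaf, and deoptimize are illustrative names, not HotSpot code.

    #include <typeinfo>

    struct Base { virtual ~Base() {} };
    struct Leaf : Base {};

    // Stand-in for uncommon_trap(): control leaves compiled code here and the
    // interpreter re-profiles the site before a possible recompile.
    [[noreturn]] void deoptimize() { throw "uncommon trap"; }

    // Model of the speculated fast path for "obj instanceof Leaf" when the
    // profile says the site never sees null and is monomorphic in Leaf.
    int instanceof_leaf_fast(Base* obj) {
      if (obj == nullptr) deoptimize();               // never_see_null speculation
      if (typeid(*obj) != typeid(Leaf)) deoptimize(); // exact-class guard from profile
      return 1;  // subtype check against Leaf folds to a constant after narrowing
    }

    int main() {
      Leaf leaf;
      return instanceof_leaf_fast(&leaf) ? 0 : 1;     // fast path: returns 0
    }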
@@ -2544,24 +2636,12 @@
   RegionNode* region = new (C, PATH_LIMIT) RegionNode(PATH_LIMIT);
   Node*       phi    = new (C, PATH_LIMIT) PhiNode(region, toop);
   C->set_has_split_ifs(true); // Has chance for split-if optimization
 
   // Use null-cast information if it is available
-  bool never_see_null = false;
-  // If we see an unexpected null at a check-cast we record it and force a
-  // recompile; the offending check-cast will be compiled to handle NULLs.
-  // If we see several offending BCIs, then all checkcasts in the
-  // method will be compiled to handle NULLs.
-  if (UncommonNullCast            // Cutout for this technique
-      && failure_control == NULL  // regular case
-      && obj != null()            // And not the -Xcomp stupid case?
-      && !too_many_traps(Deoptimization::Reason_null_check)) {
-    // Finally, check the "null_seen" bit from the interpreter.
-    if (data == NULL || !data->as_BitData()->null_seen()) {
-      never_see_null = true;
-    }
-  }
+  bool never_see_null = ((failure_control == NULL)  // regular case only
+                         && seems_never_null(obj, data));
 
   // Null check; get casted pointer; set region slot 3
   Node* null_ctl = top();
   Node* not_null_obj = null_check_oop(obj, &null_ctl, never_see_null);
 
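[note] The deleted block and its one-line replacement encode the same feedback loop: speculate, trap on a surprise null, and stop speculating once the site has trapped too often. A small hedged model of that loop; the Site counter stands in for the per-BCI trap bookkeeping behind too_many_traps, and all names are illustrative.

    // Hedged model of the trap budget: a site speculates "no null here" until
    // it has trapped kTrapLimit times, then permanently takes the full path.
    #include <cstdio>

    constexpr int kTrapLimit = 2;     // illustrative per-site cutoff
    struct Site { int traps = 0; };   // stand-in for per-BCI trap counts

    // Returns the casted pointer, handling null speculatively or fully.
    const char* checkcast_model(Site& site, const char* obj) {
      if (site.traps < kTrapLimit) {    // compiled with never_see_null
        if (obj != nullptr) return obj; // fast path: no null branch materialized
        ++site.traps;                   // uncommon trap: record this BCI
        // deopt here; the next compile of this site includes the null path
      }
      return obj;                       // full path: nulls pass through checkcast
    }

    int main() {
      Site site;
      checkcast_model(site, nullptr);   // first surprise null: trap #1
      checkcast_model(site, nullptr);   // trap #2 reaches the limit
      printf("speculating again? %s\n", site.traps < kTrapLimit ? "yes" : "no");
      return 0;
    }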
@@ -2570,51 +2650,30 @@
     set_control(null_ctl);
     return null();
   }
   region->init_req(_null_path, null_ctl);
   phi   ->init_req(_null_path, null());  // Set null path value
-
-  Node* cast_obj = NULL;        // the casted version of the object
-
-  // If the profile has seen exactly one type, narrow to that type.
-  // (The subsequent subtype check will always fold up.)
-  if (UseTypeProfile && TypeProfileCasts && data != NULL &&
+  if (null_ctl == top()) {
+    // Do this eagerly, so that pattern matches like is_diamond_phi
+    // will work even during parsing.
+    assert(_null_path == PATH_LIMIT-1, "delete last");
+    region->del_req(_null_path);
+    phi   ->del_req(_null_path);
+  }
+
+  Node* cast_obj = NULL;
+  if (data != NULL &&
       // Counter has never been decremented (due to cast failure).
       // ...This is a reasonable thing to expect.  It is true of
       // all casts inserted by javac to implement generic types.
-      data->as_CounterData()->count() >= 0 &&
-      !too_many_traps(Deoptimization::Reason_class_check)) {
-    // (No, this isn't a call, but it's enough like a virtual call
-    // to use the same ciMethod accessor to get the profile info...)
-    ciCallProfile profile = method()->call_profile_at_bci(bci());
-    if (profile.count() >= 0 &&         // no cast failures here
-        profile.has_receiver(0) &&
-        profile.morphism() == 1) {
-      ciKlass* exact_kls = profile.receiver(0);
-      int ssc = static_subtype_check(tk->klass(), exact_kls);
-      if (ssc == SSC_always_true) {
-        // If we narrow the type to match what the type profile sees,
-        // we can then remove the rest of the cast.
-        // This is a win, even if the exact_kls is very specific,
-        // because downstream operations, such as method calls,
-        // will often benefit from the sharper type.
-        Node* exact_obj = not_null_obj; // will get updated in place...
-        Node* slow_ctl = type_check_receiver(exact_obj, exact_kls, 1.0,
-                                             &exact_obj);
-        { PreserveJVMState pjvms(this);
-          set_control(slow_ctl);
-          uncommon_trap(Deoptimization::Reason_class_check,
-                        Deoptimization::Action_maybe_recompile);
-        }
-        if (failure_control != NULL) // failure is now impossible
-          (*failure_control) = top();
-        replace_in_map(not_null_obj, exact_obj);
-        // adjust the type of the phi to the exact klass:
-        phi->raise_bottom_type(_gvn.type(exact_obj)->meet(TypePtr::NULL_PTR));
-        cast_obj = exact_obj;
-      }
-      // assert(cast_obj != NULL)... except maybe the profile lied to us.
+      data->as_CounterData()->count() >= 0) {
+    cast_obj = maybe_cast_profiled_receiver(not_null_obj, data, tk->klass());
+    if (cast_obj != NULL) {
+      if (failure_control != NULL) // failure is now impossible
+        (*failure_control) = top();
+      // adjust the type of the phi to the exact klass:
+      phi->raise_bottom_type(_gvn.type(cast_obj)->meet(TypePtr::NULL_PTR));
     }
   }
 
   if (cast_obj == NULL) {
     // Load the object's klass
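[note] For checkcast the same narrowing now flows through maybe_cast_profiled_receiver; a successful narrow also kills the failure path (failure_control becomes top) and sharpens the phi's type. A hedged C++ model of the compiled cast after narrowing; RTTI again stands in for the exact-klass guard, and all names are illustrative rather than HotSpot APIs.

    #include <typeinfo>

    struct Base { virtual ~Base() {} };
    struct Leaf : Base { int payload = 42; };

    [[noreturn]] void deoptimize() { throw "uncommon trap"; }  // Reason_class_check

    // Model of "(Leaf) obj" after profile narrowing: one exact-class guard,
    // then the object flows downstream with the sharper static type Leaf*,
    // so the checkcast's own subtype walk and failure path disappear.
    Leaf* checkcast_leaf(Base* obj) {
      if (obj == nullptr) return nullptr;              // null always passes checkcast
      if (typeid(*obj) != typeid(Leaf)) deoptimize();  // profile said: always Leaf
      return static_cast<Leaf*>(obj);                  // narrowed, no further checks
    }

    int main() {
      Leaf leaf;
      Base* obj = &leaf;
      return checkcast_leaf(obj)->payload == 42 ? 0 : 1;  // fast path taken
    }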