comparison src/share/vm/code/compiledIC.cpp @ 10168:a6e09d6dd8e5

8003853: specify offset of IC load in java_to_interp stub
Summary: refactored code to allow platform-specific differences
Reviewed-by: dlong, twisti
Contributed-by: Goetz Lindenmaier <goetz.lindenmaier@sap.com>
author dlong
date Wed, 24 Apr 2013 20:55:28 -0400
parents 41340544e182
children 836a62f43af9 b2e698d2276c
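
The routines removed from this shared file below (CompiledStaticCall::set_to_interpreted, set_stub_to_clean and verify) all manipulate the same java_to_interp stub: a NativeMovConstReg that loads the callee Method* (the "IC load" of the bug title, whose offset inside the stub may now differ per platform) followed by a NativeJump to the interpreter entry. A minimal sketch of that patching pattern, pieced together from the removed code; stub, callee and entry are the arguments of the removed set_to_interpreted(), and this is not a compilable unit on its own:

  // The stub consists of two native instructions; constructing the wrappers
  // also verifies them.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());

  // A clean stub has data() == 0 and jump_destination() == (address)-1
  // (see the removed set_stub_to_clean()).  Pointing it at the interpreter:
  method_holder->set_data((intptr_t)callee());  // Method* the interpreter expects
  jump->set_jump_destination(entry);            // typically the c2i adapter entry from compute_entry()
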
--- a/src/share/vm/code/compiledIC.cpp   10106:42a42da29fd7
+++ b/src/share/vm/code/compiledIC.cpp   10168:a6e09d6dd8e5
@@ -42,29 +42,10 @@
 #include "utilities/events.hpp"
 
 
 // Every time a compiled IC is changed or its type is being accessed,
 // either the CompiledIC_lock must be set or we must be at a safe point.
-
-
-// Release the CompiledICHolder* associated with this call site if there is one.
-void CompiledIC::cleanup_call_site(virtual_call_Relocation* call_site) {
-  // This call site might have become stale so inspect it carefully.
-  NativeCall* call = nativeCall_at(call_site->addr());
-  if (is_icholder_entry(call->destination())) {
-    NativeMovConstReg* value = nativeMovConstReg_at(call_site->cached_value());
-    InlineCacheBuffer::queue_for_release((CompiledICHolder*)value->data());
-  }
-}
-
-
-bool CompiledIC::is_icholder_call_site(virtual_call_Relocation* call_site) {
-  // This call site might have become stale so inspect it carefully.
-  NativeCall* call = nativeCall_at(call_site->addr());
-  return is_icholder_entry(call->destination());
-}
-
 
 //-----------------------------------------------------------------------------
 // Low-level access to an inline cache. Private, since they might not be
 // MT-safe to use.
 
@@ -486,37 +467,10 @@
 bool CompiledIC::is_icholder_entry(address entry) {
   CodeBlob* cb = CodeCache::find_blob_unsafe(entry);
   return (cb != NULL && cb->is_adapter_blob());
 }
 
-
-CompiledIC::CompiledIC(nmethod* nm, NativeCall* call)
-  : _ic_call(call)
-{
-  address ic_call = call->instruction_address();
-
-  assert(ic_call != NULL, "ic_call address must be set");
-  assert(nm != NULL, "must pass nmethod");
-  assert(nm->contains(ic_call), "must be in nmethod");
-
-  // search for the ic_call at the given address
-  RelocIterator iter(nm, ic_call, ic_call+1);
-  bool ret = iter.next();
-  assert(ret == true, "relocInfo must exist at this address");
-  assert(iter.addr() == ic_call, "must find ic_call");
-  if (iter.type() == relocInfo::virtual_call_type) {
-    virtual_call_Relocation* r = iter.virtual_call_reloc();
-    _is_optimized = false;
-    _value = nativeMovConstReg_at(r->cached_value());
-  } else {
-    assert(iter.type() == relocInfo::opt_virtual_call_type, "must be a virtual call");
-    _is_optimized = true;
-    _value = NULL;
-  }
-}
-
-
 // ----------------------------------------------------------------------------
 
 void CompiledStaticCall::set_to_clean() {
   assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
   // Reset call site
@@ -547,37 +501,10 @@
   // must be in the stub part of the nmethod that contains the call
   nmethod* nm = CodeCache::find_nmethod(instruction_address());
   return nm->stub_contains(destination());
 }
 
-
-void CompiledStaticCall::set_to_interpreted(methodHandle callee, address entry) {
-  address stub = find_stub();
-  guarantee(stub != NULL, "stub not found");
-
-  if (TraceICs) {
-    ResourceMark rm;
-    tty->print_cr("CompiledStaticCall@" INTPTR_FORMAT ": set_to_interpreted %s",
-                  instruction_address(),
-                  callee->name_and_sig_as_C_string());
-  }
-
-  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);  // creation also verifies the object
-  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());
-
-  assert(method_holder->data() == 0 || method_holder->data() == (intptr_t)callee(), "a) MT-unsafe modification of inline cache");
-  assert(jump->jump_destination() == (address)-1 || jump->jump_destination() == entry, "b) MT-unsafe modification of inline cache");
-
-  // Update stub
-  method_holder->set_data((intptr_t)callee());
-  jump->set_jump_destination(entry);
-
-  // Update jump to call
-  set_destination_mt_safe(stub);
-}
-
-
 void CompiledStaticCall::set(const StaticCallInfo& info) {
   assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
   MutexLockerEx pl(Patching_lock, Mutex::_no_safepoint_check_flag);
   // Updating a cache to the wrong entry can cause bugs that are very hard
   // to track down - if cache entry gets invalid - we just clean it. In
@@ -615,23 +542,10 @@
     // puts a converter-frame on the stack to save arguments.
     info._to_interpreter = true;
     info._entry = m()->get_c2i_entry();
   }
 }
-
-
-void CompiledStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
-  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
-  // Reset stub
-  address stub = static_stub->addr();
-  assert(stub != NULL, "stub not found");
-  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);  // creation also verifies the object
-  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());
-  method_holder->set_data(0);
-  jump->set_jump_destination((address)-1);
-}
-
 
 address CompiledStaticCall::find_stub() {
   // Find reloc. information containing this call-site
   RelocIterator iter((nmethod*)NULL, instruction_address());
   while (iter.next()) {
@@ -666,22 +580,19 @@
   }
   assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted()
          || is_optimized() || is_megamorphic(), "sanity check");
 }
 
-
 void CompiledIC::print() {
   print_compiled_ic();
   tty->cr();
 }
 
-
 void CompiledIC::print_compiled_ic() {
   tty->print("Inline cache at " INTPTR_FORMAT ", calling %s " INTPTR_FORMAT " cached_value " INTPTR_FORMAT,
              instruction_address(), is_call_to_interpreted() ? "interpreted " : "", ic_destination(), is_optimized() ? NULL : cached_value());
 }
-
 
 void CompiledStaticCall::print() {
   tty->print("static call at " INTPTR_FORMAT " -> ", instruction_address());
   if (is_clean()) {
     tty->print("clean");
@@ -691,23 +602,6 @@
     tty->print("interpreted");
   }
   tty->cr();
 }
 
-void CompiledStaticCall::verify() {
-  // Verify call
-  NativeCall::verify();
-  if (os::is_MP()) {
-    verify_alignment();
-  }
-
-  // Verify stub
-  address stub = find_stub();
-  assert(stub != NULL, "no stub found for static call");
-  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);  // creation also verifies the object
-  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());
-
-  // Verify state
-  assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
-}
-
-#endif
+#endif // !PRODUCT
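
With this change the shared compiledIC.cpp keeps only platform-independent logic, so each CPU port has to supply the stub-layout-dependent routines deleted above, presumably in a per-CPU source file (a name such as compiledIC_<arch>.cpp is illustrative here, not taken from this page). Roughly, a port provides:

  // Hypothetical per-platform skeleton; the bodies mirror the code removed
  // above but may use CPU-specific instruction sequences and offsets.
  void CompiledStaticCall::set_to_interpreted(methodHandle callee, address entry) {
    // patch the NativeMovConstReg/NativeJump pair in this CPU's stub layout
  }

  void CompiledStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
    // reset the Method* load to 0 and the jump destination to (address)-1
  }

  #ifndef PRODUCT
  void CompiledStaticCall::verify() {
    // verify the call site, the stub and the resulting state
  }
  #endif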