comparison src/share/vm/code/compiledIC.cpp @ 8136:c66aa27ef4da

Reenable patching of optimized and static calls but without out-of-line stubs.
author Thomas Wuerthinger <thomas.wuerthinger@oracle.com>
date Wed, 06 Mar 2013 21:58:58 +0100
parents 1baf7f1e3f23
children b8f261ba79c6

diff -r 25ec01061adf -r c66aa27ef4da src/share/vm/code/compiledIC.cpp
--- a/src/share/vm/code/compiledIC.cpp
+++ b/src/share/vm/code/compiledIC.cpp
@@ -93,11 +93,11 @@
   assert(!is_icholder || is_icholder_entry(entry_point), "must be");
 
   // Don't use ic_destination for this test since that forwards
   // through ICBuffer instead of returning the actual current state of
   // the CompiledIC.
-  if (is_icholder_entry(_ic_call->destination())) {
+  if (is_icholder_entry(_ic_call->destination()) && !_is_optimized) {
     // When patching for the ICStub case the cached value isn't
     // overwritten until the ICStub copied into the CompiledIC during
     // the next safepoint. Make sure that the CompiledICHolder* is
     // marked for release at this point since it won't be identifiable
     // once the entry point is overwritten.
@@ -123,10 +123,17 @@
     CodeBlob* cb = CodeCache::find_blob_unsafe(_ic_call);
     assert(cb != NULL && cb->is_nmethod(), "must be nmethod");
 #endif
     _ic_call->set_destination_mt_safe(entry_point);
   }
+
+#ifdef GRAAL
+  if (_value == NULL) {
+    // Can happen when Graal converted a virtual call into an invoke special based on static analysis.
+    return;
+  }
+#endif
 
   if (is_optimized() || is_icstub) {
     // Optimized call sites don't have a cache value and ICStub call
     // sites only change the entry point. Changing the value in that
     // case could lead to MT safety issues.
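
The two edits above cooperate: the new "&& !_is_optimized" guard keeps an optimized (devirtualized) call site from being treated as holding a CompiledICHolder, and the new GRAAL block returns before the cached value is written when there is no value at all, which happens when Graal statically proved the receiver and turned the virtual call into a direct one. A minimal stand-alone sketch of the resulting control flow, using placeholder types rather than the real HotSpot classes (CompiledICModel and its helpers are illustrative stand-ins):

    #include <cstddef>

    // Placeholder stand-ins for HotSpot types; only the control flow of
    // the patched function is modeled here, not the real CompiledIC.
    struct Metadata {};

    struct CompiledICModel {
      bool      _is_optimized;  // true for a devirtualized (direct) call site
      Metadata* _value;         // cached value; NULL when Graal devirtualized

      bool is_icholder_entry(void* /*entry*/) const { return false; }  // stand-in
      void release_icholder() {}                                       // stand-in
      void patch_destination(void* /*entry*/) {}                       // stand-in
      void update_cached_value(Metadata* /*v*/) {}                     // stand-in

      void set_destination_and_value(void* entry_point, Metadata* value) {
        _value = value;
        // Optimized sites never hold a CompiledICHolder, hence the guard.
        if (is_icholder_entry(entry_point) && !_is_optimized) {
          release_icholder();
        }
        patch_destination(entry_point);
        if (_value == NULL) {
          // No cache value to write: Graal converted the virtual call
          // into a direct call based on static analysis.
          return;
        }
        update_cached_value(_value);
      }
    };
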
@@ -263,16 +270,18 @@
     assert(!is_call_to_interpreted || (is_icholder_call() && cached_icholder() != NULL), "sanity check");
   } else {
     // Check if we are calling into our own codeblob (i.e., to a stub)
     CodeBlob* cb = CodeCache::find_blob(_ic_call->instruction_address());
     address dest = ic_destination();
+#ifndef GRAAL
 #ifdef ASSERT
     {
       CodeBlob* db = CodeCache::find_blob_unsafe(dest);
       assert(!db->is_adapter_blob(), "must use stub!");
     }
 #endif /* ASSERT */
+#endif
     is_call_to_interpreted = cb->contains(dest);
   }
   return is_call_to_interpreted;
 }
 
@@ -549,11 +558,17 @@
   return nm->stub_contains(destination());
 }
 
 
 void CompiledStaticCall::set_to_interpreted(methodHandle callee, address entry) {
+  set_destination_mt_safe(entry);
   address stub=find_stub();
+#ifdef GRAAL
+  if (stub == NULL) {
+    return;
+  }
+#endif
   assert(stub!=NULL, "stub not found");
 
   if (TraceICs) {
     ResourceMark rm;
     tty->print_cr("CompiledStaticCall@" INTPTR_FORMAT ": set_to_interpreted %s",
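
Under GRAAL a static call site may be compiled without an out-of-line stub, so set_to_interpreted now patches the call destination first and returns quietly when find_stub() comes back empty; only non-GRAAL builds keep the hard assert. A hedged, compilable sketch of that pattern in isolation (the *_model names are stand-ins, not the HotSpot API):

    #include <cassert>
    #include <cstddef>

    typedef unsigned char* address;

    // Stand-in for CompiledStaticCall::find_stub(): may return NULL when
    // the call site was emitted without an out-of-line stub, as Graal does.
    static address find_stub_model() { return NULL; }

    static void set_destination_mt_safe_model(address /*entry*/) {}  // stand-in

    void set_to_interpreted_model(address entry) {
      set_destination_mt_safe_model(entry);  // patch the call itself first
      address stub = find_stub_model();
    #ifdef GRAAL
      if (stub == NULL) {
        return;  // stubless call site: nothing further to patch
      }
    #endif
      assert(stub != NULL && "stub not found");
      // ... patch the stub's method holder and its jump target here ...
    }
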
@@ -644,11 +659,15 @@
         case relocInfo::opt_virtual_call_type:
           return iter.opt_virtual_call_reloc()->static_stub();
         case relocInfo::poll_type:
         case relocInfo::poll_return_type: // A safepoint can't overlap a call.
         default:
+#ifdef GRAAL
+          return NULL;
+#else
           ShouldNotReachHere();
+#endif
       }
     }
   }
   return NULL;
 }
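
The tolerance starts one layer down: under GRAAL, find_stub() now answers NULL for relocation kinds that carry no static stub instead of aborting via ShouldNotReachHere(), which is what makes the NULL checks in set_to_interpreted and verify reachable. A compilable sketch of just that switch (RelocType and stub_for are illustrative stand-ins):

    #include <cstddef>
    #include <cstdlib>

    typedef unsigned char* address;

    enum RelocType { static_call_type, opt_virtual_call_type,
                     poll_type, poll_return_type };

    // Models the switch inside find_stub(): relocation kinds without a
    // static stub report "no stub" under GRAAL instead of killing the VM.
    address stub_for(RelocType type, address static_stub) {
      switch (type) {
        case static_call_type:
        case opt_virtual_call_type:
          return static_stub;   // the reloc's static_stub() in the real code
        case poll_type:
        case poll_return_type:  // a safepoint can't overlap a call
        default:
    #ifdef GRAAL
          return NULL;          // tolerated: Graal emits stubless call sites
    #else
          std::abort();         // stand-in for ShouldNotReachHere()
    #endif
      }
    }
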
@@ -700,13 +719,15 @@
     verify_alignment();
   }
 
   // Verify stub
   address stub = find_stub();
+#ifndef GRAAL
   assert(stub != NULL, "no stub found for static call");
   NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);   // creation also verifies the object
   NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());
+#endif
 
   // Verify state
   assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
 }
 