comparison src/cpu/x86/vm/sharedRuntime_x86_64.cpp @ 6725:da91efe96a93

6964458: Reimplement class meta-data storage to use native memory
Summary: Remove PermGen, allocate meta-data in metaspace linked to class loaders, rewrite GC walking, rewrite and rename metadata to be C++ classes
Reviewed-by: jmasa, stefank, never, coleenp, kvn, brutisso, mgerdin, dholmes, jrose, twisti, roland
Contributed-by: jmasa <jon.masamitsu@oracle.com>, stefank <stefan.karlsson@oracle.com>, mgerdin <mikael.gerdin@oracle.com>, never <tom.rodriguez@oracle.com>
author coleenp
date Sat, 01 Sep 2012 13:25:18 -0400
parents 1d7922586cf6
children 8a02ca5e5576
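The hunks below are this file's share of that rewrite: methodOopDesc and compiledICHolderOopDesc accessors become Method and CompiledICHolder accessors, oop verification of method pointers goes away, and oop moves become metadata moves. A rough type-level sketch of the new arrangement, using hypothetical simplified members rather than the real HotSpot fields:

    // Hypothetical simplification: metadata objects are now plain C++ objects
    // allocated in metaspace and owned by their class loader, not oops in a
    // permanent heap generation.
    class Metadata {
      // never moved by the garbage collector
    };

    class Method : public Metadata {   // replaces methodOopDesc
     public:
      void* _code;                     // hypothetical: non-null once compiled
      void* _from_compiled_entry;      // hypothetical: target for compiled callers
    };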
@@ -27,11 +27,11 @@
 #include "assembler_x86.inline.hpp"
 #include "code/debugInfoRec.hpp"
 #include "code/icBuffer.hpp"
 #include "code/vtableStubs.hpp"
 #include "interpreter/interpreter.hpp"
-#include "oops/compiledICHolderOop.hpp"
+#include "oops/compiledICHolder.hpp"
 #include "prims/jvmtiRedefineClassesTrace.hpp"
 #include "runtime/sharedRuntime.hpp"
 #include "runtime/vframeArray.hpp"
 #include "vmreg_x86.inline.hpp"
 #ifdef COMPILER1
@@ -411,12 +411,11 @@
 }
 
 // Patch the callers callsite with entry to compiled code if it exists.
 static void patch_callers_callsite(MacroAssembler *masm) {
   Label L;
-  __ verify_oop(rbx);
-  __ cmpptr(Address(rbx, in_bytes(methodOopDesc::code_offset())), (int32_t)NULL_WORD);
+  __ cmpptr(Address(rbx, in_bytes(Method::code_offset())), (int32_t)NULL_WORD);
   __ jcc(Assembler::equal, L);
 
   // Save the current stack pointer
   __ mov(r13, rsp);
   // Schedule the branch target address early.
@@ -426,12 +425,10 @@
 
   // align stack so push_CPU_state doesn't fault
   __ andptr(rsp, -(StackAlignmentInBytes));
   __ push_CPU_state();
 
-
-  __ verify_oop(rbx);
   // VM needs caller's callsite
   // VM needs target method
   // This needs to be a long call since we will relocate this adapter to
   // the codeBuffer and it may not reach
 
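With the method now a Method* rather than a methodOop, there is no oop to verify, so both verify_oop(rbx) calls are dropped and the nmethod check reads the field at Method::code_offset() directly. A minimal C-level sketch of what the stub does, assuming a hypothetical Method struct with a code field:

    struct Method {
      void* code;   // compiled entry point, or null if only interpreted
    };

    static void patch_callers_callsite_sketch(Method* m /* rbx */) {
      if (m->code == nullptr) {
        return;     // jcc(Assembler::equal, L): nothing compiled yet, skip the patch
      }
      // Otherwise: save the caller's registers, align the stack, and make a long
      // call into the VM (SharedRuntime::fixup_callers_callsite) so the call site
      // is rewritten to jump straight to the compiled entry next time.
    }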
@@ -584,11 +581,11 @@
       }
     }
   }
 
   // Schedule the branch target address early.
-  __ movptr(rcx, Address(rbx, in_bytes(methodOopDesc::interpreter_entry_offset())));
+  __ movptr(rcx, Address(rbx, in_bytes(Method::interpreter_entry_offset())));
   __ jmp(rcx);
 }
 
 static void range_check(MacroAssembler* masm, Register pc_reg, Register temp_reg,
                         address code_start, address code_end,
@@ -696,11 +693,11 @@
   const Register saved_sp = rax;
   __ movptr(saved_sp, r11);
 
   // Will jump to the compiled code just as if compiled code was doing it.
   // Pre-load the register-jump target early, to schedule it better.
-  __ movptr(r11, Address(rbx, in_bytes(methodOopDesc::from_compiled_offset())));
+  __ movptr(r11, Address(rbx, in_bytes(Method::from_compiled_offset())));
 
   // Now generate the shuffle code. Pick up all register args and move the
   // rest through the floating point stack top.
   for (int i = 0; i < total_args_passed; i++) {
     if (sig_bt[i] == T_VOID) {
@@ -791,12 +788,12 @@
   // is possible. So we stash the desired callee in the thread
   // and the vm will find there should this case occur.
 
   __ movptr(Address(r15_thread, JavaThread::callee_target_offset()), rbx);
 
-  // put methodOop where a c2i would expect should we end up there
-  // only needed becaus eof c2 resolve stubs return methodOop as a result in
+  // put Method* where a c2i would expect should we end up there
+  // only needed becaus eof c2 resolve stubs return Method* as a result in
   // rax
   __ mov(rax, rbx);
   __ jmp(r11);
 }
 
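The i2c adapter's tail is unchanged apart from the Method* renaming: the callee is stashed in the thread in case the compiled target has been deoptimized by the time we jump, and the method is also left where a c2i adapter would look for it. A sketch with hypothetical stand-in types:

    struct Method     { void* from_compiled_entry; };
    struct JavaThread { Method* callee_target; };

    static void* i2c_tail_sketch(JavaThread* thread /* r15 */, Method* callee /* rbx */) {
      // Stash the callee so the VM can recover it if we race with deoptimization
      // and end up in handle_wrong_method instead of the compiled code.
      thread->callee_target = callee;       // movptr(callee_target_offset(), rbx)
      // mov(rax, rbx): C2 resolve stubs return the Method* in rax, so a c2i
      // entry reached from here finds it where it expects.
      return callee->from_compiled_entry;   // jmp(r11), preloaded from
                                            // Method::from_compiled_offset()
    }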
@@ -810,11 +807,11 @@
   address i2c_entry = __ pc();
 
   gen_i2c_adapter(masm, total_args_passed, comp_args_on_stack, sig_bt, regs);
 
   // -------------------------------------------------------------------------
-  // Generate a C2I adapter. On entry we know rbx holds the methodOop during calls
+  // Generate a C2I adapter. On entry we know rbx holds the Method* during calls
   // to the interpreter. The args start out packed in the compiled layout. They
   // need to be unpacked into the interpreter layout. This will almost always
   // require some stack space. We grow the current (compiled) stack, then repack
   // the args. We finally end in a jump to the generic interpreter entry point.
   // On exit from the interpreter, the interpreter will restore our SP (lest the
@@ -827,24 +824,21 @@
   Register holder = rax;
   Register receiver = j_rarg0;
   Register temp = rbx;
 
   {
-    __ verify_oop(holder);
     __ load_klass(temp, receiver);
-    __ verify_oop(temp);
-
-    __ cmpptr(temp, Address(holder, compiledICHolderOopDesc::holder_klass_offset()));
-    __ movptr(rbx, Address(holder, compiledICHolderOopDesc::holder_method_offset()));
+    __ cmpptr(temp, Address(holder, CompiledICHolder::holder_klass_offset()));
+    __ movptr(rbx, Address(holder, CompiledICHolder::holder_method_offset()));
     __ jcc(Assembler::equal, ok);
     __ jump(RuntimeAddress(SharedRuntime::get_ic_miss_stub()));
 
     __ bind(ok);
     // Method might have been compiled since the call site was patched to
     // interpreted if that is the case treat it as a miss so we can get
     // the call site corrected.
-    __ cmpptr(Address(rbx, in_bytes(methodOopDesc::code_offset())), (int32_t)NULL_WORD);
+    __ cmpptr(Address(rbx, in_bytes(Method::code_offset())), (int32_t)NULL_WORD);
     __ jcc(Assembler::equal, skip_fixup);
     __ jump(RuntimeAddress(SharedRuntime::get_ic_miss_stub()));
   }
 
   address c2i_entry = __ pc();
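The unverified c2i entry now does its inline-cache check against the CompiledICHolder instead of a compiledICHolderOop, again with the oop verification removed. The logic itself is unchanged; roughly, with hypothetical struct layouts standing in for the real offsets:

    struct Klass;
    struct Method           { void* code; };                 // non-null once compiled
    struct CompiledICHolder { Klass* holder_klass; Method* holder_method; };

    // Returns the Method* to hand to the c2i path, or null to signal a jump to
    // SharedRuntime::get_ic_miss_stub().
    static Method* c2i_ic_check_sketch(CompiledICHolder* holder /* rax */,
                                       Klass* receiver_klass /* rbx, via load_klass */) {
      if (receiver_klass != holder->holder_klass) {
        return nullptr;                   // receiver type mismatch: IC miss
      }
      Method* m = holder->holder_method;  // movptr(rbx, holder_method_offset())
      if (m->code != nullptr) {
        return nullptr;                   // compiled since the site was patched to
      }                                   // interpreted: miss, so it gets corrected
      return m;                           // jcc(equal, skip_fixup)
    }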
@@ -2182,22 +2176,22 @@
 
   {
     SkipIfEqual skip(masm, &DTraceMethodProbes, false);
     // protect the args we've loaded
     save_args(masm, total_c_args, c_arg, out_regs);
-    __ movoop(c_rarg1, JNIHandles::make_local(method()));
+    __ mov_metadata(c_rarg1, method());
     __ call_VM_leaf(
       CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_entry),
       r15_thread, c_rarg1);
     restore_args(masm, total_c_args, c_arg, out_regs);
   }
 
   // RedefineClasses() tracing support for obsolete method entry
   if (RC_TRACE_IN_RANGE(0x00001000, 0x00002000)) {
     // protect the args we've loaded
     save_args(masm, total_c_args, c_arg, out_regs);
-    __ movoop(c_rarg1, JNIHandles::make_local(method()));
+    __ mov_metadata(c_rarg1, method());
     __ call_VM_leaf(
       CAST_FROM_FN_PTR(address, SharedRuntime::rc_trace_method_entry),
       r15_thread, c_rarg1);
     restore_args(masm, total_c_args, c_arg, out_regs);
   }
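Both probe call-outs change the same way: the method is no longer a heap oop the GC might move, so there is nothing to publish through a JNI handle, and mov_metadata() embeds the raw Method* (with a metadata relocation) where movoop(JNIHandles::make_local(...)) used to load a handle. A small sketch of the distinction, with hypothetical stand-in types:

    struct HeapObject { };   // may be relocated by the garbage collector
    struct Metadata   { };   // metaspace-allocated, never moved

    // Old scheme: a movable oop had to be published through an indirection
    // that the GC keeps up to date (cf. JNIHandles::make_local).
    static HeapObject** publish_oop(HeapObject** handle_slot, HeapObject* obj) {
      *handle_slot = obj;
      return handle_slot;
    }

    // New scheme: a Method* is stable for the lifetime of its class loader,
    // so the probe argument is simply the raw pointer.
    static Metadata* publish_metadata(Metadata* method) {
      return method;
    }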
@@ -2446,11 +2440,11 @@
 
   }
   {
     SkipIfEqual skip(masm, &DTraceMethodProbes, false);
     save_native_result(masm, ret_type, stack_slots);
-    __ movoop(c_rarg1, JNIHandles::make_local(method()));
+    __ mov_metadata(c_rarg1, method());
     __ call_VM_leaf(
       CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_exit),
       r15_thread, c_rarg1);
     restore_native_result(masm, ret_type, stack_slots);
   }
@@ -3877,12 +3871,12 @@
   // check for pending exceptions
   Label pending;
   __ cmpptr(Address(r15_thread, Thread::pending_exception_offset()), (int32_t)NULL_WORD);
   __ jcc(Assembler::notEqual, pending);
 
-  // get the returned methodOop
-  __ movptr(rbx, Address(r15_thread, JavaThread::vm_result_offset()));
+  // get the returned Method*
+  __ get_vm_result_2(rbx, r15_thread);
   __ movptr(Address(rsp, RegisterSaver::rbx_offset_in_bytes()), rbx);
 
   __ movptr(Address(rsp, RegisterSaver::rax_offset_in_bytes()), rax);
 
   RegisterSaver::restore_live_registers(masm);
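The resolve/handler stub epilogue now fetches the callee through get_vm_result_2(), the metadata counterpart of the old oop-typed vm_result slot. A sketch of the hand-off, assuming hypothetical JavaThread fields:

    struct Method;
    struct JavaThread {
      void*   vm_result;     // oop results handed back from VM calls
      Method* vm_result_2;   // metadata results handed back from VM calls
    };

    static Method* get_returned_method_sketch(JavaThread* thread /* r15 */) {
      Method* m = thread->vm_result_2;   // was: a plain read of vm_result
      thread->vm_result_2 = nullptr;     // get_vm_result_2() clears the slot as it reads
      return m;                          // stored into the saved-rbx slot on the stack
    }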