comparison src/cpu/x86/vm/templateTable_x86_32.cpp @ 726:be93aad57795

6655646: dynamic languages need dynamically linked call sites
Summary: invokedynamic instruction (JSR 292 RI)
Reviewed-by: twisti, never
author jrose
date Tue, 21 Apr 2009 23:21:04 -0700
parents 7bb995fbd3c0
children 389049f3f393
--- a/src/cpu/x86/vm/templateTable_x86_32.cpp  725:928912ce8438
+++ b/src/cpu/x86/vm/templateTable_x86_32.cpp  726:be93aad57795
@@ -204,16 +204,16 @@
     __ get_method(scratch);
     // Let breakpoint table handling rewrite to quicker bytecode
     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::set_original_bytecode_at), scratch, rsi, bc);
 #ifndef ASSERT
     __ jmpb(patch_done);
-    __ bind(fast_patch);
-  }
 #else
     __ jmp(patch_done);
+#endif
     __ bind(fast_patch);
   }
+#ifdef ASSERT
   Label okay;
   __ load_unsigned_byte(scratch, at_bcp(0));
   __ cmpl(scratch, (int)Bytecodes::java_code(bytecode));
   __ jccb(Assembler::equal, okay);
   __ cmpl(scratch, bc);
@@ -2103,27 +2103,31 @@
   __ membar(order_constraint);
 }

 void TemplateTable::resolve_cache_and_index(int byte_no, Register Rcache, Register index) {
   assert(byte_no == 1 || byte_no == 2, "byte_no out of range");
+  bool is_invokedynamic = (bytecode() == Bytecodes::_invokedynamic);

   Register temp = rbx;

   assert_different_registers(Rcache, index, temp);

   const int shift_count = (1 + byte_no)*BitsPerByte;
   Label resolved;
-  __ get_cache_and_index_at_bcp(Rcache, index, 1);
-  __ movl(temp, Address(Rcache,
-                        index,
-                        Address::times_ptr,
-                        constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::indices_offset()));
-  __ shrl(temp, shift_count);
-  // have we resolved this bytecode?
-  __ andptr(temp, 0xFF);
-  __ cmpl(temp, (int)bytecode());
-  __ jcc(Assembler::equal, resolved);
+  __ get_cache_and_index_at_bcp(Rcache, index, 1, is_invokedynamic);
+  if (is_invokedynamic) {
+    // we are resolved if the f1 field contains a non-null CallSite object
+    __ cmpptr(Address(Rcache, index, Address::times_ptr, constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::f1_offset()), (int32_t) NULL_WORD);
+    __ jcc(Assembler::notEqual, resolved);
+  } else {
+    __ movl(temp, Address(Rcache, index, Address::times_4, constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::indices_offset()));
+    __ shrl(temp, shift_count);
+    // have we resolved this bytecode?
+    __ andl(temp, 0xFF);
+    __ cmpl(temp, (int)bytecode());
+    __ jcc(Assembler::equal, resolved);
+  }

   // resolve first time through
   address entry;
   switch (bytecode()) {
     case Bytecodes::_getstatic : // fall through
@@ -2132,16 +2136,17 @@
     case Bytecodes::_putfield : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_get_put); break;
     case Bytecodes::_invokevirtual : // fall through
     case Bytecodes::_invokespecial : // fall through
     case Bytecodes::_invokestatic : // fall through
     case Bytecodes::_invokeinterface: entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invoke); break;
+    case Bytecodes::_invokedynamic : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokedynamic); break;
     default : ShouldNotReachHere(); break;
   }
   __ movl(temp, (int)bytecode());
   __ call_VM(noreg, entry, temp);
   // Update registers with resolved info
-  __ get_cache_and_index_at_bcp(Rcache, index, 1);
+  __ get_cache_and_index_at_bcp(Rcache, index, 1, is_invokedynamic);
   __ bind(resolved);
 }


 // The cache and index registers must be set before call
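For orientation: the new resolution test in resolve_cache_and_index treats invokedynamic specially, because an invokedynamic entry counts as resolved once its f1 field holds a non-null CallSite, while every other bytecode counts as resolved when its own code is recorded in the packed indices field. A minimal standalone sketch of that logic in plain C++; CacheEntryModel and its fields are hypothetical stand-ins for ConstantPoolCacheEntry, not HotSpot API:

#include <cstdint>

// Hypothetical model of a constant pool cache entry, for illustration only.
struct CacheEntryModel {
  intptr_t f1;       // for invokedynamic: the CallSite oop once linked, otherwise 0
  uint32_t indices;  // packed field; the byte selected by byte_no records the resolved bytecode
};

// Sketch of the "have we resolved this entry?" test that the template emits.
// byte_no selects which packed byte to inspect (1 or 2), as in the assert above.
static bool is_resolved(const CacheEntryModel& e, int bytecode, int byte_no,
                        bool is_invokedynamic) {
  if (is_invokedynamic) {
    return e.f1 != 0;                                  // non-null CallSite => resolved
  }
  const int shift_count = (1 + byte_no) * 8;           // BitsPerByte == 8
  const int recorded = int((e.indices >> shift_count) & 0xFF);
  return recorded == bytecode;                         // this bytecode already recorded?
}

When the test fails, the stub falls through to the call_VM above, which reaches resolve_invokedynamic for the new bytecode and the existing resolve_get_put / resolve_invoke entries for everything else.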
@@ -2882,35 +2887,47 @@
   // implemented elsewhere
   ShouldNotReachHere();
 }


-void TemplateTable::prepare_invoke(Register method, Register index, int byte_no, Bytecodes::Code code) {
+void TemplateTable::prepare_invoke(Register method, Register index, int byte_no) {
+  bool is_invdyn_bootstrap = (byte_no < 0);
+  if (is_invdyn_bootstrap) byte_no = -byte_no;
+
   // determine flags
+  Bytecodes::Code code = bytecode();
   const bool is_invokeinterface = code == Bytecodes::_invokeinterface;
+  const bool is_invokedynamic = code == Bytecodes::_invokedynamic;
   const bool is_invokevirtual = code == Bytecodes::_invokevirtual;
   const bool is_invokespecial = code == Bytecodes::_invokespecial;
-  const bool load_receiver = code != Bytecodes::_invokestatic;
+  const bool load_receiver = (code != Bytecodes::_invokestatic && code != Bytecodes::_invokedynamic);
   const bool receiver_null_check = is_invokespecial;
   const bool save_flags = is_invokeinterface || is_invokevirtual;
   // setup registers & access constant pool cache
   const Register recv = rcx;
   const Register flags = rdx;
   assert_different_registers(method, index, recv, flags);

+  assert(!is_invdyn_bootstrap || is_invokedynamic, "byte_no<0 hack only for invdyn");
+
   // save 'interpreter return address'
   __ save_bcp();

   load_invoke_cp_cache_entry(byte_no, method, index, flags, is_invokevirtual);

   // load receiver if needed (note: no return address pushed yet)
   if (load_receiver) {
     __ movl(recv, flags);
     __ andl(recv, 0xFF);
     // recv count is 0 based?
-    __ movptr(recv, Address(rsp, recv, Interpreter::stackElementScale(), -Interpreter::expr_offset_in_bytes(1)));
-    __ verify_oop(recv);
+    Address recv_addr(rsp, recv, Interpreter::stackElementScale(), -Interpreter::expr_offset_in_bytes(1));
+    if (is_invokedynamic) {
+      __ lea(recv, recv_addr);
+    } else {
+      __ movptr(recv, recv_addr);
+      __ verify_oop(recv);
+    }
   }

   // do null check if needed
   if (receiver_null_check) {
     __ null_check(recv);
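The byte_no < 0 convention added at the top of prepare_invoke is easy to miss: a negative value is not a different constant-pool cache index, it flags the bootstrap re-entry that invokedynamic performs further down with prepare_invoke(rax, rbx, -byte_no). A hedged standalone sketch of the decoding; the InvokeKind struct is hypothetical and exists only to name the two results:

#include <cassert>

// Hypothetical helper mirroring the byte_no < 0 convention in prepare_invoke:
// negative values mean "invokedynamic bootstrap re-entry" and carry the real
// byte_no in their magnitude.
struct InvokeKind {
  int  byte_no;              // positive after decoding (1 or 2)
  bool is_invdyn_bootstrap;  // true only for the re-entry from invokedynamic
};

static InvokeKind decode_byte_no(int raw_byte_no, bool is_invokedynamic) {
  InvokeKind k;
  k.is_invdyn_bootstrap = (raw_byte_no < 0);
  k.byte_no = k.is_invdyn_bootstrap ? -raw_byte_no : raw_byte_no;
  // mirrors: assert(!is_invdyn_bootstrap || is_invokedynamic, "byte_no<0 hack only for invdyn")
  assert(!k.is_invdyn_bootstrap || is_invokedynamic);
  return k;
}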
@@ -2924,12 +2941,18 @@
   __ shrl(flags, ConstantPoolCacheEntry::tosBits);
   // Make sure we don't need to mask flags for tosBits after the above shift
   ConstantPoolCacheEntry::verify_tosBits();
   // load return address
   {
-    ExternalAddress table(is_invokeinterface ? (address)Interpreter::return_5_addrs_by_index_table() :
-                                               (address)Interpreter::return_3_addrs_by_index_table());
+    address table_addr;
+    if (is_invdyn_bootstrap)
+      table_addr = (address)Interpreter::return_5_unbox_addrs_by_index_table();
+    else if (is_invokeinterface || is_invokedynamic)
+      table_addr = (address)Interpreter::return_5_addrs_by_index_table();
+    else
+      table_addr = (address)Interpreter::return_3_addrs_by_index_table();
+    ExternalAddress table(table_addr);
     __ movptr(flags, ArrayAddress(table, Address(noreg, flags, Address::times_ptr)));
   }

   // push return address
   __ push(flags);
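The replacement of the two-way ternary with a three-way choice of return-address table is the part of this hunk worth reading twice: invokeinterface and invokedynamic are both 5-byte instructions, so they share the return_5 table, and only the bootstrap re-entry uses the new unboxing variant. A small self-contained sketch of the selection, using an enum instead of the real Interpreter table pointers (illustrative only):

// Which interpreter return-address table the generated code indexes by TOS state.
enum class ReturnTable { kReturn5Unbox, kReturn5, kReturn3 };

// Sketch of the three-way selection made when loading the return address.
static ReturnTable select_return_table(bool is_invdyn_bootstrap,
                                       bool is_invokeinterface,
                                       bool is_invokedynamic) {
  if (is_invdyn_bootstrap)                    return ReturnTable::kReturn5Unbox;  // boxed-argument bootstrap path
  if (is_invokeinterface || is_invokedynamic) return ReturnTable::kReturn5;       // 5-byte invokes
  return ReturnTable::kReturn3;                                                   // 3-byte invokes
}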
@@ -2988,11 +3011,11 @@
 }


 void TemplateTable::invokevirtual(int byte_no) {
   transition(vtos, vtos);
-  prepare_invoke(rbx, noreg, byte_no, bytecode());
+  prepare_invoke(rbx, noreg, byte_no);

   // rbx,: index
   // rcx: receiver
   // rdx: flags

@@ -3000,21 +3023,21 @@
 }


 void TemplateTable::invokespecial(int byte_no) {
   transition(vtos, vtos);
-  prepare_invoke(rbx, noreg, byte_no, bytecode());
+  prepare_invoke(rbx, noreg, byte_no);
   // do the call
   __ verify_oop(rbx);
   __ profile_call(rax);
   __ jump_from_interpreted(rbx, rax);
 }


 void TemplateTable::invokestatic(int byte_no) {
   transition(vtos, vtos);
-  prepare_invoke(rbx, noreg, byte_no, bytecode());
+  prepare_invoke(rbx, noreg, byte_no);
   // do the call
   __ verify_oop(rbx);
   __ profile_call(rax);
   __ jump_from_interpreted(rbx, rax);
 }
@@ -3026,11 +3049,11 @@
 }


 void TemplateTable::invokeinterface(int byte_no) {
   transition(vtos, vtos);
-  prepare_invoke(rax, rbx, byte_no, bytecode());
+  prepare_invoke(rax, rbx, byte_no);

   // rax,: Interface
   // rbx,: index
   // rcx: receiver
   // rdx: flags
@@ -3098,10 +3121,88 @@
   __ restore_locals(); // make sure locals pointer is correct as well (was destroyed)
   __ call_VM(noreg, CAST_FROM_FN_PTR(address,
                     InterpreterRuntime::throw_IncompatibleClassChangeError));
   // the call_VM checks for exception, so we should never return here.
   __ should_not_reach_here();
+}
+
+void TemplateTable::invokedynamic(int byte_no) {
+  transition(vtos, vtos);
+
+  if (!EnableInvokeDynamic) {
+    // We should not encounter this bytecode if !EnableInvokeDynamic.
+    // The verifier will stop it. However, if we get past the verifier,
+    // this will stop the thread in a reasonable way, without crashing the JVM.
+    __ call_VM(noreg, CAST_FROM_FN_PTR(address,
+                      InterpreterRuntime::throw_IncompatibleClassChangeError));
+    // the call_VM checks for exception, so we should never return here.
+    __ should_not_reach_here();
+    return;
+  }
+
+  prepare_invoke(rax, rbx, byte_no);
+
+  // rax: CallSite object (f1)
+  // rbx: unused (f2)
+  // rcx: receiver address
+  // rdx: flags (unused)
+
+  if (ProfileInterpreter) {
+    Label L;
+    // %%% should make a type profile for any invokedynamic that takes a ref argument
+    // profile this call
+    __ profile_call(rsi);
+  }
+
+  Label handle_unlinked_site;
+  __ movptr(rcx, Address(rax, __ delayed_value(sun_dyn_CallSiteImpl::target_offset_in_bytes, rcx)));
+  __ testptr(rcx, rcx);
+  __ jcc(Assembler::zero, handle_unlinked_site);
+
+  __ prepare_to_jump_from_interpreted();
+  __ jump_to_method_handle_entry(rcx, rdx);
+
+  // Initial calls come here...
+  __ bind(handle_unlinked_site);
+  __ pop(rcx);             // remove return address pushed by prepare_invoke
+
+  // box stacked arguments into an array for the bootstrap method
+  address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::bootstrap_invokedynamic);
+  __ restore_bcp();        // rsi must be correct for call_VM
+  __ call_VM(rax, entry, rax);
+  __ movl(rdi, rax);       // protect bootstrap MH from prepare_invoke
+
+  // recompute return address
+  __ restore_bcp();        // rsi must be correct for prepare_invoke
+  prepare_invoke(rax, rbx, -byte_no);  // smashes rcx, rdx
+  // rax: CallSite object (f1)
+  // rbx: unused (f2)
+  // rdi: bootstrap MH
+  // rdx: flags
+
+  // now load up the arglist, which has been neatly boxed
+  __ get_thread(rcx);
+  __ movptr(rdx, Address(rcx, JavaThread::vm_result_2_offset()));
+  __ movptr(Address(rcx, JavaThread::vm_result_2_offset()), NULL_WORD);
+  __ verify_oop(rdx);
+  // rdx = arglist
+
+  // save SP now, before we add the bootstrap call to the stack
+  // We must preserve a fiction that the original arguments are outgoing,
+  // because the return sequence will reset the stack to this point
+  // and then pop all those arguments. It seems error-prone to use
+  // a different argument list size just for bootstrapping.
+  __ prepare_to_jump_from_interpreted();
+
+  // Now let's play adapter, pushing the real arguments on the stack.
+  __ pop(rbx);        // return PC
+  __ push(rdi);       // boot MH
+  __ push(rax);       // call site
+  __ push(rdx);       // arglist
+  __ push(rbx);       // return PC, again
+  __ mov(rcx, rdi);
+  __ jump_to_method_handle_entry(rcx, rdx);
 }

 //----------------------------------------------------------------------------------------------------
 // Allocation

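Stepping back from the assembly, the new invokedynamic template has two runtime paths: a linked call site jumps straight to its target method handle, and an unlinked one traps into the runtime to box the stacked arguments and fetch the bootstrap method handle, which is then entered with the call site and the boxed argument list. A heavily simplified, self-contained C++ sketch of that control flow; every type and helper below is a hypothetical stand-in (CallSite models sun.dyn.CallSiteImpl, bootstrap_upcall models the InterpreterRuntime::bootstrap_invokedynamic upcall), not HotSpot code:

#include <cstdio>

struct MethodHandle { const char* name; };          // stand-in for a method handle oop
struct CallSite     { MethodHandle* target; };      // stand-in for sun.dyn.CallSiteImpl

static void jump_to_method_handle(MethodHandle* mh) { std::printf("enter %s\n", mh->name); }
static MethodHandle* bootstrap_upcall(CallSite*)    { static MethodHandle bsm = {"bootstrap"}; return &bsm; }

// Sketch of the two paths the generated stub takes.
static void invokedynamic_flow(CallSite* site) {
  if (site->target != nullptr) {
    // Linked call site: tail-call its current target (the fast path above).
    jump_to_method_handle(site->target);
  } else {
    // Unlinked call site: the runtime boxes the outgoing arguments and returns
    // the bootstrap method handle; the stub then re-enters the interpreter's
    // invoke machinery with (bootstrap MH, call site, boxed arglist) stacked.
    MethodHandle* boot_mh = bootstrap_upcall(site);
    jump_to_method_handle(boot_mh);
  }
}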