comparison src/cpu/x86/vm/templateTable_x86_32.cpp @ 6266:1d7922586cf6

7023639: JSR 292 method handle invocation needs a fast path for compiled code
6984705: JSR 292 method handle creation should not go through JNI
Summary: remove assembly code for JDK 7 chained method handles
Reviewed-by: jrose, twisti, kvn, mhaupt
Contributed-by: John Rose <john.r.rose@oracle.com>, Christian Thalinger <christian.thalinger@oracle.com>, Michael Haupt <michael.haupt@oracle.com>
author twisti
date Tue, 24 Jul 2012 10:51:00 -0700
parents 19e197e2a1af
children da91efe96a93
comparing 6241:aba91a731143 with 6266:1d7922586cf6
--- a/src/cpu/x86/vm/templateTable_x86_32.cpp
+++ b/src/cpu/x86/vm/templateTable_x86_32.cpp
@@ -444 +444 @@
   }
 
   const Register cache = rcx;
   const Register index = rdx;
 
-  resolve_cache_and_index(f1_oop, rax, cache, index, wide ? sizeof(u2) : sizeof(u1));
+  resolve_cache_and_index(f12_oop, rax, cache, index, wide ? sizeof(u2) : sizeof(u1));
   if (VerifyOops) {
     __ verify_oop(rax);
   }
 
   Label L_done, L_throw_exception;
-  const Register con_klass_temp = rcx;  // same as Rcache
+  const Register con_klass_temp = rcx;  // same as cache
   __ load_klass(con_klass_temp, rax);
   __ cmpptr(con_klass_temp, ExternalAddress((address)Universe::systemObjArrayKlassObj_addr()));
   __ jcc(Assembler::notEqual, L_done);
   __ cmpl(Address(rax, arrayOopDesc::length_offset_in_bytes()), 0);
   __ jcc(Assembler::notEqual, L_throw_exception);
@@ -2082 +2082 @@
 void TemplateTable::resolve_cache_and_index(int byte_no,
                                             Register result,
                                             Register Rcache,
                                             Register index,
                                             size_t index_size) {
-  Register temp = rbx;
-
+  const Register temp = rbx;
   assert_different_registers(result, Rcache, index, temp);
 
   Label resolved;
-  if (byte_no == f1_oop) {
-    // We are resolved if the f1 field contains a non-null object (CallSite, etc.)
-    // This kind of CP cache entry does not need to match the flags byte, because
+  if (byte_no == f12_oop) {
+    // We are resolved if the f1 field contains a non-null object (CallSite, MethodType, etc.)
+    // This kind of CP cache entry does not need to match bytecode_1 or bytecode_2, because
     // there is a 1-1 relation between bytecode type and CP entry type.
+    // The caller will also load a methodOop from f2.
     assert(result != noreg, "");  // else do cmpptr(Address(...), (int32_t) NULL_WORD)
     __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size);
     __ movptr(result, Address(Rcache, index, Address::times_ptr, constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::f1_offset()));
     __ testptr(result, result);
     __ jcc(Assembler::notEqual, resolved);
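The comments in this hunk pin down the new resolution invariant for f12_oop entries: such an entry is resolved exactly when its f1 field holds a non-null oop, so no bytecode byte needs to be compared. A minimal stand-alone C++ sketch of that check; the struct is an illustrative stand-in, not HotSpot's real ConstantPoolCacheEntry layout:

```cpp
#include <cstdio>

// Hypothetical model of a constant pool cache entry; the real
// ConstantPoolCacheEntry lives in cpCacheOop.hpp and differs in layout.
struct CpCacheEntry {
  void*    f1;    // resolved oop (CallSite, MethodType, ...) or null
  void*    f2;    // methodOop invoker once resolved
  unsigned flags; // tos state, parameter size, option bits
};

// For f12_oop-style entries there is a 1-1 relation between bytecode and
// entry, so a non-null f1 alone means "resolved" -- this mirrors the
// testptr/jcc(notEqual, resolved) sequence in the hunk above.
static bool is_resolved_f12(const CpCacheEntry& e) {
  return e.f1 != nullptr;
}

int main() {
  CpCacheEntry e = { nullptr, nullptr, 0 };
  std::printf("resolved? %d\n", is_resolved_f12(e)); // 0: would call into the VM to resolve
  int dummy;
  e.f1 = &dummy;                                     // VM stored the resolved oop
  std::printf("resolved? %d\n", is_resolved_f12(e)); // 1: fast path taken
}
```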
@@ -2110 +2110 @@
   address entry;
   switch (bytecode()) {
     case Bytecodes::_getstatic      : // fall through
     case Bytecodes::_putstatic      : // fall through
     case Bytecodes::_getfield       : // fall through
     case Bytecodes::_putfield       : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_get_put); break;
     case Bytecodes::_invokevirtual  : // fall through
     case Bytecodes::_invokespecial  : // fall through
     case Bytecodes::_invokestatic   : // fall through
     case Bytecodes::_invokeinterface: entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invoke); break;
+    case Bytecodes::_invokehandle   : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokehandle); break;
     case Bytecodes::_invokedynamic  : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokedynamic); break;
     case Bytecodes::_fast_aldc      : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_ldc); break;
     case Bytecodes::_fast_aldc_w    : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_ldc); break;
-    default                         : ShouldNotReachHere(); break;
+    default:
+      fatal(err_msg("unexpected bytecode: %s", Bytecodes::name(bytecode())));
+      break;
   }
   __ movl(temp, (int)bytecode());
   __ call_VM(noreg, entry, temp);
   // Update registers with resolved info
   __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size);
@@ -2147 +2150 @@
                          in_bytes(cp_base_offset + ConstantPoolCacheEntry::f2_offset())));
   // Flags
   __ movl(flags, Address(cache, index, Address::times_ptr,
                          in_bytes(cp_base_offset + ConstantPoolCacheEntry::flags_offset())));
 
   // klass overwrite register
   if (is_static) {
     __ movptr(obj, Address(cache, index, Address::times_ptr,
                            in_bytes(cp_base_offset + ConstantPoolCacheEntry::f1_offset())));
   }
 }
@@ -2159 +2162 @@
 void TemplateTable::load_invoke_cp_cache_entry(int byte_no,
                                                Register method,
                                                Register itable_index,
                                                Register flags,
                                                bool is_invokevirtual,
-                                               bool is_invokevfinal /*unused*/,
+                                               bool is_invokevfinal, /*unused*/
                                                bool is_invokedynamic) {
   // setup registers
   const Register cache = rcx;
   const Register index = rdx;
   assert_different_registers(method, flags);
   assert_different_registers(method, cache, index);
   assert_different_registers(itable_index, flags);
   assert_different_registers(itable_index, cache, index);
   // determine constant pool cache field offsets
+  assert(is_invokevirtual == (byte_no == f2_byte), "is_invokevirtual flag redundant");
   const int method_offset = in_bytes(
     constantPoolCacheOopDesc::base_offset() +
-      (is_invokevirtual
+      ((byte_no == f2_byte)
        ? ConstantPoolCacheEntry::f2_offset()
-       : ConstantPoolCacheEntry::f1_offset()
-      )
-    );
+       : ConstantPoolCacheEntry::f1_offset()));
   const int flags_offset = in_bytes(constantPoolCacheOopDesc::base_offset() +
                                     ConstantPoolCacheEntry::flags_offset());
   // access constant pool cache fields
   const int index_offset = in_bytes(constantPoolCacheOopDesc::base_offset() +
                                     ConstantPoolCacheEntry::f2_offset());
 
-  if (byte_no == f1_oop) {
-    // Resolved f1_oop goes directly into 'method' register.
-    assert(is_invokedynamic, "");
-    resolve_cache_and_index(byte_no, method, cache, index, sizeof(u4));
+  if (byte_no == f12_oop) {
+    // Resolved f1_oop (CallSite, MethodType, etc.) goes into 'itable_index'.
+    // Resolved f2_oop (methodOop invoker) will go into 'method' (at index_offset).
+    // See ConstantPoolCacheEntry::set_dynamic_call and set_method_handle.
+    size_t index_size = (is_invokedynamic ? sizeof(u4) : sizeof(u2));
+    resolve_cache_and_index(byte_no, itable_index, cache, index, index_size);
+    __ movptr(method, Address(cache, index, Address::times_ptr, index_offset));
+    itable_index = noreg;  // hack to disable load below
   } else {
     resolve_cache_and_index(byte_no, noreg, cache, index, sizeof(u2));
     __ movptr(method, Address(cache, index, Address::times_ptr, method_offset));
   }
   if (itable_index != noreg) {
+    // pick up itable index from f2 also:
+    assert(byte_no == f1_byte, "already picked up f1");
     __ movptr(itable_index, Address(cache, index, Address::times_ptr, index_offset));
   }
   __ movl(flags, Address(cache, index, Address::times_ptr, flags_offset));
 }
 
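To keep the three byte_no cases straight, here is a small runnable model of the word selection this function now performs. The enum values are placeholders; only the routing mirrors the diff: under f12_oop, f2 feeds 'method' and the f1 oop feeds 'itable_index', otherwise byte_no picks f1 or f2 for 'method':

```cpp
#include <cassert>

// Illustrative stand-ins for the byte_no values used in the diff
// (f1_byte, f2_byte, f12_oop); the real values live in TemplateTable.
enum ByteNo { f1_byte = 1, f2_byte = 2, f12_oop = 0x34 };

struct Entry { void* f1; void* f2; };

// Which word of the entry feeds the 'method' register? This mirrors the
// method_offset selection ((byte_no == f2_byte) ? f2_offset : f1_offset)
// plus the new f12_oop branch, where 'method' is loaded from f2 and the
// f1 oop (CallSite, MethodType, ...) goes to 'itable_index' instead.
static void* method_word(const Entry& e, ByteNo byte_no) {
  if (byte_no == f12_oop) return e.f2;  // invoker methodOop
  return (byte_no == f2_byte) ? e.f2 : e.f1;
}

int main() {
  int a, b;
  Entry e = { &a, &b };
  assert(method_word(e, f1_byte) == &a);
  assert(method_word(e, f2_byte) == &b);
  assert(method_word(e, f12_oop) == &b);  // f1 (&a) would go to itable_index
}
```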
@@ -2258 +2266 @@
   const Address lo(obj, off, Address::times_1, 0*wordSize);
   const Address hi(obj, off, Address::times_1, 1*wordSize);
 
   Label Done, notByte, notInt, notShort, notChar, notLong, notFloat, notObj, notDouble;
 
-  __ shrl(flags, ConstantPoolCacheEntry::tosBits);
+  __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
   assert(btos == 0, "change code, btos != 0");
   // btos
-  __ andptr(flags, 0x0f);
+  __ andptr(flags, ConstantPoolCacheEntry::tos_state_mask);
   __ jcc(Assembler::notZero, notByte);
 
   __ load_signed_byte(rax, lo );
   __ push(btos);
   // Rewrite bytecode to be faster
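The tosBits to tos_state_shift / tos_state_mask renaming makes the flags decoding explicit: the type state sits in the top bits of the flags word, so a shift followed by a four-bit mask recovers it, and btos == 0 lets the byte case fall through without a compare. The later hunks extract single option bits (is_volatile, is_vfinal, is_forced_virtual, has_appendix) from the same word with the same shift-and-mask pattern. A sketch with assumed bit positions; the real constants live in ConstantPoolCacheEntry and may differ:

```cpp
#include <cassert>
#include <cstdint>

// Hypothetical bit positions -- placeholders, not HotSpot's real values.
enum {
  tos_state_shift = 28,
  tos_state_mask  = 0x0f,  // same role as the old literal 0x0f
};

// Mirrors the shrl/andptr pair: shift the type state down from the top
// of the flags word, then mask to the 4-bit state.
static uint32_t tos_state(uint32_t flags) {
  return (flags >> tos_state_shift) & tos_state_mask;
}

int main() {
  const uint32_t btos = 0;  // the asserted invariant: btos == 0
  uint32_t flags = (2u << tos_state_shift) | 0x5;  // state 2, other bits set
  assert(tos_state(flags) == 2);
  assert(tos_state(0) == btos);  // a zero state falls through to the btos path
}
```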
@@ -2413 +2421 @@
   // the object is.
   Label two_word, valsize_known;
   __ movl(rcx, Address(rax, rdx, Address::times_ptr, in_bytes(cp_base_offset +
                        ConstantPoolCacheEntry::flags_offset())));
   __ mov(rbx, rsp);
-  __ shrl(rcx, ConstantPoolCacheEntry::tosBits);
-  // Make sure we don't need to mask rcx for tosBits after the above shift
-  ConstantPoolCacheEntry::verify_tosBits();
+  __ shrl(rcx, ConstantPoolCacheEntry::tos_state_shift);
+  // Make sure we don't need to mask rcx after the above shift
+  ConstantPoolCacheEntry::verify_tos_state_shift();
   __ cmpl(rcx, ltos);
   __ jccb(Assembler::equal, two_word);
   __ cmpl(rcx, dtos);
   __ jccb(Assembler::equal, two_word);
   __ addptr(rbx, Interpreter::expr_offset_in_bytes(1));  // one word jvalue (not ltos, dtos)
@@ -2465 +2473 @@
   // Doug Lea believes this is not needed with current Sparcs (TSO) and Intel (PSO).
   // volatile_barrier( );
 
   Label notVolatile, Done;
   __ movl(rdx, flags);
-  __ shrl(rdx, ConstantPoolCacheEntry::volatileField);
+  __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
   __ andl(rdx, 0x1);
 
   // field addresses
   const Address lo(obj, off, Address::times_1, 0*wordSize);
   const Address hi(obj, off, Address::times_1, 1*wordSize);
 
   Label notByte, notInt, notShort, notChar, notLong, notFloat, notObj, notDouble;
 
-  __ shrl(flags, ConstantPoolCacheEntry::tosBits);
+  __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
   assert(btos == 0, "change code, btos != 0");
-  __ andl(flags, 0x0f);
+  __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
   __ jcc(Assembler::notZero, notByte);
 
   // btos
   {
     __ pop(btos);
@@ -2717 +2725 @@
 
   // Doug Lea believes this is not needed with current Sparcs (TSO) and Intel (PSO).
   // volatile_barrier( );
 
   Label notVolatile, Done;
-  __ shrl(rdx, ConstantPoolCacheEntry::volatileField);
+  __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
   __ andl(rdx, 0x1);
   // Check for volatile store
   __ testl(rdx, rdx);
   __ jcc(Assembler::zero, notVolatile);
 
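Both putfield paths test the same is_volatile bit; only where it comes from differs. The barrier question the Doug Lea comments raise: on x86 (TSO) volatile loads need no extra fence, but a volatile store must still be followed by a StoreLoad barrier, which the interpreter emits after the store. A portable sketch of that ordering rule, using std::atomic as an analogue rather than HotSpot's actual barrier code:

```cpp
#include <atomic>

std::atomic<int> guard{0};
int payload = 0;

// Java-volatile store semantics: the store must not be reordered with
// any later load -- hence the StoreLoad fence after the store, the only
// barrier x86-TSO actually requires on this path.
void volatile_store(int v) {
  payload = v;
  guard.store(1, std::memory_order_release);            // the field store
  std::atomic_thread_fence(std::memory_order_seq_cst);  // StoreLoad barrier
}

int main() {
  volatile_store(42);
  return guard.load(std::memory_order_acquire) == 1 ? 0 : 1;
}
```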
@@ -2883 +2891 @@
   // implemented elsewhere
   ShouldNotReachHere();
 }
 
 
-void TemplateTable::prepare_invoke(Register method, Register index, int byte_no) {
+void TemplateTable::prepare_invoke(int byte_no,
+                                   Register method,  // linked method (or i-klass)
+                                   Register index,   // itable index, MethodType, etc.
+                                   Register recv,    // if caller wants to see it
+                                   Register flags    // if caller wants to test it
+                                   ) {
   // determine flags
-  Bytecodes::Code code = bytecode();
+  const Bytecodes::Code code = bytecode();
   const bool is_invokeinterface = code == Bytecodes::_invokeinterface;
   const bool is_invokedynamic   = code == Bytecodes::_invokedynamic;
+  const bool is_invokehandle    = code == Bytecodes::_invokehandle;
   const bool is_invokevirtual   = code == Bytecodes::_invokevirtual;
   const bool is_invokespecial   = code == Bytecodes::_invokespecial;
-  const bool load_receiver      = (code != Bytecodes::_invokestatic && code != Bytecodes::_invokedynamic);
-  const bool receiver_null_check = is_invokespecial;
-  const bool save_flags = is_invokeinterface || is_invokevirtual;
+  const bool load_receiver      = (recv  != noreg);
+  const bool save_flags         = (flags != noreg);
+  assert(load_receiver == (code != Bytecodes::_invokestatic && code != Bytecodes::_invokedynamic), "");
+  assert(save_flags == (is_invokeinterface || is_invokevirtual), "need flags for vfinal");
+  assert(flags == noreg || flags == rdx, "");
+  assert(recv  == noreg || recv  == rcx, "");
+
   // setup registers & access constant pool cache
-  const Register recv  = rcx;
-  const Register flags = rdx;
+  if (recv  == noreg)  recv  = rcx;
+  if (flags == noreg)  flags = rdx;
   assert_different_registers(method, index, recv, flags);
 
   // save 'interpreter return address'
   __ save_bcp();
 
   load_invoke_cp_cache_entry(byte_no, method, index, flags, is_invokevirtual, false, is_invokedynamic);
 
+  // maybe push appendix to arguments (just before return address)
+  if (is_invokedynamic || is_invokehandle) {
+    Label L_no_push;
+    __ verify_oop(index);
+    __ testl(flags, (1 << ConstantPoolCacheEntry::has_appendix_shift));
+    __ jccb(Assembler::zero, L_no_push);
+    // Push the appendix as a trailing parameter.
+    // This must be done before we get the receiver,
+    // since the parameter_size includes it.
+    __ push(index);  // push appendix (MethodType, CallSite, etc.)
+    __ bind(L_no_push);
+  }
+
   // load receiver if needed (note: no return address pushed yet)
   if (load_receiver) {
-    assert(!is_invokedynamic, "");
     __ movl(recv, flags);
-    __ andl(recv, 0xFF);
-    // recv count is 0 based?
-    Address recv_addr(rsp, recv, Interpreter::stackElementScale(), -Interpreter::expr_offset_in_bytes(1));
+    __ andl(recv, ConstantPoolCacheEntry::parameter_size_mask);
+    const int no_return_pc_pushed_yet = -1;  // argument slot correction before we push return address
+    const int receiver_is_at_end      = -1;  // back off one slot to get receiver
+    Address recv_addr = __ argument_address(recv, no_return_pc_pushed_yet + receiver_is_at_end);
     __ movptr(recv, recv_addr);
     __ verify_oop(recv);
   }
 
-  // do null check if needed
-  if (receiver_null_check) {
-    __ null_check(recv);
-  }
-
   if (save_flags) {
     __ mov(rsi, flags);
   }
 
   // compute return type
-  __ shrl(flags, ConstantPoolCacheEntry::tosBits);
-  // Make sure we don't need to mask flags for tosBits after the above shift
-  ConstantPoolCacheEntry::verify_tosBits();
+  __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
+  // Make sure we don't need to mask flags after the above shift
+  ConstantPoolCacheEntry::verify_tos_state_shift();
   // load return address
   {
-    address table_addr;
-    if (is_invokeinterface || is_invokedynamic)
-      table_addr = (address)Interpreter::return_5_addrs_by_index_table();
-    else
-      table_addr = (address)Interpreter::return_3_addrs_by_index_table();
+    const address table_addr = (is_invokeinterface || is_invokedynamic) ?
+        (address)Interpreter::return_5_addrs_by_index_table() :
+        (address)Interpreter::return_3_addrs_by_index_table();
     ExternalAddress table(table_addr);
     __ movptr(flags, ArrayAddress(table, Address(noreg, flags, Address::times_ptr)));
   }
 
   // push return address
   __ push(flags);
 
-  // Restore flag value from the constant pool cache, and restore rsi
+  // Restore flags value from the constant pool cache, and restore rsi
   // for later null checks.  rsi is the bytecode pointer
   if (save_flags) {
     __ mov(flags, rsi);
     __ restore_bcp();
   }
 }
 
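The new appendix logic is the subtle part of prepare_invoke: when the has_appendix bit is set, the resolved oop in 'index' is pushed as a hidden trailing argument before the receiver is located, because the parameter_size recorded in the low flags bits already counts that slot. A toy stack model of the ordering constraint; slot arithmetic is illustrative, the real work is argument_address above:

```cpp
#include <cassert>
#include <string>
#include <vector>

// Toy expression stack growing toward higher indices; slot 0 is the
// deepest element. Real layout and the two -1 slot corrections are in
// the interpreter; this only shows why the push must come first.
int main() {
  std::vector<std::string> stack = { "receiver", "arg0" };

  // Push the appendix (MethodType/CallSite) as a trailing parameter
  // *before* locating the receiver, since parameter_size includes it.
  stack.push_back("appendix");

  const int parameter_size = 3;  // from the low bits of flags, appendix included
  // the receiver sits parameter_size slots down from the top of stack
  const std::string& recv = stack[stack.size() - parameter_size];
  assert(recv == "receiver");
}
```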
@@ -2954 +2978 @@
 
-void TemplateTable::invokevirtual_helper(Register index, Register recv,
-                                         Register flags) {
+void TemplateTable::invokevirtual_helper(Register index,
+                                         Register recv,
+                                         Register flags) {
   // Uses temporary registers rax, rdx
   assert_different_registers(index, recv, rax, rdx);
+  assert(index == rbx, "");
+  assert(recv  == rcx, "");
 
   // Test for an invoke of a final method
   Label notFinal;
   __ movl(rax, flags);
-  __ andl(rax, (1 << ConstantPoolCacheEntry::vfinalMethod));
+  __ andl(rax, (1 << ConstantPoolCacheEntry::is_vfinal_shift));
   __ jcc(Assembler::zero, notFinal);
 
-  Register method = index;  // method must be rbx,
-  assert(method == rbx, "methodOop must be rbx, for interpreter calling convention");
+  const Register method = index;  // method must be rbx
+  assert(method == rbx,
+         "methodOop must be rbx for interpreter calling convention");
 
   // do the call - the index is actually the method to call
+  // that is, f2 is a vtable index if !is_vfinal, else f2 is a methodOop
   __ verify_oop(method);
 
   // It's final, need a null check here!
   __ null_check(recv);
 
@@ -2980 +3008 @@
 
   __ bind(notFinal);
 
   // get receiver klass
   __ null_check(recv, oopDesc::klass_offset_in_bytes());
-  // Keep recv in rcx for callee expects it there
   __ load_klass(rax, recv);
   __ verify_oop(rax);
 
   // profile this call
   __ profile_virtual_call(rax, rdi, rdx);
 
   // get target methodOop & entry point
-  const int base = instanceKlass::vtable_start_offset() * wordSize;
-  assert(vtableEntry::size() * wordSize == 4, "adjust the scaling in the code below");
-  __ movptr(method, Address(rax, index, Address::times_ptr, base + vtableEntry::method_offset_in_bytes()));
+  __ lookup_virtual_method(rax, index, method);
   __ jump_from_interpreted(method, rdx);
 }
 
 
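The helper's new comment spells out the f2 overload that lookup_virtual_method now hides: for a linked invokevirtual site, f2 is either the methodOop itself (is_vfinal set) or a vtable index. A compact runnable model of that selection; bit position and types are placeholders:

```cpp
#include <cassert>
#include <cstdint>
#include <vector>

// Hypothetical flag bit; the real is_vfinal_shift is defined by
// ConstantPoolCacheEntry.
enum { is_vfinal_shift = 20 };

struct Method { int id; };

// f2 is overloaded exactly as the diff's comment says: a methodOop if
// the site was linked as vfinal, otherwise a vtable index that
// lookup_virtual_method turns into a load from the receiver's vtable.
static const Method* select_target(uint32_t flags, uintptr_t f2,
                                   const std::vector<const Method*>& vtable) {
  if ((flags >> is_vfinal_shift) & 1) {
    return reinterpret_cast<const Method*>(f2);  // f2 is the methodOop itself
  }
  return vtable[f2];  // f2 is a vtable index
}

int main() {
  Method m1{1}, m2{2};
  std::vector<const Method*> vtable = { &m1, &m2 };
  assert(select_target(0, 1, vtable) == &m2);  // vtable dispatch
  assert(select_target(1u << is_vfinal_shift,
                       reinterpret_cast<uintptr_t>(&m1), vtable) == &m1);  // vfinal
}
```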
@@ -3000 +3025 @@
 void TemplateTable::invokevirtual(int byte_no) {
   transition(vtos, vtos);
   assert(byte_no == f2_byte, "use this argument");
-  prepare_invoke(rbx, noreg, byte_no);
-
-  // rbx,: index
+  prepare_invoke(byte_no,
+                 rbx,       // method or vtable index
+                 noreg,     // unused itable index
+                 rcx, rdx); // recv, flags
+
+  // rbx: index
   // rcx: receiver
   // rdx: flags
 
   invokevirtual_helper(rbx, rcx, rdx);
 }
 
 
 void TemplateTable::invokespecial(int byte_no) {
   transition(vtos, vtos);
   assert(byte_no == f1_byte, "use this argument");
-  prepare_invoke(rbx, noreg, byte_no);
+  prepare_invoke(byte_no, rbx, noreg,  // get f1 methodOop
+                 rcx);  // get receiver also for null check
+  __ verify_oop(rcx);
+  __ null_check(rcx);
   // do the call
   __ verify_oop(rbx);
   __ profile_call(rax);
   __ jump_from_interpreted(rbx, rax);
 }
 
 
 void TemplateTable::invokestatic(int byte_no) {
   transition(vtos, vtos);
   assert(byte_no == f1_byte, "use this argument");
-  prepare_invoke(rbx, noreg, byte_no);
+  prepare_invoke(byte_no, rbx);  // get f1 methodOop
   // do the call
   __ verify_oop(rbx);
   __ profile_call(rax);
   __ jump_from_interpreted(rbx, rax);
 }
@@ -3040 +3071 @@
 
 
 void TemplateTable::invokeinterface(int byte_no) {
   transition(vtos, vtos);
   assert(byte_no == f1_byte, "use this argument");
-  prepare_invoke(rax, rbx, byte_no);
-
-  // rax,: Interface
-  // rbx,: index
+  prepare_invoke(byte_no, rax, rbx,  // get f1 klassOop, f2 itable index
+                 rcx, rdx); // recv, flags
+
+  // rax: interface klass (from f1)
+  // rbx: itable index (from f2)
   // rcx: receiver
   // rdx: flags
 
   // Special case of invokeinterface called for virtual method of
   // java.lang.Object.  See cpCacheOop.cpp for details.
   // This code isn't produced by javac, but could be produced by
   // another compliant java compiler.
   Label notMethod;
   __ movl(rdi, rdx);
-  __ andl(rdi, (1 << ConstantPoolCacheEntry::methodInterface));
+  __ andl(rdi, (1 << ConstantPoolCacheEntry::is_forced_virtual_shift));
   __ jcc(Assembler::zero, notMethod);
 
   invokevirtual_helper(rbx, rcx, rdx);
   __ bind(notMethod);
 
   // Get receiver klass into rdx - also a null check
   __ restore_locals();  // restore rdi
+  __ null_check(rcx, oopDesc::klass_offset_in_bytes());
   __ load_klass(rdx, rcx);
   __ verify_oop(rdx);
 
   // profile this call
   __ profile_virtual_call(rdx, rsi, rdi);
@@ -3075 +3108 @@
                              rdx, rax, rbx,
                              // outputs: method, scan temp. reg
                              rbx, rsi,
                              no_such_interface);
 
-  // rbx,: methodOop to call
+  // rbx: methodOop to call
   // rcx: receiver
   // Check for abstract method error
   // Note: This should be done more efficiently via a throw_abstract_method_error
   // interpreter entry point and a conditional jump to it in case of a null
   // method.
@@ -3114 +3147 @@
                    InterpreterRuntime::throw_IncompatibleClassChangeError));
   // the call_VM checks for exception, so we should never return here.
   __ should_not_reach_here();
 }
 
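The forced-virtual special case above handles invokeinterface applied to a java.lang.Object method (hashCode, toString, ...): the linker marks such entries, and the interpreter reroutes them through the invokevirtual path instead of an itable scan that would not find them. A small sketch of the routing decision; the bit position is a placeholder:

```cpp
#include <cassert>
#include <cstdint>

enum { is_forced_virtual_shift = 23 };  // placeholder bit position

enum Dispatch { VTABLE_DISPATCH, ITABLE_DISPATCH };

// Mirrors the movl/andl/jcc sequence: if the linker marked this
// invokeinterface site as a forced-virtual Object method, take the
// invokevirtual_helper path; otherwise do the itable lookup (which may
// end in throw_IncompatibleClassChangeError).
static Dispatch route(uint32_t flags) {
  return ((flags >> is_forced_virtual_shift) & 1) ? VTABLE_DISPATCH
                                                  : ITABLE_DISPATCH;
}

int main() {
  assert(route(1u << is_forced_virtual_shift) == VTABLE_DISPATCH);
  assert(route(0) == ITABLE_DISPATCH);
}
```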
@@ -3119 +3152 @@
+void TemplateTable::invokehandle(int byte_no) {
+  transition(vtos, vtos);
+  assert(byte_no == f12_oop, "use this argument");
+  const Register rbx_method = rbx;  // (from f2)
+  const Register rax_mtype  = rax;  // (from f1)
+  const Register rcx_recv   = rcx;
+  const Register rdx_flags  = rdx;
+
+  if (!EnableInvokeDynamic) {
+    // rewriter does not generate this bytecode
+    __ should_not_reach_here();
+    return;
+  }
+
+  prepare_invoke(byte_no,
+                 rbx_method, rax_mtype,  // get f2 methodOop, f1 MethodType
+                 rcx_recv);
+  __ verify_oop(rbx_method);
+  __ verify_oop(rcx_recv);
+  __ null_check(rcx_recv);
+
+  // Note: rax_mtype is already pushed (if necessary) by prepare_invoke
+
+  // FIXME: profile the LambdaForm also
+  __ profile_final_call(rax);
+
+  __ jump_from_interpreted(rbx_method, rdx);
+}
+
+
 void TemplateTable::invokedynamic(int byte_no) {
   transition(vtos, vtos);
-  assert(byte_no == f1_oop, "use this argument");
+  assert(byte_no == f12_oop, "use this argument");
 
   if (!EnableInvokeDynamic) {
     // We should not encounter this bytecode if !EnableInvokeDynamic.
     // The verifier will stop it.  However, if we get past the verifier,
     // this will stop the thread in a reasonable way, without crashing the JVM.
@@ -3129 +3192 @@
     // the call_VM checks for exception, so we should never return here.
     __ should_not_reach_here();
     return;
   }
 
-  prepare_invoke(rax, rbx, byte_no);
-
-  // rax: CallSite object (f1)
-  // rbx: unused (f2)
-  // rcx: receiver address
-  // rdx: flags (unused)
-
-  Register rax_callsite      = rax;
-  Register rcx_method_handle = rcx;
-
+  const Register rbx_method   = rbx;
+  const Register rax_callsite = rax;
+
+  prepare_invoke(byte_no, rbx_method, rax_callsite);
+
+  // rax: CallSite object (from f1)
+  // rbx: MH.linkToCallSite method (from f2)
+
+  // Note: rax_callsite is already pushed by prepare_invoke
+
   // %%% should make a type profile for any invokedynamic that takes a ref argument
   // profile this call
   __ profile_call(rsi);
 
   __ verify_oop(rax_callsite);
-  __ load_heap_oop(rcx_method_handle, Address(rax_callsite, __ delayed_value(java_lang_invoke_CallSite::target_offset_in_bytes, rdx)));
-  __ null_check(rcx_method_handle);
-  __ verify_oop(rcx_method_handle);
-  __ prepare_to_jump_from_interpreted();
-  __ jump_to_method_handle_entry(rcx_method_handle, rdx);
+
+  __ jump_from_interpreted(rbx_method, rdx);
 }
 
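The invokedynamic rewrite is where the changeset's fast path is most visible: the old code re-loaded CallSite.target on every execution and entered the method handle through its interpreted entry; the new code jumps straight to the MH.linkToCallSite invoker method that resolution stored in f2, passing the CallSite as the appendix. A toy before/after model; types and fields are illustrative only:

```cpp
#include <cassert>

// Illustrative stand-ins for the oops involved in an invokedynamic site.
struct MethodHandle { int id; };
struct CallSite { MethodHandle* target; };

struct CacheEntry {
  CallSite*     f1;         // resolved CallSite (pushed as the appendix)
  MethodHandle* f2_linked;  // pre-linked MH.linkToCallSite invoker (new scheme)
};

// Old scheme: every execution re-loads CallSite.target and enters the
// method handle through its interpreted entry point.
MethodHandle* dispatch_old(const CacheEntry& e) {
  return e.f1->target;
}

// New scheme: resolution stored a linked invoker method in f2; the
// interpreter jumps to it directly -- the per-call target load is gone.
MethodHandle* dispatch_new(const CacheEntry& e) {
  return e.f2_linked;
}

int main() {
  MethodHandle target{1}, invoker{2};
  CallSite cs{&target};
  CacheEntry e{&cs, &invoker};
  assert(dispatch_old(e) == &target);
  assert(dispatch_new(e) == &invoker);
}
```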
@@ -3156 +3216 @@
 //----------------------------------------------------------------------------------------------------
 // Allocation
 