Mercurial > hg > truffle
comparison src/cpu/x86/vm/templateTable_x86_64.cpp @ 1565:ab102d5d923e
6939207: refactor constant pool index processing
Summary: Factored cleanup of instruction decode which prepares for enhanced ldc semantics.
Reviewed-by: twisti
author: jrose
date: Sun, 23 May 2010 01:38:26 -0700
parents | 2338d41fbd81 |
children | e9ff18c4ace7 |
comparison legend: equal, deleted, inserted, replaced
compared revisions: 1564:61b2245abf36 (parent) → 1565:ab102d5d923e (this changeset)
2013 if (os::is_MP()) { // Not needed on single CPU | 2013 if (os::is_MP()) { // Not needed on single CPU |
2014 __ membar(order_constraint); | 2014 __ membar(order_constraint); |
2015 } | 2015 } |
2016 } | 2016 } |
2017 | 2017 |
2018 void TemplateTable::resolve_cache_and_index(int byte_no, Register Rcache, Register index) { | 2018 void TemplateTable::resolve_cache_and_index(int byte_no, |
2019 assert(byte_no == 1 || byte_no == 2, "byte_no out of range"); | 2019 Register result, |
2020 bool is_invokedynamic = (bytecode() == Bytecodes::_invokedynamic); | 2020 Register Rcache, |
2021 | 2021 Register index, |
2022 size_t index_size) { | |
2022 const Register temp = rbx; | 2023 const Register temp = rbx; |
2023 assert_different_registers(Rcache, index, temp); | 2024 assert_different_registers(result, Rcache, index, temp); |
2024 | 2025 |
2025 const int shift_count = (1 + byte_no) * BitsPerByte; | |
2026 Label resolved; | 2026 Label resolved; |
2027 __ get_cache_and_index_at_bcp(Rcache, index, 1, is_invokedynamic); | 2027 __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size); |
2028 if (is_invokedynamic) { | 2028 if (byte_no == f1_oop) { |
2029 // we are resolved if the f1 field contains a non-null CallSite object | 2029 // We are resolved if the f1 field contains a non-null object (CallSite, etc.) |
2030 __ cmpptr(Address(Rcache, index, Address::times_ptr, constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::f1_offset()), (int32_t) NULL_WORD); | 2030 // This kind of CP cache entry does not need to match the flags byte, because |
2031 // there is a 1-1 relation between bytecode type and CP entry type. | |
2032 assert(result != noreg, ""); //else do cmpptr(Address(...), (int32_t) NULL_WORD) | |
2033 __ movptr(result, Address(Rcache, index, Address::times_ptr, constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::f1_offset())); | |
2034 __ testptr(result, result); | |
2031 __ jcc(Assembler::notEqual, resolved); | 2035 __ jcc(Assembler::notEqual, resolved); |
2032 } else { | 2036 } else { |
2037 assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range"); | |
2038 assert(result == noreg, ""); //else change code for setting result | |
2039 const int shift_count = (1 + byte_no) * BitsPerByte; | |
2033 __ movl(temp, Address(Rcache, index, Address::times_ptr, constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::indices_offset())); | 2040 __ movl(temp, Address(Rcache, index, Address::times_ptr, constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::indices_offset())); |
2034 __ shrl(temp, shift_count); | 2041 __ shrl(temp, shift_count); |
2035 // have we resolved this bytecode? | 2042 // have we resolved this bytecode? |
2036 __ andl(temp, 0xFF); | 2043 __ andl(temp, 0xFF); |
2037 __ cmpl(temp, (int) bytecode()); | 2044 __ cmpl(temp, (int) bytecode()); |
2062 } | 2069 } |
2063 __ movl(temp, (int) bytecode()); | 2070 __ movl(temp, (int) bytecode()); |
2064 __ call_VM(noreg, entry, temp); | 2071 __ call_VM(noreg, entry, temp); |
2065 | 2072 |
2066 // Update registers with resolved info | 2073 // Update registers with resolved info |
2067 __ get_cache_and_index_at_bcp(Rcache, index, 1, is_invokedynamic); | 2074 __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size); |
2075 if (result != noreg) | |
2076 __ movptr(result, Address(Rcache, index, Address::times_ptr, constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::f1_offset())); | |
2068 __ bind(resolved); | 2077 __ bind(resolved); |
2069 } | 2078 } |
2070 | 2079 |
2071 // The Rcache and index registers must be set before call | 2080 // The Rcache and index registers must be set before call |
2072 void TemplateTable::load_field_cp_cache_entry(Register obj, | 2081 void TemplateTable::load_field_cp_cache_entry(Register obj, |
2098 void TemplateTable::load_invoke_cp_cache_entry(int byte_no, | 2107 void TemplateTable::load_invoke_cp_cache_entry(int byte_no, |
2099 Register method, | 2108 Register method, |
2100 Register itable_index, | 2109 Register itable_index, |
2101 Register flags, | 2110 Register flags, |
2102 bool is_invokevirtual, | 2111 bool is_invokevirtual, |
2103 bool is_invokevfinal /*unused*/) { | 2112 bool is_invokevfinal, /*unused*/ |
2113 bool is_invokedynamic) { | |
2104 // setup registers | 2114 // setup registers |
2105 const Register cache = rcx; | 2115 const Register cache = rcx; |
2106 const Register index = rdx; | 2116 const Register index = rdx; |
2107 assert_different_registers(method, flags); | 2117 assert_different_registers(method, flags); |
2108 assert_different_registers(method, cache, index); | 2118 assert_different_registers(method, cache, index); |
2118 ConstantPoolCacheEntry::flags_offset()); | 2128 ConstantPoolCacheEntry::flags_offset()); |
2119 // access constant pool cache fields | 2129 // access constant pool cache fields |
2120 const int index_offset = in_bytes(constantPoolCacheOopDesc::base_offset() + | 2130 const int index_offset = in_bytes(constantPoolCacheOopDesc::base_offset() + |
2121 ConstantPoolCacheEntry::f2_offset()); | 2131 ConstantPoolCacheEntry::f2_offset()); |
2122 | 2132 |
2123 resolve_cache_and_index(byte_no, cache, index); | 2133 if (byte_no == f1_oop) { |
2124 | 2134 // Resolved f1_oop goes directly into 'method' register. |
2125 assert(wordSize == 8, "adjust code below"); | 2135 assert(is_invokedynamic, ""); |
2126 __ movptr(method, Address(cache, index, Address::times_8, method_offset)); | 2136 resolve_cache_and_index(byte_no, method, cache, index, sizeof(u4)); |
2137 } else { | |
2138 resolve_cache_and_index(byte_no, noreg, cache, index, sizeof(u2)); | |
2139 __ movptr(method, Address(cache, index, Address::times_ptr, method_offset)); | |
2140 } | |
2127 if (itable_index != noreg) { | 2141 if (itable_index != noreg) { |
2128 __ movptr(itable_index, | 2142 __ movptr(itable_index, Address(cache, index, Address::times_ptr, index_offset)); |
2129 Address(cache, index, Address::times_8, index_offset)); | 2143 } |
2130 } | 2144 __ movl(flags, Address(cache, index, Address::times_ptr, flags_offset)); |
2131 __ movl(flags , Address(cache, index, Address::times_8, flags_offset)); | |
2132 } | 2145 } |
2133 | 2146 |
2134 | 2147 |
2135 // The registers cache and index expected to be set before call. | 2148 // The registers cache and index expected to be set before call. |
2136 // Correct values of the cache and index registers are preserved. | 2149 // Correct values of the cache and index registers are preserved. |
2185 const Register obj = c_rarg3; | 2198 const Register obj = c_rarg3; |
2186 const Register off = rbx; | 2199 const Register off = rbx; |
2187 const Register flags = rax; | 2200 const Register flags = rax; |
2188 const Register bc = c_rarg3; // uses same reg as obj, so don't mix them | 2201 const Register bc = c_rarg3; // uses same reg as obj, so don't mix them |
2189 | 2202 |
2190 resolve_cache_and_index(byte_no, cache, index); | 2203 resolve_cache_and_index(byte_no, noreg, cache, index, sizeof(u2)); |
2191 jvmti_post_field_access(cache, index, is_static, false); | 2204 jvmti_post_field_access(cache, index, is_static, false); |
2192 load_field_cp_cache_entry(obj, cache, index, off, flags, is_static); | 2205 load_field_cp_cache_entry(obj, cache, index, off, flags, is_static); |
2193 | 2206 |
2194 if (!is_static) { | 2207 if (!is_static) { |
2195 // obj is on the stack | 2208 // obj is on the stack |
2388 const Register obj = rcx; | 2401 const Register obj = rcx; |
2389 const Register off = rbx; | 2402 const Register off = rbx; |
2390 const Register flags = rax; | 2403 const Register flags = rax; |
2391 const Register bc = c_rarg3; | 2404 const Register bc = c_rarg3; |
2392 | 2405 |
2393 resolve_cache_and_index(byte_no, cache, index); | 2406 resolve_cache_and_index(byte_no, noreg, cache, index, sizeof(u2)); |
2394 jvmti_post_field_mod(cache, index, is_static); | 2407 jvmti_post_field_mod(cache, index, is_static); |
2395 load_field_cp_cache_entry(obj, cache, index, off, flags, is_static); | 2408 load_field_cp_cache_entry(obj, cache, index, off, flags, is_static); |
2396 | 2409 |
2397 // [jk] not needed currently | 2410 // [jk] not needed currently |
2398 // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore | | 2411 // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore | |
2813 assert_different_registers(method, index, recv, flags); | 2826 assert_different_registers(method, index, recv, flags); |
2814 | 2827 |
2815 // save 'interpreter return address' | 2828 // save 'interpreter return address' |
2816 __ save_bcp(); | 2829 __ save_bcp(); |
2817 | 2830 |
2818 load_invoke_cp_cache_entry(byte_no, method, index, flags, is_invokevirtual); | 2831 load_invoke_cp_cache_entry(byte_no, method, index, flags, is_invokevirtual, false, is_invokedynamic); |
2819 | 2832 |
2820 // load receiver if needed (note: no return address pushed yet) | 2833 // load receiver if needed (note: no return address pushed yet) |
2821 if (load_receiver) { | 2834 if (load_receiver) { |
2835 assert(!is_invokedynamic, ""); | |
2822 __ movl(recv, flags); | 2836 __ movl(recv, flags); |
2823 __ andl(recv, 0xFF); | 2837 __ andl(recv, 0xFF); |
2824 Address recv_addr(rsp, recv, Address::times_8, -Interpreter::expr_offset_in_bytes(1)); | 2838 Address recv_addr(rsp, recv, Address::times_8, -Interpreter::expr_offset_in_bytes(1)); |
2825 __ movptr(recv, recv_addr); | 2839 __ movptr(recv, recv_addr); |
2826 __ verify_oop(recv); | 2840 __ verify_oop(recv); |
2912 } | 2926 } |
2913 | 2927 |
2914 | 2928 |
2915 void TemplateTable::invokevirtual(int byte_no) { | 2929 void TemplateTable::invokevirtual(int byte_no) { |
2916 transition(vtos, vtos); | 2930 transition(vtos, vtos); |
2931 assert(byte_no == f2_byte, "use this argument"); | |
2917 prepare_invoke(rbx, noreg, byte_no); | 2932 prepare_invoke(rbx, noreg, byte_no); |
2918 | 2933 |
2919 // rbx: index | 2934 // rbx: index |
2920 // rcx: receiver | 2935 // rcx: receiver |
2921 // rdx: flags | 2936 // rdx: flags |
2924 } | 2939 } |
2925 | 2940 |
2926 | 2941 |
2927 void TemplateTable::invokespecial(int byte_no) { | 2942 void TemplateTable::invokespecial(int byte_no) { |
2928 transition(vtos, vtos); | 2943 transition(vtos, vtos); |
2944 assert(byte_no == f1_byte, "use this argument"); | |
2929 prepare_invoke(rbx, noreg, byte_no); | 2945 prepare_invoke(rbx, noreg, byte_no); |
2930 // do the call | 2946 // do the call |
2931 __ verify_oop(rbx); | 2947 __ verify_oop(rbx); |
2932 __ profile_call(rax); | 2948 __ profile_call(rax); |
2933 __ jump_from_interpreted(rbx, rax); | 2949 __ jump_from_interpreted(rbx, rax); |
2934 } | 2950 } |
2935 | 2951 |
2936 | 2952 |
2937 void TemplateTable::invokestatic(int byte_no) { | 2953 void TemplateTable::invokestatic(int byte_no) { |
2938 transition(vtos, vtos); | 2954 transition(vtos, vtos); |
2955 assert(byte_no == f1_byte, "use this argument"); | |
2939 prepare_invoke(rbx, noreg, byte_no); | 2956 prepare_invoke(rbx, noreg, byte_no); |
2940 // do the call | 2957 // do the call |
2941 __ verify_oop(rbx); | 2958 __ verify_oop(rbx); |
2942 __ profile_call(rax); | 2959 __ profile_call(rax); |
2943 __ jump_from_interpreted(rbx, rax); | 2960 __ jump_from_interpreted(rbx, rax); |
2944 } | 2961 } |
2945 | 2962 |
2946 void TemplateTable::fast_invokevfinal(int byte_no) { | 2963 void TemplateTable::fast_invokevfinal(int byte_no) { |
2947 transition(vtos, vtos); | 2964 transition(vtos, vtos); |
2965 assert(byte_no == f2_byte, "use this argument"); | |
2948 __ stop("fast_invokevfinal not used on amd64"); | 2966 __ stop("fast_invokevfinal not used on amd64"); |
2949 } | 2967 } |
2950 | 2968 |
2951 void TemplateTable::invokeinterface(int byte_no) { | 2969 void TemplateTable::invokeinterface(int byte_no) { |
2952 transition(vtos, vtos); | 2970 transition(vtos, vtos); |
2971 assert(byte_no == f1_byte, "use this argument"); | |
2953 prepare_invoke(rax, rbx, byte_no); | 2972 prepare_invoke(rax, rbx, byte_no); |
2954 | 2973 |
2955 // rax: Interface | 2974 // rax: Interface |
2956 // rbx: index | 2975 // rbx: index |
2957 // rcx: receiver | 2976 // rcx: receiver |
3025 return; | 3044 return; |
3026 } | 3045 } |
3027 | 3046 |
3028 void TemplateTable::invokedynamic(int byte_no) { | 3047 void TemplateTable::invokedynamic(int byte_no) { |
3029 transition(vtos, vtos); | 3048 transition(vtos, vtos); |
3049 assert(byte_no == f1_oop, "use this argument"); | |
3030 | 3050 |
3031 if (!EnableInvokeDynamic) { | 3051 if (!EnableInvokeDynamic) { |
3032 // We should not encounter this bytecode if !EnableInvokeDynamic. | 3052 // We should not encounter this bytecode if !EnableInvokeDynamic. |
3033 // The verifier will stop it. However, if we get past the verifier, | 3053 // The verifier will stop it. However, if we get past the verifier, |
3034 // this will stop the thread in a reasonable way, without crashing the JVM. | 3054 // this will stop the thread in a reasonable way, without crashing the JVM. |
3037 // the call_VM checks for exception, so we should never return here. | 3057 // the call_VM checks for exception, so we should never return here. |
3038 __ should_not_reach_here(); | 3058 __ should_not_reach_here(); |
3039 return; | 3059 return; |
3040 } | 3060 } |
3041 | 3061 |
3062 assert(byte_no == f1_oop, "use this argument"); | |
3042 prepare_invoke(rax, rbx, byte_no); | 3063 prepare_invoke(rax, rbx, byte_no); |
3043 | 3064 |
3044 // rax: CallSite object (f1) | 3065 // rax: CallSite object (f1) |
3045 // rbx: unused (f2) | 3066 // rbx: unused (f2) |
3046 // rcx: receiver address | 3067 // rcx: receiver address |