Mercurial > hg > graal-jvmci-8
comparison src/cpu/x86/vm/templateTable_x86_32.cpp @ 1579:e9ff18c4ace7
Merge
author | jrose |
---|---|
date | Wed, 02 Jun 2010 22:45:42 -0700 |
parents | c18cbe5936b8 ab102d5d923e |
children | 136b78722a08 |
comparison view — legend: equal | deleted | inserted | replaced
1562:dfe27f03244a | 1579:e9ff18c4ace7 |
---|---|
2010 // Helper function to insert a is-volatile test and memory barrier | 2010 // Helper function to insert a is-volatile test and memory barrier |
2011 if( !os::is_MP() ) return; // Not needed on single CPU | 2011 if( !os::is_MP() ) return; // Not needed on single CPU |
2012 __ membar(order_constraint); | 2012 __ membar(order_constraint); |
2013 } | 2013 } |
2014 | 2014 |
2015 void TemplateTable::resolve_cache_and_index(int byte_no, Register Rcache, Register index) { | 2015 void TemplateTable::resolve_cache_and_index(int byte_no, |
2016 assert(byte_no == 1 || byte_no == 2, "byte_no out of range"); | 2016 Register result, |
2017 bool is_invokedynamic = (bytecode() == Bytecodes::_invokedynamic); | 2017 Register Rcache, |
2018 | 2018 Register index, |
2019 size_t index_size) { | |
2019 Register temp = rbx; | 2020 Register temp = rbx; |
2020 | 2021 |
2021 assert_different_registers(Rcache, index, temp); | 2022 assert_different_registers(result, Rcache, index, temp); |
2022 | 2023 |
2023 const int shift_count = (1 + byte_no)*BitsPerByte; | |
2024 Label resolved; | 2024 Label resolved; |
2025 __ get_cache_and_index_at_bcp(Rcache, index, 1, is_invokedynamic); | 2025 __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size); |
2026 if (is_invokedynamic) { | 2026 if (byte_no == f1_oop) { |
2027 // we are resolved if the f1 field contains a non-null CallSite object | 2027 // We are resolved if the f1 field contains a non-null object (CallSite, etc.) |
2028 __ cmpptr(Address(Rcache, index, Address::times_ptr, constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::f1_offset()), (int32_t) NULL_WORD); | 2028 // This kind of CP cache entry does not need to match the flags byte, because |
2029 // there is a 1-1 relation between bytecode type and CP entry type. | |
2030 assert(result != noreg, ""); //else do cmpptr(Address(...), (int32_t) NULL_WORD) | |
2031 __ movptr(result, Address(Rcache, index, Address::times_ptr, constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::f1_offset())); | |
2032 __ testptr(result, result); | |
2029 __ jcc(Assembler::notEqual, resolved); | 2033 __ jcc(Assembler::notEqual, resolved); |
2030 } else { | 2034 } else { |
2035 assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range"); | |
2036 assert(result == noreg, ""); //else change code for setting result | |
2037 const int shift_count = (1 + byte_no)*BitsPerByte; | |
2031 __ movl(temp, Address(Rcache, index, Address::times_4, constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::indices_offset())); | 2038 __ movl(temp, Address(Rcache, index, Address::times_4, constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::indices_offset())); |
2032 __ shrl(temp, shift_count); | 2039 __ shrl(temp, shift_count); |
2033 // have we resolved this bytecode? | 2040 // have we resolved this bytecode? |
2034 __ andl(temp, 0xFF); | 2041 __ andl(temp, 0xFF); |
2035 __ cmpl(temp, (int)bytecode()); | 2042 __ cmpl(temp, (int)bytecode()); |
2051 default : ShouldNotReachHere(); break; | 2058 default : ShouldNotReachHere(); break; |
2052 } | 2059 } |
2053 __ movl(temp, (int)bytecode()); | 2060 __ movl(temp, (int)bytecode()); |
2054 __ call_VM(noreg, entry, temp); | 2061 __ call_VM(noreg, entry, temp); |
2055 // Update registers with resolved info | 2062 // Update registers with resolved info |
2056 __ get_cache_and_index_at_bcp(Rcache, index, 1, is_invokedynamic); | 2063 __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size); |
2064 if (result != noreg) | |
2065 __ movptr(result, Address(Rcache, index, Address::times_ptr, constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::f1_offset())); | |
2057 __ bind(resolved); | 2066 __ bind(resolved); |
2058 } | 2067 } |
2059 | 2068 |
2060 | 2069 |
2061 // The cache and index registers must be set before call | 2070 // The cache and index registers must be set before call |
2085 void TemplateTable::load_invoke_cp_cache_entry(int byte_no, | 2094 void TemplateTable::load_invoke_cp_cache_entry(int byte_no, |
2086 Register method, | 2095 Register method, |
2087 Register itable_index, | 2096 Register itable_index, |
2088 Register flags, | 2097 Register flags, |
2089 bool is_invokevirtual, | 2098 bool is_invokevirtual, |
2090 bool is_invokevfinal /*unused*/) { | 2099 bool is_invokevfinal /*unused*/, |
2100 bool is_invokedynamic) { | |
2091 // setup registers | 2101 // setup registers |
2092 const Register cache = rcx; | 2102 const Register cache = rcx; |
2093 const Register index = rdx; | 2103 const Register index = rdx; |
2094 assert_different_registers(method, flags); | 2104 assert_different_registers(method, flags); |
2095 assert_different_registers(method, cache, index); | 2105 assert_different_registers(method, cache, index); |
2107 ConstantPoolCacheEntry::flags_offset()); | 2117 ConstantPoolCacheEntry::flags_offset()); |
2108 // access constant pool cache fields | 2118 // access constant pool cache fields |
2109 const int index_offset = in_bytes(constantPoolCacheOopDesc::base_offset() + | 2119 const int index_offset = in_bytes(constantPoolCacheOopDesc::base_offset() + |
2110 ConstantPoolCacheEntry::f2_offset()); | 2120 ConstantPoolCacheEntry::f2_offset()); |
2111 | 2121 |
2112 resolve_cache_and_index(byte_no, cache, index); | 2122 if (byte_no == f1_oop) { |
2113 | 2123 // Resolved f1_oop goes directly into 'method' register. |
2114 __ movptr(method, Address(cache, index, Address::times_ptr, method_offset)); | 2124 assert(is_invokedynamic, ""); |
2125 resolve_cache_and_index(byte_no, method, cache, index, sizeof(u4)); | |
2126 } else { | |
2127 resolve_cache_and_index(byte_no, noreg, cache, index, sizeof(u2)); | |
2128 __ movptr(method, Address(cache, index, Address::times_ptr, method_offset)); | |
2129 } | |
2115 if (itable_index != noreg) { | 2130 if (itable_index != noreg) { |
2116 __ movptr(itable_index, Address(cache, index, Address::times_ptr, index_offset)); | 2131 __ movptr(itable_index, Address(cache, index, Address::times_ptr, index_offset)); |
2117 } | 2132 } |
2118 __ movl(flags , Address(cache, index, Address::times_ptr, flags_offset )); | 2133 __ movl(flags, Address(cache, index, Address::times_ptr, flags_offset)); |
2119 } | 2134 } |
2120 | 2135 |
2121 | 2136 |
2122 // The registers cache and index expected to be set before call. | 2137 // The registers cache and index expected to be set before call. |
2123 // Correct values of the cache and index registers are preserved. | 2138 // Correct values of the cache and index registers are preserved. |
2167 const Register index = rdx; | 2182 const Register index = rdx; |
2168 const Register obj = rcx; | 2183 const Register obj = rcx; |
2169 const Register off = rbx; | 2184 const Register off = rbx; |
2170 const Register flags = rax; | 2185 const Register flags = rax; |
2171 | 2186 |
2172 resolve_cache_and_index(byte_no, cache, index); | 2187 resolve_cache_and_index(byte_no, noreg, cache, index, sizeof(u2)); |
2173 jvmti_post_field_access(cache, index, is_static, false); | 2188 jvmti_post_field_access(cache, index, is_static, false); |
2174 load_field_cp_cache_entry(obj, cache, index, off, flags, is_static); | 2189 load_field_cp_cache_entry(obj, cache, index, off, flags, is_static); |
2175 | 2190 |
2176 if (!is_static) pop_and_check_object(obj); | 2191 if (!is_static) pop_and_check_object(obj); |
2177 | 2192 |
2376 const Register index = rdx; | 2391 const Register index = rdx; |
2377 const Register obj = rcx; | 2392 const Register obj = rcx; |
2378 const Register off = rbx; | 2393 const Register off = rbx; |
2379 const Register flags = rax; | 2394 const Register flags = rax; |
2380 | 2395 |
2381 resolve_cache_and_index(byte_no, cache, index); | 2396 resolve_cache_and_index(byte_no, noreg, cache, index, sizeof(u2)); |
2382 jvmti_post_field_mod(cache, index, is_static); | 2397 jvmti_post_field_mod(cache, index, is_static); |
2383 load_field_cp_cache_entry(obj, cache, index, off, flags, is_static); | 2398 load_field_cp_cache_entry(obj, cache, index, off, flags, is_static); |
2384 | 2399 |
2385 // Doug Lea believes this is not needed with current Sparcs (TSO) and Intel (PSO). | 2400 // Doug Lea believes this is not needed with current Sparcs (TSO) and Intel (PSO). |
2386 // volatile_barrier( ); | 2401 // volatile_barrier( ); |
2813 assert_different_registers(method, index, recv, flags); | 2828 assert_different_registers(method, index, recv, flags); |
2814 | 2829 |
2815 // save 'interpreter return address' | 2830 // save 'interpreter return address' |
2816 __ save_bcp(); | 2831 __ save_bcp(); |
2817 | 2832 |
2818 load_invoke_cp_cache_entry(byte_no, method, index, flags, is_invokevirtual); | 2833 load_invoke_cp_cache_entry(byte_no, method, index, flags, is_invokevirtual, false, is_invokedynamic); |
2819 | 2834 |
2820 // load receiver if needed (note: no return address pushed yet) | 2835 // load receiver if needed (note: no return address pushed yet) |
2821 if (load_receiver) { | 2836 if (load_receiver) { |
2837 assert(!is_invokedynamic, ""); | |
2822 __ movl(recv, flags); | 2838 __ movl(recv, flags); |
2823 __ andl(recv, 0xFF); | 2839 __ andl(recv, 0xFF); |
2824 // recv count is 0 based? | 2840 // recv count is 0 based? |
2825 Address recv_addr(rsp, recv, Interpreter::stackElementScale(), -Interpreter::expr_offset_in_bytes(1)); | 2841 Address recv_addr(rsp, recv, Interpreter::stackElementScale(), -Interpreter::expr_offset_in_bytes(1)); |
2826 __ movptr(recv, recv_addr); | 2842 __ movptr(recv, recv_addr); |
2908 } | 2924 } |
2909 | 2925 |
2910 | 2926 |
2911 void TemplateTable::invokevirtual(int byte_no) { | 2927 void TemplateTable::invokevirtual(int byte_no) { |
2912 transition(vtos, vtos); | 2928 transition(vtos, vtos); |
2929 assert(byte_no == f2_byte, "use this argument"); | |
2913 prepare_invoke(rbx, noreg, byte_no); | 2930 prepare_invoke(rbx, noreg, byte_no); |
2914 | 2931 |
2915 // rbx,: index | 2932 // rbx,: index |
2916 // rcx: receiver | 2933 // rcx: receiver |
2917 // rdx: flags | 2934 // rdx: flags |
2920 } | 2937 } |
2921 | 2938 |
2922 | 2939 |
2923 void TemplateTable::invokespecial(int byte_no) { | 2940 void TemplateTable::invokespecial(int byte_no) { |
2924 transition(vtos, vtos); | 2941 transition(vtos, vtos); |
2942 assert(byte_no == f1_byte, "use this argument"); | |
2925 prepare_invoke(rbx, noreg, byte_no); | 2943 prepare_invoke(rbx, noreg, byte_no); |
2926 // do the call | 2944 // do the call |
2927 __ verify_oop(rbx); | 2945 __ verify_oop(rbx); |
2928 __ profile_call(rax); | 2946 __ profile_call(rax); |
2929 __ jump_from_interpreted(rbx, rax); | 2947 __ jump_from_interpreted(rbx, rax); |
2930 } | 2948 } |
2931 | 2949 |
2932 | 2950 |
2933 void TemplateTable::invokestatic(int byte_no) { | 2951 void TemplateTable::invokestatic(int byte_no) { |
2934 transition(vtos, vtos); | 2952 transition(vtos, vtos); |
2953 assert(byte_no == f1_byte, "use this argument"); | |
2935 prepare_invoke(rbx, noreg, byte_no); | 2954 prepare_invoke(rbx, noreg, byte_no); |
2936 // do the call | 2955 // do the call |
2937 __ verify_oop(rbx); | 2956 __ verify_oop(rbx); |
2938 __ profile_call(rax); | 2957 __ profile_call(rax); |
2939 __ jump_from_interpreted(rbx, rax); | 2958 __ jump_from_interpreted(rbx, rax); |
2940 } | 2959 } |
2941 | 2960 |
2942 | 2961 |
2943 void TemplateTable::fast_invokevfinal(int byte_no) { | 2962 void TemplateTable::fast_invokevfinal(int byte_no) { |
2944 transition(vtos, vtos); | 2963 transition(vtos, vtos); |
2964 assert(byte_no == f2_byte, "use this argument"); | |
2945 __ stop("fast_invokevfinal not used on x86"); | 2965 __ stop("fast_invokevfinal not used on x86"); |
2946 } | 2966 } |
2947 | 2967 |
2948 | 2968 |
2949 void TemplateTable::invokeinterface(int byte_no) { | 2969 void TemplateTable::invokeinterface(int byte_no) { |
2950 transition(vtos, vtos); | 2970 transition(vtos, vtos); |
2971 assert(byte_no == f1_byte, "use this argument"); | |
2951 prepare_invoke(rax, rbx, byte_no); | 2972 prepare_invoke(rax, rbx, byte_no); |
2952 | 2973 |
2953 // rax,: Interface | 2974 // rax,: Interface |
2954 // rbx,: index | 2975 // rbx,: index |
2955 // rcx: receiver | 2976 // rcx: receiver |
3034 // the call_VM checks for exception, so we should never return here. | 3055 // the call_VM checks for exception, so we should never return here. |
3035 __ should_not_reach_here(); | 3056 __ should_not_reach_here(); |
3036 return; | 3057 return; |
3037 } | 3058 } |
3038 | 3059 |
3060 assert(byte_no == f1_oop, "use this argument"); | |
3039 prepare_invoke(rax, rbx, byte_no); | 3061 prepare_invoke(rax, rbx, byte_no); |
3040 | 3062 |
3041 // rax: CallSite object (f1) | 3063 // rax: CallSite object (f1) |
3042 // rbx: unused (f2) | 3064 // rbx: unused (f2) |
3043 // rcx: receiver address | |
3044 // rdx: flags (unused) | 3065 // rdx: flags (unused) |
3045 | 3066 |
3046 if (ProfileInterpreter) { | 3067 if (ProfileInterpreter) { |
3047 Label L; | 3068 Label L; |
3048 // %%% should make a type profile for any invokedynamic that takes a ref argument | 3069 // %%% should make a type profile for any invokedynamic that takes a ref argument |