comparison src/cpu/sparc/vm/templateTable_sparc.cpp @ 1565:ab102d5d923e

6939207: refactor constant pool index processing
Summary: Factored cleanup of instruction decode which prepares for enhanced ldc semantics.
Reviewed-by: twisti
author jrose
date Sun, 23 May 2010 01:38:26 -0700
parents 2338d41fbd81
children e9ff18c4ace7
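
Before the hunks, the shape of the refactor in one place: resolve_cache_and_index() used to take byte_no in {1, 2} and sniffed for invokedynamic itself; it now takes an explicit mode (f1_byte, f2_byte, or the new f1_oop), an optional result register for the resolved oop, and the operand size (sizeof(u2) for ordinary constant pool cache indices, sizeof(u4) for invokedynamic). The following self-contained C++ model shows the resolution test each mode performs. It is a sketch only; the entry layout and enum values are illustrative, not HotSpot's definitions.

    #include <cstdint>

    // Simplified model of a constant pool cache entry -- NOT the HotSpot
    // definition.  The real entry packs the constant pool index in the low
    // bytes of _indices with one "resolved bytecode" tag per byte_no slot
    // above it; _f1 holds a resolved oop (e.g. a CallSite) for f1_oop entries.
    struct CacheEntrySketch {
      uintptr_t indices;  // [bytecode_2][bytecode_1][constant_pool_index]
      void*     f1;       // non-null once an f1_oop-style entry is resolved
    };

    enum { f1_byte = 1, f2_byte = 2, f1_oop = 0x100 };  // values illustrative
    const int BitsPerByte = 8;

    // True if the entry is already resolved for this bytecode, i.e. the
    // interpreter may take the fast path and skip the call into the runtime.
    bool is_resolved(const CacheEntrySketch& e, int byte_no, unsigned bytecode) {
      if (byte_no == f1_oop) {
        // 1-1 relation between bytecode type and entry type: a non-null f1
        // is the whole resolution check (no flags byte to match).
        return e.f1 != nullptr;
      }
      // f1_byte/f2_byte entries: extract the tag byte and compare it with the
      // executing bytecode -- the same shift-and-mask the srl/and3 pair emits.
      const int shift_count = (1 + byte_no) * BitsPerByte;
      return ((e.indices >> shift_count) & 0xFF) == bytecode;
    }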
--- a/src/cpu/sparc/vm/templateTable_sparc.cpp	(1564:61b2245abf36)
+++ b/src/cpu/sparc/vm/templateTable_sparc.cpp	(1565:ab102d5d923e)
@@ -1947,27 +1947,34 @@
   if ((order_constraint & Assembler::StoreLoad) == 0) return;
   __ membar( order_constraint );
 }
 
 // ----------------------------------------------------------------------------
-void TemplateTable::resolve_cache_and_index(int byte_no, Register Rcache, Register index) {
-  assert(byte_no == 1 || byte_no == 2, "byte_no out of range");
-  bool is_invokedynamic = (bytecode() == Bytecodes::_invokedynamic);
-
+void TemplateTable::resolve_cache_and_index(int byte_no,
+                                            Register result,
+                                            Register Rcache,
+                                            Register index,
+                                            size_t index_size) {
   // Depends on cpCacheOop layout!
-  const int shift_count = (1 + byte_no)*BitsPerByte;
   Label resolved;
 
-  __ get_cache_and_index_at_bcp(Rcache, index, 1, is_invokedynamic);
-  if (is_invokedynamic) {
-    // We are resolved if the f1 field contains a non-null CallSite object.
+  __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size);
+  if (byte_no == f1_oop) {
+    // We are resolved if the f1 field contains a non-null object (CallSite, etc.)
+    // This kind of CP cache entry does not need to match the flags byte, because
+    // there is a 1-1 relation between bytecode type and CP entry type.
+    assert_different_registers(result, Rcache);
     __ ld_ptr(Rcache, constantPoolCacheOopDesc::base_offset() +
-              ConstantPoolCacheEntry::f1_offset(), Lbyte_code);
-    __ tst(Lbyte_code);
+              ConstantPoolCacheEntry::f1_offset(), result);
+    __ tst(result);
     __ br(Assembler::notEqual, false, Assembler::pt, resolved);
     __ delayed()->set((int)bytecode(), O1);
   } else {
+    assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
+    assert(result == noreg, "");  //else change code for setting result
+    const int shift_count = (1 + byte_no)*BitsPerByte;
+
     __ ld_ptr(Rcache, constantPoolCacheOopDesc::base_offset() +
               ConstantPoolCacheEntry::indices_offset(), Lbyte_code);
 
     __ srl( Lbyte_code, shift_count, Lbyte_code );
     __ and3( Lbyte_code, 0xFF, Lbyte_code );
@@ -1990,9 +1997,12 @@
     default : ShouldNotReachHere(); break;
   }
   // first time invocation - must resolve first
   __ call_VM(noreg, entry, O1);
   // Update registers with resolved info
-  __ get_cache_and_index_at_bcp(Rcache, index, 1, is_invokedynamic);
+  __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size);
+  if (result != noreg)
+    __ ld_ptr(Rcache, constantPoolCacheOopDesc::base_offset() +
+              ConstantPoolCacheEntry::f1_offset(), result);
   __ bind(resolved);
 }
 
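The index_size argument threaded through get_cache_and_index_at_bcp() above replaces the old is_invokedynamic flag: ordinary field and invoke bytecodes carry a two-byte constant pool cache index after the opcode, while invokedynamic carries a four-byte one. A minimal sketch of that size dispatch follows; read_cp_cache_index is a hypothetical helper, and the real operand's byte order and encoding remain the VM's concern.

    #include <cstddef>
    #include <cstdint>
    #include <cstring>

    typedef uint16_t u2;
    typedef uint32_t u4;

    // Hypothetical fetch of the cp-cache index operand at bcp+1; shows only
    // the u2-vs-u4 size dispatch that the new index_size parameter selects.
    uint32_t read_cp_cache_index(const uint8_t* bcp, size_t index_size) {
      if (index_size == sizeof(u4)) {    // invokedynamic: 4-byte operand
        u4 ix;
        std::memcpy(&ix, bcp + 1, sizeof(ix));
        return ix;
      }
      u2 ix;                             // everything else: 2-byte operand
      std::memcpy(&ix, bcp + 1, sizeof(ix));
      return ix;
    }
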
@@ -1999,11 +2009,12 @@
 void TemplateTable::load_invoke_cp_cache_entry(int byte_no,
                                                Register Rmethod,
                                                Register Ritable_index,
                                                Register Rflags,
                                                bool is_invokevirtual,
-                                               bool is_invokevfinal) {
+                                               bool is_invokevfinal,
+                                               bool is_invokedynamic) {
   // Uses both G3_scratch and G4_scratch
   Register Rcache = G3_scratch;
   Register Rscratch = G4_scratch;
   assert_different_registers(Rcache, Rmethod, Ritable_index);
 
@@ -2023,15 +2034,19 @@
   const int index_offset = in_bytes(cp_base_offset +
                                     ConstantPoolCacheEntry::f2_offset());
 
   if (is_invokevfinal) {
     __ get_cache_and_index_at_bcp(Rcache, Rscratch, 1);
+    __ ld_ptr(Rcache, method_offset, Rmethod);
+  } else if (byte_no == f1_oop) {
+    // Resolved f1_oop goes directly into 'method' register.
+    resolve_cache_and_index(byte_no, Rmethod, Rcache, Rscratch, sizeof(u4));
   } else {
-    resolve_cache_and_index(byte_no, Rcache, Rscratch);
-  }
-
-  __ ld_ptr(Rcache, method_offset, Rmethod);
+    resolve_cache_and_index(byte_no, noreg, Rcache, Rscratch, sizeof(u2));
+    __ ld_ptr(Rcache, method_offset, Rmethod);
+  }
+
   if (Ritable_index != noreg) {
     __ ld_ptr(Rcache, index_offset, Ritable_index);
   }
   __ ld_ptr(Rcache, flags_offset, Rflags);
 }
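
The reshuffled load_invoke_cp_cache_entry() above now loads Rmethod inside each branch, because for f1_oop entries the resolved oop lands in the 'method' register during resolution itself. Which byte_no each invoke bytecode supplies is pinned down by the asserts added in the invoke* templates below; a sketch of that mapping (enum values illustrative):

    // Sketch of the byte_no each invoke bytecode passes, matching the asserts
    // this changeset adds in the invoke* templates below.
    enum ByteNo   { f1_byte = 1, f2_byte = 2, f1_oop = 0x100 };  // illustrative
    enum Bytecode { _invokevirtual, _invokespecial, _invokestatic,
                    _invokeinterface, _invokedynamic };

    ByteNo byte_no_for(Bytecode bc) {
      switch (bc) {
        case _invokevirtual:   return f2_byte;  // vtable index cached in f2
        case _invokespecial:
        case _invokestatic:
        case _invokeinterface: return f1_byte;  // methodOop cached in f1
        case _invokedynamic:   return f1_oop;   // resolved CallSite oop in f1,
      }                                         // 4-byte (u4) operand
      return f1_byte;  // unreachable; keeps the sketch compiling
    }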
@@ -2108,11 +2123,11 @@
   Register Rclass = Rcache;
   Register Roffset = G4_scratch;
   Register Rflags = G1_scratch;
   ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset();
 
-  resolve_cache_and_index(byte_no, Rcache, index);
+  resolve_cache_and_index(byte_no, noreg, Rcache, index, sizeof(u2));
   jvmti_post_field_access(Rcache, index, is_static, false);
   load_field_cp_cache_entry(Rclass, Rcache, index, Roffset, Rflags, is_static);
 
   if (!is_static) {
     pop_and_check_object(Rclass);
@@ -2473,11 +2488,11 @@
   Register Rclass = Rcache;
   Register Roffset = G4_scratch;
   Register Rflags = G1_scratch;
   ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset();
 
-  resolve_cache_and_index(byte_no, Rcache, index);
+  resolve_cache_and_index(byte_no, noreg, Rcache, index, sizeof(u2));
   jvmti_post_field_mod(Rcache, index, is_static);
   load_field_cp_cache_entry(Rclass, Rcache, index, Roffset, Rflags, is_static);
 
   Assembler::Membar_mask_bits read_bits =
     Assembler::Membar_mask_bits(Assembler::LoadStore | Assembler::StoreStore);
@@ -2814,18 +2829,19 @@
   __ call_from_interpreter(Rcall, Gargs, Rret);
 }
 
 void TemplateTable::invokevirtual(int byte_no) {
   transition(vtos, vtos);
+  assert(byte_no == f2_byte, "use this argument");
 
   Register Rscratch = G3_scratch;
   Register Rtemp = G4_scratch;
   Register Rret = Lscratch;
   Register Rrecv = G5_method;
   Label notFinal;
 
-  load_invoke_cp_cache_entry(byte_no, G5_method, noreg, Rret, true);
+  load_invoke_cp_cache_entry(byte_no, G5_method, noreg, Rret, true, false, false);
   __ mov(SP, O5_savedSP); // record SP that we wanted the callee to restore
 
   // Check for vfinal
   __ set((1 << ConstantPoolCacheEntry::vfinalMethod), G4_scratch);
   __ btst(Rret, G4_scratch);
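
The vfinal test after the cache load works on the flags word fetched into Rret: set() materializes a one-bit mask at the vfinalMethod flag position and btst() ANDs it against the flags. As a plain C++ sketch (the bit position is illustrative, not HotSpot's actual flags layout):

    #include <cstdint>

    // Sketch of the set()/btst() pair: test one flag bit in the cp-cache
    // entry's flags word.  The bit index is illustrative.
    const unsigned vfinalMethod = 21;

    bool is_vfinal(uintptr_t flags) {
      uintptr_t mask = (uintptr_t)1 << vfinalMethod;  // __ set(1 << vfinalMethod, G4)
      return (flags & mask) != 0;                     // __ btst(Rret, G4_scratch)
    }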
@@ -2862,13 +2878,14 @@
   generate_vtable_call(Rrecv, Rscratch, Rret);
 }
 
 void TemplateTable::fast_invokevfinal(int byte_no) {
   transition(vtos, vtos);
+  assert(byte_no == f2_byte, "use this argument");
 
   load_invoke_cp_cache_entry(byte_no, G5_method, noreg, Lscratch, true,
-                             /*is_invokevfinal*/true);
+                             /*is_invokevfinal*/true, false);
   __ mov(SP, O5_savedSP); // record SP that we wanted the callee to restore
   invokevfinal_helper(G3_scratch, Lscratch);
 }
 
 void TemplateTable::invokevfinal_helper(Register Rscratch, Register Rret) {
@@ -2899,16 +2916,17 @@
   __ call_from_interpreter(Rscratch, Gargs, Rret);
 }
 
 void TemplateTable::invokespecial(int byte_no) {
   transition(vtos, vtos);
+  assert(byte_no == f1_byte, "use this argument");
 
   Register Rscratch = G3_scratch;
   Register Rtemp = G4_scratch;
   Register Rret = Lscratch;
 
-  load_invoke_cp_cache_entry(byte_no, G5_method, noreg, Rret, false);
+  load_invoke_cp_cache_entry(byte_no, G5_method, noreg, Rret, /*virtual*/ false, false, false);
   __ mov(SP, O5_savedSP); // record SP that we wanted the callee to restore
 
   __ verify_oop(G5_method);
 
   __ lduh(G5_method, in_bytes(methodOopDesc::size_of_parameters_offset()), G4_scratch);
@@ -2932,16 +2950,17 @@
   __ call_from_interpreter(Rscratch, Gargs, Rret);
 }
 
 void TemplateTable::invokestatic(int byte_no) {
   transition(vtos, vtos);
+  assert(byte_no == f1_byte, "use this argument");
 
   Register Rscratch = G3_scratch;
   Register Rtemp = G4_scratch;
   Register Rret = Lscratch;
 
-  load_invoke_cp_cache_entry(byte_no, G5_method, noreg, Rret, false);
+  load_invoke_cp_cache_entry(byte_no, G5_method, noreg, Rret, /*virtual*/ false, false, false);
   __ mov(SP, O5_savedSP); // record SP that we wanted the callee to restore
 
   __ verify_oop(G5_method);
 
   __ profile_call(O4);
@@ -2990,20 +3009,21 @@
 }
 
 
 void TemplateTable::invokeinterface(int byte_no) {
   transition(vtos, vtos);
+  assert(byte_no == f1_byte, "use this argument");
 
   Register Rscratch = G4_scratch;
   Register Rret = G3_scratch;
   Register Rindex = Lscratch;
   Register Rinterface = G1_scratch;
   Register RklassOop = G5_method;
   Register Rflags = O1;
   assert_different_registers(Rscratch, G5_method);
 
-  load_invoke_cp_cache_entry(byte_no, Rinterface, Rindex, Rflags, false);
+  load_invoke_cp_cache_entry(byte_no, Rinterface, Rindex, Rflags, /*virtual*/ false, false, false);
   __ mov(SP, O5_savedSP); // record SP that we wanted the callee to restore
 
   // get receiver
   __ and3(Rflags, 0xFF, Rscratch); // gets number of parameters
   __ load_receiver(Rscratch, O0);
@@ -3116,10 +3136,11 @@
 }
 
 
 void TemplateTable::invokedynamic(int byte_no) {
   transition(vtos, vtos);
+  assert(byte_no == f1_oop, "use this argument");
 
   if (!EnableInvokeDynamic) {
     // We should not encounter this bytecode if !EnableInvokeDynamic.
     // The verifier will stop it.  However, if we get past the verifier,
     // this will stop the thread in a reasonable way, without crashing the JVM.
@@ -3130,19 +3151,19 @@
     return;
   }
 
   // G5: CallSite object (f1)
   // XX: unused (f2)
-  // G3: receiver address
   // XX: flags (unused)
 
   Register G5_callsite = G5_method;
   Register Rscratch = G3_scratch;
   Register Rtemp = G1_scratch;
   Register Rret = Lscratch;
 
-  load_invoke_cp_cache_entry(byte_no, G5_callsite, noreg, Rret, false);
+  load_invoke_cp_cache_entry(byte_no, G5_callsite, noreg, Rret,
+                             /*virtual*/ false, /*vfinal*/ false, /*indy*/ true);
   __ mov(SP, O5_savedSP); // record SP that we wanted the callee to restore
 
   __ verify_oop(G5_callsite);
 
   // profile this call
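
The invokedynamic path ties the pieces together: with byte_no == f1_oop, resolution and the resolved-yet test are the same null check on f1, and the CallSite oop arrives directly in G5_callsite. A runnable toy model of that fast/slow-path shape, with all names hypothetical:

    #include <cstdio>

    // Toy model of the f1_oop fast/slow path: the only "resolved?" test is
    // f1 != null; the slow path asks the runtime to fill f1 with the CallSite.
    struct Entry { void* f1; };

    static int call_site_placeholder;  // stands in for a real CallSite object

    void* resolve_call_site_in_vm(Entry& e) {  // stands in for the call_VM stub
      e.f1 = &call_site_placeholder;
      return e.f1;
    }

    void* call_site_for(Entry& e) {
      if (e.f1 != nullptr) return e.f1;        // fast path: already resolved
      return resolve_call_site_in_vm(e);       // slow path: resolve, reload f1
    }

    int main() {
      Entry e = { nullptr };
      std::printf("first:  %p\n", call_site_for(e));  // resolves the entry
      std::printf("second: %p\n", call_site_for(e));  // takes the fast path
      return 0;
    }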