comparison src/cpu/sparc/vm/templateTable_sparc.cpp @ 727:6b2273dd6fa9

6822110: Add AddressLiteral class on SPARC
Summary: The Address class on SPARC currently handles both addresses and address literals, which makes the Address class more complicated than it has to be.
Reviewed-by: never, kvn
author twisti
date Tue, 21 Apr 2009 11:16:30 -0700
parents 98cb887364d3
children 85656c8fa13f
comparing 725:928912ce8438 with 727:6b2273dd6fa9
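The change below splits the overloaded SPARC Address class in two: Address keeps the register-plus-displacement case (a real memory operand), while the new AddressLiteral carries a bare, code-generation-time address that must first be materialized into a register before use. As a rough sketch of the division of labor (simplified, illustrative shapes only; the real declarations in assembler_sparc.hpp carry relocation info and many more constructors):

    #include <cstdint>

    // Illustrative stand-ins -- not HotSpot's actual definitions.
    struct Register { int encoding; };
    typedef unsigned char* address;

    class Address {                 // a real SPARC memory operand: [base + disp]
      Register _base;
      int      _disp;
     public:
      Address(Register base, int disp) : _base(base), _disp(disp) {}
    };

    class AddressLiteral {          // a bare address known when code is generated
      address _address;
     public:
      explicit AddressLiteral(const void* addr) : _address((address)addr) {}
      intptr_t value() const { return (intptr_t)_address; }
      int low10() const { return (int)(value() & 0x3ff); }  // low 10 bits, pairs with sethi
    };

With that split, an Address can no longer silently stand in for a constant address, which is exactly the ambiguity the old Address(register, address) constructors allowed.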
@@ -1,7 +1,7 @@
 /*
- * Copyright 1997-2008 Sun Microsystems, Inc. All Rights Reserved.
+ * Copyright 1997-2009 Sun Microsystems, Inc. All Rights Reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
  * under the terms of the GNU General Public License version 2 only, as
  * published by the Free Software Foundation.
@@ -129,11 +129,11 @@
 // Miscelaneous helper routines


 Address TemplateTable::at_bcp(int offset) {
   assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
-  return Address( Lbcp, 0, offset);
+  return Address(Lbcp, offset);
 }


 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register Rbyte_code,
                                    Register Rscratch,
@@ -215,13 +215,13 @@
    default: ShouldNotReachHere();
    case 0: p = &zero; break;
    case 1: p = &one; break;
    case 2: p = &two; break;
   }
-  Address a(G3_scratch, (address)p);
-  __ sethi(a);
-  __ ldf(FloatRegisterImpl::S, a, Ftos_f);
+  AddressLiteral a(p);
+  __ sethi(a, G3_scratch);
+  __ ldf(FloatRegisterImpl::S, G3_scratch, a.low10(), Ftos_f);
 }


 void TemplateTable::dconst(int value) {
   transition(vtos, dtos);
@@ -230,13 +230,13 @@
   switch( value ) {
    default: ShouldNotReachHere();
    case 0: p = &zero; break;
    case 1: p = &one; break;
   }
-  Address a(G3_scratch, (address)p);
-  __ sethi(a);
-  __ ldf(FloatRegisterImpl::D, a, Ftos_d);
+  AddressLiteral a(p);
+  __ sethi(a, G3_scratch);
+  __ ldf(FloatRegisterImpl::D, G3_scratch, a.low10(), Ftos_d);
 }


 // %%%%% Should factore most snippet templates across platforms

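fconst and dconst now spell out what the old sethi(Address)/ldf pair hid: sethi plants bits 31..10 of the literal address in the scratch register, and a.low10() supplies the remaining 10 bits as the load's immediate displacement. A small arithmetic check of that split (the address value is arbitrary):

    #include <cstdint>
    #include <cstdio>

    int main() {
      uint32_t addr = 0x12345678;               // arbitrary literal address
      uint32_t hi22 = addr & ~uint32_t(0x3ff);  // what sethi leaves in G3_scratch
      uint32_t lo10 = addr & 0x3ff;             // what a.low10() contributes to the ldf
      std::printf("hi22=0x%08x lo10=0x%03x sum=0x%08x\n", hi22, lo10, hi22 + lo10);
      return 0;                                 // sum == addr: the two halves reassemble it
    }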
@@ -1546,11 +1546,11 @@
   // Handle all the JSR stuff here, then exit.
   // It's much shorter and cleaner than intermingling with the
   // non-JSR normal-branch stuff occurring below.
   if( is_jsr ) {
     // compute return address as bci in Otos_i
-    __ ld_ptr(Address(Lmethod, 0, in_bytes(methodOopDesc::const_offset())), G3_scratch);
+    __ ld_ptr(Lmethod, methodOopDesc::const_offset(), G3_scratch);
     __ sub(Lbcp, G3_scratch, G3_scratch);
     __ sub(G3_scratch, in_bytes(constMethodOopDesc::codes_offset()) - (is_wide ? 5 : 3), Otos_i);

     // Bump Lbcp to target of JSR
     __ add(Lbcp, O1_disp, Lbcp);
@@ -1663,11 +1663,11 @@
 #endif
 #endif

   __ profile_ret(vtos, Otos_i, G4_scratch);

-  __ ld_ptr(Address(Lmethod, 0, in_bytes(methodOopDesc::const_offset())), G3_scratch);
+  __ ld_ptr(Lmethod, methodOopDesc::const_offset(), G3_scratch);
   __ add(G3_scratch, Otos_i, G3_scratch);
   __ add(G3_scratch, in_bytes(constMethodOopDesc::codes_offset()), Lbcp);
   __ dispatch_next(vtos);
 }

@@ -1678,11 +1678,11 @@
   __ access_local_returnAddress(G3_scratch, Otos_i);
   // Otos_i contains the bci, compute the bcp from that

   __ profile_ret(vtos, Otos_i, G4_scratch);

-  __ ld_ptr(Address(Lmethod, 0, in_bytes(methodOopDesc::const_offset())), G3_scratch);
+  __ ld_ptr(Lmethod, methodOopDesc::const_offset(), G3_scratch);
   __ add(G3_scratch, Otos_i, G3_scratch);
   __ add(G3_scratch, in_bytes(constMethodOopDesc::codes_offset()), Lbcp);
   __ dispatch_next(vtos);
 }

@@ -1966,12 +1966,12 @@
   // Depends on cpCacheOop layout!
   const int shift_count = (1 + byte_no)*BitsPerByte;
   Label resolved;

   __ get_cache_and_index_at_bcp(Rcache, index, 1);
-  __ ld_ptr(Address(Rcache, 0, in_bytes(constantPoolCacheOopDesc::base_offset() +
-                                        ConstantPoolCacheEntry::indices_offset())), Lbyte_code);
+  __ ld_ptr(Rcache, constantPoolCacheOopDesc::base_offset() +
+                    ConstantPoolCacheEntry::indices_offset(), Lbyte_code);

   __ srl( Lbyte_code, shift_count, Lbyte_code );
   __ and3( Lbyte_code, 0xFF, Lbyte_code );
   __ cmp( Lbyte_code, (int)bytecode());
   __ br( Assembler::equal, false, Assembler::pt, resolved);
@@ -2027,12 +2027,12 @@
     __ get_cache_and_index_at_bcp(Rcache, Rscratch, 1);
   } else {
     resolve_cache_and_index(byte_no, Rcache, Rscratch);
   }

-  __ ld_ptr(Address(Rcache, 0, method_offset), Rmethod);
+  __ ld_ptr(Rcache, method_offset, Rmethod);
   if (Ritable_index != noreg) {
-    __ ld_ptr(Address(Rcache, 0, index_offset), Ritable_index);
+    __ ld_ptr(Rcache, index_offset, Ritable_index);
   }
-  __ ld_ptr(Address(Rcache, 0, flags_offset), Rflags);
+  __ ld_ptr(Rcache, flags_offset, Rflags);
 }
@@ -2039,3 +2039,3 @@
 // The Rcache register must be set before call
 void TemplateTable::load_field_cp_cache_entry(Register Robj,
                                               Register Rcache,
@@ -2045,17 +2045,14 @@
                                               bool is_static) {
   assert_different_registers(Rcache, Rflags, Roffset);

   ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset();

-  __ ld_ptr(Address(Rcache, 0, in_bytes(cp_base_offset +
-                                        ConstantPoolCacheEntry::flags_offset())), Rflags);
-  __ ld_ptr(Address(Rcache, 0, in_bytes(cp_base_offset +
-                                        ConstantPoolCacheEntry::f2_offset())), Roffset);
+  __ ld_ptr(Rcache, cp_base_offset + ConstantPoolCacheEntry::flags_offset(), Rflags);
+  __ ld_ptr(Rcache, cp_base_offset + ConstantPoolCacheEntry::f2_offset(), Roffset);
   if (is_static) {
-    __ ld_ptr(Address(Rcache, 0, in_bytes(cp_base_offset +
-                                          ConstantPoolCacheEntry::f1_offset())), Robj);
+    __ ld_ptr(Rcache, cp_base_offset + ConstantPoolCacheEntry::f1_offset(), Robj);
   }
 }

 // The registers Rcache and index expected to be set before call.
 // Correct values of the Rcache and index registers are preserved.
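Hunks like the one above also lean on ld_ptr overloads that take a base register and a byte offset directly, so call sites no longer build throwaway Address temporaries around in_bytes(...). A hedged sketch of what such an overload pair could look like, using stub types (Register, ByteSize, and the printed mnemonic are stand-ins, not HotSpot's actual definitions):

    #include <cstdio>

    // Stand-ins for HotSpot's Register and ByteSize types.
    struct Register { const char* name; };
    struct ByteSize { int bytes; };
    static int in_bytes(ByteSize b) { return b.bytes; }

    // Illustrative overload pair: callers pass a raw or ByteSize displacement,
    // with no Address temporary in between.
    static void ld_ptr(Register base, int disp, Register dst) {
      std::printf("ldx [%s + %d], %s\n", base.name, disp, dst.name);
    }
    static void ld_ptr(Register base, ByteSize disp, Register dst) {
      ld_ptr(base, in_bytes(disp), dst);
    }

    int main() {
      Register Rcache = { "Rcache" }, Rflags = { "Rflags" };
      ld_ptr(Rcache, ByteSize{ 16 }, Rflags);   // hypothetical offset
      return 0;
    }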
@@ -2068,13 +2065,11 @@
   if (JvmtiExport::can_post_field_access()) {
     // Check to see if a field access watch has been set before we take
     // the time to call into the VM.
     Label Label1;
     assert_different_registers(Rcache, index, G1_scratch);
-    Address get_field_access_count_addr(G1_scratch,
-                                        (address)JvmtiExport::get_field_access_count_addr(),
-                                        relocInfo::none);
+    AddressLiteral get_field_access_count_addr(JvmtiExport::get_field_access_count_addr());
     __ load_contents(get_field_access_count_addr, G1_scratch);
     __ tst(G1_scratch);
     __ br(Assembler::zero, false, Assembler::pt, Label1);
     __ delayed()->nop();

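In this and the later JVMTI hunks, the watch check boils down to "read the integer stored at a fixed VM address and branch if zero"; load_contents with an AddressLiteral is the sethi-plus-load that performs the dereference. Expressed in plain C++ (the counter and accessor here are stand-ins for JvmtiExport's):

    // Stand-ins for JvmtiExport's counter and accessor.
    static int field_access_count = 0;                      // hypothetical counter
    static int* get_field_access_count_addr() { return &field_access_count; }

    // load_contents(AddressLiteral(p), reg) amounts to: reg = *p.
    static bool field_access_watch_set() {
      int* p = get_field_access_count_addr();
      return *p != 0;    // the template skips the VM upcall when this is zero
    }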
@@ -2291,22 +2286,22 @@
   ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset();

   __ get_cache_and_index_at_bcp(Rcache, index, 1);
   jvmti_post_field_access(Rcache, index, /*is_static*/false, /*has_tos*/true);

-  __ ld_ptr(Address(Rcache, 0, in_bytes(cp_base_offset + ConstantPoolCacheEntry::f2_offset())), Roffset);
+  __ ld_ptr(Rcache, cp_base_offset + ConstantPoolCacheEntry::f2_offset(), Roffset);

   __ null_check(Otos_i);
   __ verify_oop(Otos_i);

   Label exit;

   Assembler::Membar_mask_bits membar_bits =
     Assembler::Membar_mask_bits(Assembler::LoadLoad | Assembler::LoadStore);
   if (__ membar_has_effect(membar_bits)) {
     // Get volatile flag
-    __ ld_ptr(Address(Rcache, 0, in_bytes(cp_base_offset + ConstantPoolCacheEntry::f2_offset())), Rflags);
+    __ ld_ptr(Rcache, cp_base_offset + ConstantPoolCacheEntry::f2_offset(), Rflags);
     __ set((1 << ConstantPoolCacheEntry::volatileField), Lscratch);
   }

   switch (bytecode()) {
     case Bytecodes::_fast_bgetfield:
@@ -2353,11 +2348,11 @@
 void TemplateTable::jvmti_post_fast_field_mod() {
   if (JvmtiExport::can_post_field_modification()) {
     // Check to see if a field modification watch has been set before we take
     // the time to call into the VM.
     Label done;
-    Address get_field_modification_count_addr(G4_scratch, (address)JvmtiExport::get_field_modification_count_addr(), relocInfo::none);
+    AddressLiteral get_field_modification_count_addr(JvmtiExport::get_field_modification_count_addr());
     __ load_contents(get_field_modification_count_addr, G4_scratch);
     __ tst(G4_scratch);
     __ br(Assembler::zero, false, Assembler::pt, done);
     __ delayed()->nop();
     __ pop_ptr(G4_scratch);      // copy the object pointer from tos
@@ -2406,13 +2401,11 @@
   if (JvmtiExport::can_post_field_modification()) {
     // Check to see if a field modification watch has been set before we take
     // the time to call into the VM.
     Label Label1;
     assert_different_registers(Rcache, index, G1_scratch);
-    Address get_field_modification_count_addr(G1_scratch,
-                                              (address)JvmtiExport::get_field_modification_count_addr(),
-                                              relocInfo::none);
+    AddressLiteral get_field_modification_count_addr(JvmtiExport::get_field_modification_count_addr());
     __ load_contents(get_field_modification_count_addr, G1_scratch);
     __ tst(G1_scratch);
     __ br(Assembler::zero, false, Assembler::pt, Label1);
     __ delayed()->nop();

@@ -2431,11 +2424,11 @@
     // object. We don't know the size of the value, though; it could be
     // one or two words depending on its type. As a result, we must find
     // the type to determine where the object is.

     Label two_word, valsizeknown;
-    __ ld_ptr(Address(G1_scratch, 0, in_bytes(cp_base_offset + ConstantPoolCacheEntry::flags_offset())), Rflags);
+    __ ld_ptr(G1_scratch, cp_base_offset + ConstantPoolCacheEntry::flags_offset(), Rflags);
     __ mov(Lesp, G4_scratch);
     __ srl(Rflags, ConstantPoolCacheEntry::tosBits, Rflags);
     // Make sure we don't need to mask Rflags for tosBits after the above shift
     ConstantPoolCacheEntry::verify_tosBits();
     __ cmp(Rflags, ltos);
@@ -2687,12 +2680,11 @@
     Assembler::Membar_mask_bits(Assembler::LoadStore | Assembler::StoreStore);
   Assembler::Membar_mask_bits write_bits = Assembler::StoreLoad;

   Label notVolatile, checkVolatile, exit;
   if (__ membar_has_effect(read_bits) || __ membar_has_effect(write_bits)) {
-    __ ld_ptr(Address(Rcache, 0, in_bytes(cp_base_offset +
-                                          ConstantPoolCacheEntry::flags_offset())), Rflags);
+    __ ld_ptr(Rcache, cp_base_offset + ConstantPoolCacheEntry::flags_offset(), Rflags);
     __ set((1 << ConstantPoolCacheEntry::volatileField), Lscratch);
     __ and3(Rflags, Lscratch, Lscratch);
     if (__ membar_has_effect(read_bits)) {
       __ tst(Lscratch);
       __ br(Assembler::zero, false, Assembler::pt, notVolatile);
@@ -2700,12 +2692,11 @@
       volatile_barrier(read_bits);
       __ bind(notVolatile);
     }
   }

-  __ ld_ptr(Address(Rcache, 0, in_bytes(cp_base_offset +
-                                        ConstantPoolCacheEntry::f2_offset())), Roffset);
+  __ ld_ptr(Rcache, cp_base_offset + ConstantPoolCacheEntry::f2_offset(), Roffset);
   pop_and_check_object(Rclass);

   switch (bytecode()) {
     case Bytecodes::_fast_bputfield: __ stb(Otos_i, Rclass, Roffset); break;
     case Bytecodes::_fast_cputfield: /* fall through */
@@ -2753,11 +2744,11 @@

   __ ld_ptr(Llocals, Interpreter::value_offset_in_bytes(), Rreceiver);

   // access constant pool cache (is resolved)
   __ get_cache_and_index_at_bcp(Rcache, G4_scratch, 2);
-  __ ld_ptr(Address(Rcache, 0, in_bytes(constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::f2_offset())), Roffset);
+  __ ld_ptr(Rcache, constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::f2_offset(), Roffset);
   __ add(Lbcp, 1, Lbcp);       // needed to report exception at the correct bcp

   __ verify_oop(Rreceiver);
   __ null_check(Rreceiver);
   if (state == atos) {
@@ -2773,11 +2764,11 @@
   Assembler::Membar_mask_bits membar_bits =
     Assembler::Membar_mask_bits(Assembler::LoadLoad | Assembler::LoadStore);
   if (__ membar_has_effect(membar_bits)) {

     // Get is_volatile value in Rflags and check if membar is needed
-    __ ld_ptr(Address(Rcache, 0, in_bytes(constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::flags_offset())), Rflags);
+    __ ld_ptr(Rcache, constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::flags_offset(), Rflags);

     // Test volatile
     Label notVolatile;
     __ set((1 << ConstantPoolCacheEntry::volatileField), Lscratch);
     __ btst(Rflags, Lscratch);
@@ -2851,12 +2842,12 @@
   __ load_receiver(G4_scratch, O0);  // gets receiverOop
   // receiver is in O0
   __ verify_oop(O0);

   // get return address
-  Address table(Rtemp, (address)Interpreter::return_3_addrs_by_index_table());
-  __ load_address(table);
+  AddressLiteral table(Interpreter::return_3_addrs_by_index_table());
+  __ set(table, Rtemp);
   __ srl(Rret, ConstantPoolCacheEntry::tosBits, Rret);   // get return type
   // Make sure we don't need to mask Rret for tosBits after the above shift
   ConstantPoolCacheEntry::verify_tosBits();
   __ sll(Rret, LogBytesPerWord, Rret);
   __ ld_ptr(Rtemp, Rret, Rret);                          // get return address
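Each invoke template (this hunk and the three that follow) ends with the same three-step lookup: materialize the literal table base with set, turn the TOS-state bits of the flags word into a word-scaled index, and load the return entry point. Roughly, in C++ (table size, parameter values, and names are placeholders):

    #include <cstdint>

    typedef void (*entry_t)();
    static entry_t return_table[16];   // placeholder for Interpreter::return_3_addrs_by_index_table()

    entry_t return_entry(uint32_t flags, int tosBits, int LogBytesPerWord) {
      uintptr_t base  = (uintptr_t)return_table;      // __ set(table, Rtemp)
      uintptr_t index = (uintptr_t)(flags >> tosBits) // __ srl(Rret, tosBits, Rret)
                          << LogBytesPerWord;         // __ sll(Rret, LogBytesPerWord, Rret)
      return *(entry_t*)(base + index);               // __ ld_ptr(Rtemp, Rret, Rret)
    }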
@@ -2884,21 +2875,21 @@
   Register Rtemp = G4_scratch;

   __ verify_oop(G5_method);

   // Load receiver from stack slot
-  __ lduh(Address(G5_method, 0, in_bytes(methodOopDesc::size_of_parameters_offset())), G4_scratch);
+  __ lduh(G5_method, in_bytes(methodOopDesc::size_of_parameters_offset()), G4_scratch);
   __ load_receiver(G4_scratch, O0);

   // receiver NULL check
   __ null_check(O0);

   __ profile_final_call(O4);

   // get return address
-  Address table(Rtemp, (address)Interpreter::return_3_addrs_by_index_table());
-  __ load_address(table);
+  AddressLiteral table(Interpreter::return_3_addrs_by_index_table());
+  __ set(table, Rtemp);
   __ srl(Rret, ConstantPoolCacheEntry::tosBits, Rret);   // get return type
   // Make sure we don't need to mask Rret for tosBits after the above shift
   ConstantPoolCacheEntry::verify_tosBits();
   __ sll(Rret, LogBytesPerWord, Rret);
   __ ld_ptr(Rtemp, Rret, Rret);                          // get return address
@@ -2918,21 +2909,21 @@
   load_invoke_cp_cache_entry(byte_no, G5_method, noreg, Rret, false);
   __ mov(SP, O5_savedSP);      // record SP that we wanted the callee to restore

   __ verify_oop(G5_method);

-  __ lduh(Address(G5_method, 0, in_bytes(methodOopDesc::size_of_parameters_offset())), G4_scratch);
+  __ lduh(G5_method, in_bytes(methodOopDesc::size_of_parameters_offset()), G4_scratch);
   __ load_receiver(G4_scratch, O0);

   // receiver NULL check
   __ null_check(O0);

   __ profile_call(O4);

   // get return address
-  Address table(Rtemp, (address)Interpreter::return_3_addrs_by_index_table());
-  __ load_address(table);
+  AddressLiteral table(Interpreter::return_3_addrs_by_index_table());
+  __ set(table, Rtemp);
   __ srl(Rret, ConstantPoolCacheEntry::tosBits, Rret);   // get return type
   // Make sure we don't need to mask Rret for tosBits after the above shift
   ConstantPoolCacheEntry::verify_tosBits();
   __ sll(Rret, LogBytesPerWord, Rret);
   __ ld_ptr(Rtemp, Rret, Rret);                          // get return address
@@ -2954,12 +2945,12 @@
   __ verify_oop(G5_method);

   __ profile_call(O4);

   // get return address
-  Address table(Rtemp, (address)Interpreter::return_3_addrs_by_index_table());
-  __ load_address(table);
+  AddressLiteral table(Interpreter::return_3_addrs_by_index_table());
+  __ set(table, Rtemp);
   __ srl(Rret, ConstantPoolCacheEntry::tosBits, Rret);   // get return type
   // Make sure we don't need to mask Rret for tosBits after the above shift
   ConstantPoolCacheEntry::verify_tosBits();
   __ sll(Rret, LogBytesPerWord, Rret);
   __ ld_ptr(Rtemp, Rret, Rret);                          // get return address
@@ -3019,12 +3010,12 @@
   __ verify_oop(O0);

   __ mov(Rflags, Rret);

   // get return address
-  Address table(Rscratch, (address)Interpreter::return_5_addrs_by_index_table());
-  __ load_address(table);
+  AddressLiteral table(Interpreter::return_5_addrs_by_index_table());
+  __ set(table, Rscratch);
   __ srl(Rret, ConstantPoolCacheEntry::tosBits, Rret);   // get return type
   // Make sure we don't need to mask Rret for tosBits after the above shift
   ConstantPoolCacheEntry::verify_tosBits();
   __ sll(Rret, LogBytesPerWord, Rret);
   __ ld_ptr(Rscratch, Rret, Rret);                       // get return address
@@ -3057,11 +3048,11 @@
   // compute start of first itableOffsetEntry (which is at end of vtable)
   const int base = instanceKlass::vtable_start_offset() * wordSize;
   Label search;
   Register Rtemp = Rflags;

-  __ ld(Address(RklassOop, 0, instanceKlass::vtable_length_offset() * wordSize), Rtemp);
+  __ ld(RklassOop, instanceKlass::vtable_length_offset() * wordSize, Rtemp);
   if (align_object_offset(1) > 1) {
     __ round_to(Rtemp, align_object_offset(1));
   }
   __ sll(Rtemp, LogBytesPerWord, Rtemp);   // Rscratch *= 4;
   if (Assembler::is_simm13(base)) {
@@ -3622,13 +3613,13 @@

 void TemplateTable::wide() {
   transition(vtos, vtos);
   __ ldub(Lbcp, 1, G3_scratch);  // get next bc
   __ sll(G3_scratch, LogBytesPerWord, G3_scratch);
-  Address ep(G4_scratch, (address)Interpreter::_wentry_point);
-  __ load_address(ep);
-  __ ld_ptr(ep.base(), G3_scratch, G3_scratch);
+  AddressLiteral ep(Interpreter::_wentry_point);
+  __ set(ep, G4_scratch);
+  __ ld_ptr(G4_scratch, G3_scratch, G3_scratch);
   __ jmp(G3_scratch, G0);
   __ delayed()->nop();
   // Note: the Lbcp increment step is part of the individual wide bytecode implementations
 }

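wide() is the same literal-table pattern one last time, now indexing Interpreter::_wentry_point by the bytecode that follows the wide prefix. The control flow, sketched in C++ (the array size and the dispatch signature are assumptions):

    #include <cstdint>

    typedef void (*wentry_t)();
    static wentry_t wentry_point[256];     // placeholder for Interpreter::_wentry_point

    void dispatch_wide(const uint8_t* bcp) {
      uint8_t bc = bcp[1];                 // __ ldub(Lbcp, 1, G3_scratch): bc after the wide prefix
      wentry_point[bc]();                  // sll + ld_ptr + jmp: word-scaled table dispatch
      // Lbcp is bumped inside each wide bytecode's own implementation.
    }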