comparison src/cpu/sparc/vm/templateTable_sparc.cpp @ 1503:c640000b7cc1

6829193: JSR 292 needs to support SPARC
Summary: There are unimplemented portions of the hotspot code for method handles and invokedynamic specific to SPARC.
Reviewed-by: kvn, never, jrose
author twisti
date Thu, 29 Apr 2010 06:30:25 -0700
parents 85656c8fa13f
children 2338d41fbd81
--- a/src/cpu/sparc/vm/templateTable_sparc.cpp	1399:90acda19b80f
+++ b/src/cpu/sparc/vm/templateTable_sparc.cpp	1503:c640000b7cc1
@@ -1,7 +1,7 @@
 /*
- * Copyright 1997-2009 Sun Microsystems, Inc. All Rights Reserved.
+ * Copyright 1997-2010 Sun Microsystems, Inc. All Rights Reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
  * under the terms of the GNU General Public License version 2 only, as
  * published by the Free Software Foundation.
@@ -1961,40 +1961,52 @@
 }
 
 // ----------------------------------------------------------------------------
 void TemplateTable::resolve_cache_and_index(int byte_no, Register Rcache, Register index) {
   assert(byte_no == 1 || byte_no == 2, "byte_no out of range");
+  bool is_invokedynamic = (bytecode() == Bytecodes::_invokedynamic);
+
   // Depends on cpCacheOop layout!
   const int shift_count = (1 + byte_no)*BitsPerByte;
   Label resolved;
 
-  __ get_cache_and_index_at_bcp(Rcache, index, 1);
-  __ ld_ptr(Rcache, constantPoolCacheOopDesc::base_offset() +
-            ConstantPoolCacheEntry::indices_offset(), Lbyte_code);
-
-  __ srl( Lbyte_code, shift_count, Lbyte_code );
-  __ and3( Lbyte_code, 0xFF, Lbyte_code );
-  __ cmp( Lbyte_code, (int)bytecode());
-  __ br( Assembler::equal, false, Assembler::pt, resolved);
-  __ delayed()->set((int)bytecode(), O1);
+  __ get_cache_and_index_at_bcp(Rcache, index, 1, is_invokedynamic);
+  if (is_invokedynamic) {
+    // We are resolved if the f1 field contains a non-null CallSite object.
+    __ ld_ptr(Rcache, constantPoolCacheOopDesc::base_offset() +
+              ConstantPoolCacheEntry::f1_offset(), Lbyte_code);
+    __ tst(Lbyte_code);
+    __ br(Assembler::notEqual, false, Assembler::pt, resolved);
+    __ delayed()->set((int)bytecode(), O1);
+  } else {
+    __ ld_ptr(Rcache, constantPoolCacheOopDesc::base_offset() +
+              ConstantPoolCacheEntry::indices_offset(), Lbyte_code);
+
+    __ srl( Lbyte_code, shift_count, Lbyte_code );
+    __ and3( Lbyte_code, 0xFF, Lbyte_code );
+    __ cmp( Lbyte_code, (int)bytecode());
+    __ br( Assembler::equal, false, Assembler::pt, resolved);
+    __ delayed()->set((int)bytecode(), O1);
+  }
 
   address entry;
   switch (bytecode()) {
     case Bytecodes::_getstatic : // fall through
     case Bytecodes::_putstatic : // fall through
     case Bytecodes::_getfield  : // fall through
     case Bytecodes::_putfield  : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_get_put); break;
     case Bytecodes::_invokevirtual : // fall through
     case Bytecodes::_invokespecial : // fall through
     case Bytecodes::_invokestatic  : // fall through
     case Bytecodes::_invokeinterface: entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invoke); break;
+    case Bytecodes::_invokedynamic : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokedynamic); break;
     default : ShouldNotReachHere(); break;
   }
   // first time invocation - must resolve first
   __ call_VM(noreg, entry, O1);
   // Update registers with resolved info
-  __ get_cache_and_index_at_bcp(Rcache, index, 1);
+  __ get_cache_and_index_at_bcp(Rcache, index, 1, is_invokedynamic);
   __ bind(resolved);
 }
 
 void TemplateTable::load_invoke_cp_cache_entry(int byte_no,
                                                Register Rmethod,
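
Note on the hunk above: the resolved-check that this template emits reduces to a byte extraction on the packed indices word in the ordinary case, and to a null test on the f1 slot in the new invokedynamic case. The following standalone C++ sketch models that control flow under assumed data layouts; CacheEntry and is_resolved are invented names for illustration, not hotspot's ConstantPoolCacheEntry API.

#include <cstdint>
#include <cstdio>

// Hypothetical stand-in for a constant pool cache entry.  The real entry
// packs the rewritten bytecodes into an 'indices' word and stores the
// resolved CallSite oop (for invokedynamic) in 'f1'.
struct CacheEntry {
  uint32_t indices;   // bytecode_2 | bytecode_1 | constant-pool index
  void*    f1;        // non-null once an invokedynamic site is resolved
};

const int BitsPerByte = 8;

// Mirrors the check emitted by resolve_cache_and_index: true if the entry
// is already resolved for this bytecode, false if the runtime must be called.
bool is_resolved(const CacheEntry& e, int byte_no, int bytecode, bool is_invokedynamic) {
  if (is_invokedynamic) {
    // Resolved iff f1 already holds a CallSite object (the tst/br pair above).
    return e.f1 != nullptr;
  }
  // Ordinary case: shift the wanted bytecode byte into the low 8 bits (srl),
  // mask it (and3), and compare it against the current bytecode (cmp).
  const int shift_count = (1 + byte_no) * BitsPerByte;
  int cached = (e.indices >> shift_count) & 0xFF;
  return cached == bytecode;
}

int main() {
  CacheEntry unresolved = { 0x00000012u, nullptr };  // only the cp index is filled in
  CacheEntry resolved   = { 0x00B60012u, nullptr };  // byte_no 1 slot holds 0xB6 (invokevirtual)
  std::printf("%d\n", is_resolved(resolved,   1, 0xB6, false));  // prints 1
  std::printf("%d\n", is_resolved(unresolved, 1, 0xB6, false));  // prints 0
  std::printf("%d\n", is_resolved(unresolved, 1, 0xBA, true));   // prints 0
  return 0;
}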
@@ -3128,11 +3140,46 @@
     // the call_VM checks for exception, so we should never return here.
     __ should_not_reach_here();
     return;
   }
 
-  __ stop("invokedynamic NYI");//6815692//
+  // G5: CallSite object (f1)
+  // XX: unused (f2)
+  // G3: receiver address
+  // XX: flags (unused)
+
+  Register G5_callsite = G5_method;
+  Register Rscratch    = G3_scratch;
+  Register Rtemp       = G1_scratch;
+  Register Rret        = Lscratch;
+
+  load_invoke_cp_cache_entry(byte_no, G5_callsite, noreg, Rret, false);
+  __ mov(SP, O5_savedSP);  // record SP that we wanted the callee to restore
+
+  __ verify_oop(G5_callsite);
+
+  // profile this call
+  __ profile_call(O4);
+
+  // get return address
+  AddressLiteral table(Interpreter::return_5_addrs_by_index_table());
+  __ set(table, Rtemp);
+  __ srl(Rret, ConstantPoolCacheEntry::tosBits, Rret);  // get return type
+  // Make sure we don't need to mask Rret for tosBits after the above shift
+  ConstantPoolCacheEntry::verify_tosBits();
+  __ sll(Rret, LogBytesPerWord, Rret);
+  __ ld_ptr(Rtemp, Rret, Rret);  // get return address
+
+  __ ld_ptr(G5_callsite, __ delayed_value(java_dyn_CallSite::target_offset_in_bytes, Rscratch), G3_method_handle);
+  __ null_check(G3_method_handle);
+
+  // Adjust Rret first so Llast_SP can be same as Rret
+  __ add(Rret, -frame::pc_return_offset, O7);
+  __ add(Lesp, BytesPerWord, Gargs);  // setup parameter pointer
+  __ jump_to_method_handle_entry(G3_method_handle, Rtemp, /* emit_delayed_nop */ false);
+  // Record SP so we can remove any stack space allocated by adapter transition
+  __ delayed()->mov(SP, Llast_SP);
 }
 
 
 //----------------------------------------------------------------------------------------------------
 // Allocation
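
Note on the hunk above: the added template is easier to follow as straight-line logic. It fetches the CallSite oop that resolution left in f1, loads the CallSite's target MethodHandle, null-checks it, selects the interpreter return entry by return type, and tail-jumps to the method-handle entry point. The standalone C++ sketch below models that sequence with invented CallSite and MethodHandle structs and an ordinary function call in place of the register-level tail jump; it is an illustration under those assumptions, not the hotspot code.

#include <cstdio>
#include <stdexcept>

// Invented stand-ins for the Java heap objects involved
// (java.dyn.CallSite and java.dyn.MethodHandle in the JDK of this era).
struct MethodHandle {
  void (*entry)(const char* return_type);  // models the method handle entry point
};

struct CallSite {
  MethodHandle* target;                    // models java_dyn_CallSite::target
};

// Models the dispatch after resolution: the cp cache entry's f1 field
// already holds the CallSite.
void invokedynamic_dispatch(CallSite* f1, const char* return_type) {
  MethodHandle* mh = f1->target;           // ld_ptr(G5_callsite, target offset, G3_method_handle)
  if (mh == nullptr) {                     // null_check(G3_method_handle)
    throw std::runtime_error("NullPointerException: CallSite target not set");
  }
  // The template looks up the return address in return_5_addrs_by_index_table
  // keyed by return type and then jumps; here we simply pass the type along
  // and make a normal call.
  mh->entry(return_type);                  // jump_to_method_handle_entry(...)
}

static void example_target(const char* return_type) {
  std::printf("method handle invoked, returns %s\n", return_type);
}

int main() {
  MethodHandle mh = { example_target };
  CallSite cs = { &mh };
  invokedynamic_dispatch(&cs, "int");
  return 0;
}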