comparison src/cpu/sparc/vm/templateTable_sparc.cpp @ 6725:da91efe96a93

6964458: Reimplement class meta-data storage to use native memory
Summary: Remove PermGen, allocate meta-data in metaspace linked to class loaders, rewrite GC walking, rewrite and rename metadata to be C++ classes
Reviewed-by: jmasa, stefank, never, coleenp, kvn, brutisso, mgerdin, dholmes, jrose, twisti, roland
Contributed-by: jmasa <jon.masamitsu@oracle.com>, stefank <stefan.karlsson@oracle.com>, mgerdin <mikael.gerdin@oracle.com>, never <tom.rodriguez@oracle.com>
author coleenp
date Sat, 01 Sep 2012 13:25:18 -0400
parents 1d7922586cf6
children 75f33eecc1b3
comparing 6724:36d1d483d5d6 with 6725:da91efe96a93
1 /* 1 /*
2 * Copyright (c) 1997, 2011, Oracle and/or its affiliates. All rights reserved. 2 * Copyright (c) 1997, 2012, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 * 4 *
5 * This code is free software; you can redistribute it and/or modify it 5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as 6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation. 7 * published by the Free Software Foundation.
25 #include "precompiled.hpp" 25 #include "precompiled.hpp"
26 #include "interpreter/interpreter.hpp" 26 #include "interpreter/interpreter.hpp"
27 #include "interpreter/interpreterRuntime.hpp" 27 #include "interpreter/interpreterRuntime.hpp"
28 #include "interpreter/templateTable.hpp" 28 #include "interpreter/templateTable.hpp"
29 #include "memory/universe.inline.hpp" 29 #include "memory/universe.inline.hpp"
30 #include "oops/methodDataOop.hpp" 30 #include "oops/methodData.hpp"
31 #include "oops/objArrayKlass.hpp" 31 #include "oops/objArrayKlass.hpp"
32 #include "oops/oop.inline.hpp" 32 #include "oops/oop.inline.hpp"
33 #include "prims/methodHandles.hpp" 33 #include "prims/methodHandles.hpp"
34 #include "runtime/sharedRuntime.hpp" 34 #include "runtime/sharedRuntime.hpp"
35 #include "runtime/stubRoutines.hpp" 35 #include "runtime/stubRoutines.hpp"
150 150
151 151
152 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg, 152 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
153 Register temp_reg, bool load_bc_into_bc_reg/*=true*/, 153 Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
154 int byte_no) { 154 int byte_no) {
155 // With sharing on, may need to test methodOop flag. 155 // With sharing on, may need to test Method* flag.
156 if (!RewriteBytecodes) return; 156 if (!RewriteBytecodes) return;
157 Label L_patch_done; 157 Label L_patch_done;
158 158
159 switch (bc) { 159 switch (bc) {
160 case Bytecodes::_fast_aputfield: 160 case Bytecodes::_fast_aputfield:
302 } else { 302 } else {
303 __ ldub(Lbcp, 1, O1); 303 __ ldub(Lbcp, 1, O1);
304 } 304 }
305 __ get_cpool_and_tags(O0, O2); 305 __ get_cpool_and_tags(O0, O2);
306 306
307 const int base_offset = constantPoolOopDesc::header_size() * wordSize; 307 const int base_offset = ConstantPool::header_size() * wordSize;
308 const int tags_offset = typeArrayOopDesc::header_size(T_BYTE) * wordSize; 308 const int tags_offset = Array<u1>::base_offset_in_bytes();
309 309
310 // get type from tags 310 // get type from tags
311 __ add(O2, tags_offset, O2); 311 __ add(O2, tags_offset, O2);
312 __ ldub(O2, O1, O2); 312 __ ldub(O2, O1, O2);
313 // unresolved string? If so, must resolve
314 __ cmp_and_brx_short(O2, JVM_CONSTANT_UnresolvedString, Assembler::equal, Assembler::pt, call_ldc);
315 313
316 // unresolved class? If so, must resolve 314 // unresolved class? If so, must resolve
317 __ cmp_and_brx_short(O2, JVM_CONSTANT_UnresolvedClass, Assembler::equal, Assembler::pt, call_ldc); 315 __ cmp_and_brx_short(O2, JVM_CONSTANT_UnresolvedClass, Assembler::equal, Assembler::pt, call_ldc);
318 316
319 // unresolved class in error state 317 // unresolved class in error state
344 __ brx(Assembler::equal, true, Assembler::pt, isString); 342 __ brx(Assembler::equal, true, Assembler::pt, isString);
345 __ delayed()->cmp(O2, JVM_CONSTANT_Object); 343 __ delayed()->cmp(O2, JVM_CONSTANT_Object);
346 __ brx(Assembler::notEqual, true, Assembler::pt, notString); 344 __ brx(Assembler::notEqual, true, Assembler::pt, notString);
347 __ delayed()->ldf(FloatRegisterImpl::S, O0, O1, Ftos_f); 345 __ delayed()->ldf(FloatRegisterImpl::S, O0, O1, Ftos_f);
348 __ bind(isString); 346 __ bind(isString);
349 __ ld_ptr(O0, O1, Otos_i); 347 __ stop("string should be rewritten to fast_aldc");
350 __ verify_oop(Otos_i);
351 __ push(atos);
352 __ ba_short(exit); 348 __ ba_short(exit);
353 349
354 __ bind(notString); 350 __ bind(notString);
355 // __ ldf(FloatRegisterImpl::S, O0, O1, Ftos_f); 351 // __ ldf(FloatRegisterImpl::S, O0, O1, Ftos_f);
356 __ push(ftos); 352 __ push(ftos);
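
The `tags_offset` change in this hunk is the PermGen removal in miniature: the constant-pool tag bytes now live in a native metaspace `Array<u1>`, whose payload starts at a plain base offset, instead of in a Java `typeArrayOop` whose payload sits behind an object header. A minimal sketch of the assumed layout (illustrative types, not the HotSpot source):

```cpp
#include <cstddef>

// Stand-in for the metaspace Array<u1> that now holds the CP tags: the
// element data follows the length field directly, with no object header.
struct NativeU1Array {
  int length;               // element count
  unsigned char data[1];    // tag bytes start here
  static size_t base_offset_in_bytes() {
    return offsetof(NativeU1Array, data);  // what Array<u1>::base_offset_in_bytes() models
  }
};
```
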
362 // %%% We should use this to handle Class and String constants also. 358 // %%% We should use this to handle Class and String constants also.
363 // %%% It will simplify the ldc/primitive path considerably. 359 // %%% It will simplify the ldc/primitive path considerably.
364 void TemplateTable::fast_aldc(bool wide) { 360 void TemplateTable::fast_aldc(bool wide) {
365 transition(vtos, atos); 361 transition(vtos, atos);
366 362
367 if (!EnableInvokeDynamic) { 363 int index_size = wide ? sizeof(u2) : sizeof(u1);
368 // We should not encounter this bytecode if !EnableInvokeDynamic. 364 Label resolved;
369 // The verifier will stop it. However, if we get past the verifier, 365
370 // this will stop the thread in a reasonable way, without crashing the JVM. 366 // We are resolved if the resolved reference cache entry contains a
371 __ call_VM(noreg, CAST_FROM_FN_PTR(address, 367 // non-null object (CallSite, etc.)
372 InterpreterRuntime::throw_IncompatibleClassChangeError)); 368 assert_different_registers(Otos_i, G3_scratch);
373 // the call_VM checks for exception, so we should never return here. 369 __ get_cache_index_at_bcp(Otos_i, G3_scratch, 1, index_size); // load index => G3_scratch
374 __ should_not_reach_here(); 370 __ load_resolved_reference_at_index(Otos_i, G3_scratch);
375 return; 371 __ tst(Otos_i);
376 } 372 __ br(Assembler::notEqual, false, Assembler::pt, resolved);
377 373 __ delayed()->set((int)bytecode(), O1);
378 Register Rcache = G3_scratch; 374
379 Register Rscratch = G4_scratch; 375 address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_ldc);
380 376
381 resolve_cache_and_index(f12_oop, Otos_i, Rcache, Rscratch, wide ? sizeof(u2) : sizeof(u1)); 377 // first time invocation - must resolve first
382 378 __ call_VM(Otos_i, entry, O1);
379 __ bind(resolved);
383 __ verify_oop(Otos_i); 380 __ verify_oop(Otos_i);
384 381 }
385 Label L_done; 382
386 const Register Rcon_klass = G3_scratch; // same as Rcache
387 const Register Rarray_klass = G4_scratch; // same as Rscratch
388 __ load_klass(Otos_i, Rcon_klass);
389 AddressLiteral array_klass_addr((address)Universe::systemObjArrayKlassObj_addr());
390 __ load_contents(array_klass_addr, Rarray_klass);
391 __ cmp_and_brx_short(Rarray_klass, Rcon_klass, Assembler::notEqual, Assembler::pt, L_done);
392 __ ld(Address(Otos_i, arrayOopDesc::length_offset_in_bytes()), Rcon_klass);
393 __ tst(Rcon_klass);
394 __ brx(Assembler::zero, true, Assembler::pt, L_done);
395 __ delayed()->clr(Otos_i); // executed only if branch is taken
396
397 // Load the exception from the system-array which wraps it:
398 __ load_heap_oop(Otos_i, arrayOopDesc::base_offset_in_bytes(T_OBJECT), Otos_i);
399 __ throw_if_not_x(Assembler::never, Interpreter::throw_exception_entry(), G3_scratch);
400
401 __ bind(L_done);
402 }
403 383
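
The rewritten `fast_aldc` above replaces the old CP-cache/f1 probing (and the deleted systemObjArray error unwrapping) with a lookup in the constant pool's resolved-references array, indexed via `get_cache_index_at_bcp`. A hedged sketch of that control flow, with illustrative names rather than the real VM types:

```cpp
// Model of the fast_aldc fast path: consult a side array of already
// resolved constants; only a null slot takes the slow VM call, which
// resolves the constant and caches it for the next execution.
struct ResolvedReferencesModel {
  void** entries;                    // one slot per patchable constant
  void* (*resolve_slow)(int index);  // assumed hook standing in for
                                     // InterpreterRuntime::resolve_ldc
  void* load(int index) {
    void* v = entries[index];
    if (v != nullptr) return v;      // the 'resolved' branch in the stub
    return entries[index] = resolve_slow(index);
  }
};
```
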
404 void TemplateTable::ldc2_w() { 384 void TemplateTable::ldc2_w() {
405 transition(vtos, vtos); 385 transition(vtos, vtos);
406 Label retry, resolved, Long, exit; 386 Label Long, exit;
407 387
408 __ bind(retry);
409 __ get_2_byte_integer_at_bcp(1, G3_scratch, O1, InterpreterMacroAssembler::Unsigned); 388 __ get_2_byte_integer_at_bcp(1, G3_scratch, O1, InterpreterMacroAssembler::Unsigned);
410 __ get_cpool_and_tags(O0, O2); 389 __ get_cpool_and_tags(O0, O2);
411 390
412 const int base_offset = constantPoolOopDesc::header_size() * wordSize; 391 const int base_offset = ConstantPool::header_size() * wordSize;
413 const int tags_offset = typeArrayOopDesc::header_size(T_BYTE) * wordSize; 392 const int tags_offset = Array<u1>::base_offset_in_bytes();
414 // get type from tags 393 // get type from tags
415 __ add(O2, tags_offset, O2); 394 __ add(O2, tags_offset, O2);
416 __ ldub(O2, O1, O2); 395 __ ldub(O2, O1, O2);
417 396
418 __ sll(O1, LogBytesPerWord, O1); 397 __ sll(O1, LogBytesPerWord, O1);
419 __ add(O0, O1, G3_scratch); 398 __ add(O0, O1, G3_scratch);
420 399
421 __ cmp_and_brx_short(O2, JVM_CONSTANT_Double, Assembler::notEqual, Assembler::pt, Long); 400 __ cmp_and_brx_short(O2, JVM_CONSTANT_Double, Assembler::notEqual, Assembler::pt, Long);
422 // A double can be placed at word-aligned locations in the constant pool. 401 // A double can be placed at word-aligned locations in the constant pool.
423 // Check out Conversions.java for an example. 402 // Check out Conversions.java for an example.
424 // Also constantPoolOopDesc::header_size() is 20, which makes it very difficult 403 // Also ConstantPool::header_size() is 20, which makes it very difficult
425 // to double-align double on the constant pool. SG, 11/7/97 404 // to double-align double on the constant pool. SG, 11/7/97
426 #ifdef _LP64 405 #ifdef _LP64
427 __ ldf(FloatRegisterImpl::D, G3_scratch, base_offset, Ftos_d); 406 __ ldf(FloatRegisterImpl::D, G3_scratch, base_offset, Ftos_d);
428 #else 407 #else
429 FloatRegister f = Ftos_d; 408 FloatRegister f = Ftos_d;
1593 __ float_cmp( is_float, unordered_result, F2, F0, Otos_i ); 1572 __ float_cmp( is_float, unordered_result, F2, F0, Otos_i );
1594 } 1573 }
1595 1574
1596 void TemplateTable::branch(bool is_jsr, bool is_wide) { 1575 void TemplateTable::branch(bool is_jsr, bool is_wide) {
1597 // Note: on SPARC, we use InterpreterMacroAssembler::if_cmp also. 1576 // Note: on SPARC, we use InterpreterMacroAssembler::if_cmp also.
1598 __ verify_oop(Lmethod);
1599 __ verify_thread(); 1577 __ verify_thread();
1600 1578
1601 const Register O2_bumped_count = O2; 1579 const Register O2_bumped_count = O2;
1602 __ profile_taken_branch(G3_scratch, O2_bumped_count); 1580 __ profile_taken_branch(G3_scratch, O2_bumped_count);
1603 1581
1609 // Handle all the JSR stuff here, then exit. 1587 // Handle all the JSR stuff here, then exit.
1610 // It's much shorter and cleaner than intermingling with the 1588 // It's much shorter and cleaner than intermingling with the
1611 // non-JSR normal-branch stuff occurring below. 1589 // non-JSR normal-branch stuff occurring below.
1612 if( is_jsr ) { 1590 if( is_jsr ) {
1613 // compute return address as bci in Otos_i 1591 // compute return address as bci in Otos_i
1614 __ ld_ptr(Lmethod, methodOopDesc::const_offset(), G3_scratch); 1592 __ ld_ptr(Lmethod, Method::const_offset(), G3_scratch);
1615 __ sub(Lbcp, G3_scratch, G3_scratch); 1593 __ sub(Lbcp, G3_scratch, G3_scratch);
1616 __ sub(G3_scratch, in_bytes(constMethodOopDesc::codes_offset()) - (is_wide ? 5 : 3), Otos_i); 1594 __ sub(G3_scratch, in_bytes(ConstMethod::codes_offset()) - (is_wide ? 5 : 3), Otos_i);
1617 1595
1618 // Bump Lbcp to target of JSR 1596 // Bump Lbcp to target of JSR
1619 __ add(Lbcp, O1_disp, Lbcp); 1597 __ add(Lbcp, O1_disp, Lbcp);
1620 // Push returnAddress for "ret" on stack 1598 // Push returnAddress for "ret" on stack
1621 __ push_ptr(Otos_i); 1599 __ push_ptr(Otos_i);
1643 Label Lno_mdo, Loverflow; 1621 Label Lno_mdo, Loverflow;
1644 int increment = InvocationCounter::count_increment; 1622 int increment = InvocationCounter::count_increment;
1645 int mask = ((1 << Tier0BackedgeNotifyFreqLog) - 1) << InvocationCounter::count_shift; 1623 int mask = ((1 << Tier0BackedgeNotifyFreqLog) - 1) << InvocationCounter::count_shift;
1646 if (ProfileInterpreter) { 1624 if (ProfileInterpreter) {
1647 // If no method data exists, go to profile_continue. 1625 // If no method data exists, go to profile_continue.
1648 __ ld_ptr(Lmethod, methodOopDesc::method_data_offset(), G4_scratch); 1626 __ ld_ptr(Lmethod, Method::method_data_offset(), G4_scratch);
1649 __ br_null_short(G4_scratch, Assembler::pn, Lno_mdo); 1627 __ br_null_short(G4_scratch, Assembler::pn, Lno_mdo);
1650 1628
1651 // Increment backedge counter in the MDO 1629 // Increment backedge counter in the MDO
1652 Address mdo_backedge_counter(G4_scratch, in_bytes(methodDataOopDesc::backedge_counter_offset()) + 1630 Address mdo_backedge_counter(G4_scratch, in_bytes(MethodData::backedge_counter_offset()) +
1653 in_bytes(InvocationCounter::counter_offset())); 1631 in_bytes(InvocationCounter::counter_offset()));
1654 __ increment_mask_and_jump(mdo_backedge_counter, increment, mask, G3_scratch, Lscratch, 1632 __ increment_mask_and_jump(mdo_backedge_counter, increment, mask, G3_scratch, Lscratch,
1655 Assembler::notZero, &Lforward); 1633 Assembler::notZero, &Lforward);
1656 __ ba_short(Loverflow); 1634 __ ba_short(Loverflow);
1657 } 1635 }
1658 1636
1659 // If there's no MDO, increment counter in methodOop 1637 // If there's no MDO, increment counter in Method*
1660 __ bind(Lno_mdo); 1638 __ bind(Lno_mdo);
1661 Address backedge_counter(Lmethod, in_bytes(methodOopDesc::backedge_counter_offset()) + 1639 Address backedge_counter(Lmethod, in_bytes(Method::backedge_counter_offset()) +
1662 in_bytes(InvocationCounter::counter_offset())); 1640 in_bytes(InvocationCounter::counter_offset()));
1663 __ increment_mask_and_jump(backedge_counter, increment, mask, G3_scratch, Lscratch, 1641 __ increment_mask_and_jump(backedge_counter, increment, mask, G3_scratch, Lscratch,
1664 Assembler::notZero, &Lforward); 1642 Assembler::notZero, &Lforward);
1665 __ bind(Loverflow); 1643 __ bind(Loverflow);
1666 1644
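
For reference, the backedge path above (`increment_mask_and_jump` on either the MDO or the Method* counter) amounts to the following test; a simplified sketch that ignores the status bits packed in via `count_shift`:

```cpp
// Bump the backedge counter, then mask it: while any masked bits remain
// set we branch to Lforward and keep interpreting; when they all clear,
// the counter has "overflowed" and control falls through to Loverflow.
inline bool backedge_overflowed(int* counter, int increment, int mask) {
  *counter += increment;
  return (*counter & mask) == 0;   // true => Loverflow (OSR check) path
}
```
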
1789 #endif 1767 #endif
1790 #endif 1768 #endif
1791 1769
1792 __ profile_ret(vtos, Otos_i, G4_scratch); 1770 __ profile_ret(vtos, Otos_i, G4_scratch);
1793 1771
1794 __ ld_ptr(Lmethod, methodOopDesc::const_offset(), G3_scratch); 1772 __ ld_ptr(Lmethod, Method::const_offset(), G3_scratch);
1795 __ add(G3_scratch, Otos_i, G3_scratch); 1773 __ add(G3_scratch, Otos_i, G3_scratch);
1796 __ add(G3_scratch, in_bytes(constMethodOopDesc::codes_offset()), Lbcp); 1774 __ add(G3_scratch, in_bytes(ConstMethod::codes_offset()), Lbcp);
1797 __ dispatch_next(vtos); 1775 __ dispatch_next(vtos);
1798 } 1776 }
1799 1777
1800 1778
1801 void TemplateTable::wide_ret() { 1779 void TemplateTable::wide_ret() {
1804 __ access_local_returnAddress(G3_scratch, Otos_i); 1782 __ access_local_returnAddress(G3_scratch, Otos_i);
1805 // Otos_i contains the bci, compute the bcp from that 1783 // Otos_i contains the bci, compute the bcp from that
1806 1784
1807 __ profile_ret(vtos, Otos_i, G4_scratch); 1785 __ profile_ret(vtos, Otos_i, G4_scratch);
1808 1786
1809 __ ld_ptr(Lmethod, methodOopDesc::const_offset(), G3_scratch); 1787 __ ld_ptr(Lmethod, Method::const_offset(), G3_scratch);
1810 __ add(G3_scratch, Otos_i, G3_scratch); 1788 __ add(G3_scratch, Otos_i, G3_scratch);
1811 __ add(G3_scratch, in_bytes(constMethodOopDesc::codes_offset()), Lbcp); 1789 __ add(G3_scratch, in_bytes(ConstMethod::codes_offset()), Lbcp);
1812 __ dispatch_next(vtos); 1790 __ dispatch_next(vtos);
1813 } 1791 }
1814 1792
1815 1793
1816 void TemplateTable::tableswitch() { 1794 void TemplateTable::tableswitch() {
2084 __ membar( order_constraint ); 2062 __ membar( order_constraint );
2085 } 2063 }
2086 2064
2087 // ---------------------------------------------------------------------------- 2065 // ----------------------------------------------------------------------------
2088 void TemplateTable::resolve_cache_and_index(int byte_no, 2066 void TemplateTable::resolve_cache_and_index(int byte_no,
2089 Register result,
2090 Register Rcache, 2067 Register Rcache,
2091 Register index, 2068 Register index,
2092 size_t index_size) { 2069 size_t index_size) {
2093 // Depends on cpCacheOop layout! 2070 // Depends on cpCacheOop layout!
2094 Label resolved; 2071 Label resolved;
2095 2072
2096 if (byte_no == f12_oop) {
2097 // We are resolved if the f1 field contains a non-null object (CallSite, MethodType, etc.)
2098 // This kind of CP cache entry does not need to match bytecode_1 or bytecode_2, because
2099 // there is a 1-1 relation between bytecode type and CP entry type.
2100 // The caller will also load a methodOop from f2.
2101 assert(result != noreg, "");
2102 assert_different_registers(result, Rcache);
2103 __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size);
2104 __ ld_ptr(Rcache, constantPoolCacheOopDesc::base_offset() +
2105 ConstantPoolCacheEntry::f1_offset(), result);
2106 __ tst(result);
2107 __ br(Assembler::notEqual, false, Assembler::pt, resolved);
2108 __ delayed()->set((int)bytecode(), O1);
2109 } else {
2110 assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range"); 2073 assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
2111 assert(result == noreg, ""); //else change code for setting result
2112 __ get_cache_and_index_and_bytecode_at_bcp(Rcache, index, Lbyte_code, byte_no, 1, index_size); 2074 __ get_cache_and_index_and_bytecode_at_bcp(Rcache, index, Lbyte_code, byte_no, 1, index_size);
2113 __ cmp(Lbyte_code, (int) bytecode()); // have we resolved this bytecode? 2075 __ cmp(Lbyte_code, (int) bytecode()); // have we resolved this bytecode?
2114 __ br(Assembler::equal, false, Assembler::pt, resolved); 2076 __ br(Assembler::equal, false, Assembler::pt, resolved);
2115 __ delayed()->set((int)bytecode(), O1); 2077 __ delayed()->set((int)bytecode(), O1);
2116 }
2117 2078
2118 address entry; 2079 address entry;
2119 switch (bytecode()) { 2080 switch (bytecode()) {
2120 case Bytecodes::_getstatic : // fall through 2081 case Bytecodes::_getstatic : // fall through
2121 case Bytecodes::_putstatic : // fall through 2082 case Bytecodes::_putstatic : // fall through
2125 case Bytecodes::_invokespecial : // fall through 2086 case Bytecodes::_invokespecial : // fall through
2126 case Bytecodes::_invokestatic : // fall through 2087 case Bytecodes::_invokestatic : // fall through
2127 case Bytecodes::_invokeinterface: entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invoke); break; 2088 case Bytecodes::_invokeinterface: entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invoke); break;
2128 case Bytecodes::_invokehandle : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokehandle); break; 2089 case Bytecodes::_invokehandle : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokehandle); break;
2129 case Bytecodes::_invokedynamic : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokedynamic); break; 2090 case Bytecodes::_invokedynamic : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokedynamic); break;
2130 case Bytecodes::_fast_aldc : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_ldc); break;
2131 case Bytecodes::_fast_aldc_w : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_ldc); break;
2132 default: 2091 default:
2133 fatal(err_msg("unexpected bytecode: %s", Bytecodes::name(bytecode()))); 2092 fatal(err_msg("unexpected bytecode: %s", Bytecodes::name(bytecode())));
2134 break; 2093 break;
2135 } 2094 }
2136 // first time invocation - must resolve first 2095 // first time invocation - must resolve first
2137 __ call_VM(noreg, entry, O1); 2096 __ call_VM(noreg, entry, O1);
2138 // Update registers with resolved info 2097 // Update registers with resolved info
2139 __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size); 2098 __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size);
2140 if (result != noreg)
2141 __ ld_ptr(Rcache, constantPoolCacheOopDesc::base_offset() +
2142 ConstantPoolCacheEntry::f1_offset(), result);
2143 __ bind(resolved); 2099 __ bind(resolved);
2144 } 2100 }
2145 2101
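
With the `f12_oop` branch gone, `resolve_cache_and_index` is back to a single pattern: compare the cache entry's recorded bytecode against the current one and call into the VM only on a mismatch. A compact model with stand-in types:

```cpp
// The cache entry remembers which bytecode resolved it; a match means the
// entry is already filled in, so the VM call is skipped entirely.
struct CacheEntryModel { int resolved_bytecode; /* f1, f2, flags ... */ };

void resolve_once(CacheEntryModel* e, int bytecode, void (*resolve_vm)(int)) {
  if (e->resolved_bytecode == bytecode) return;  // branch to 'resolved'
  resolve_vm(bytecode);    // first time invocation - must resolve first
  // the caller then re-reads the cache entry, now populated by the VM
}
```
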
2146 void TemplateTable::load_invoke_cp_cache_entry(int byte_no, 2102 void TemplateTable::load_invoke_cp_cache_entry(int byte_no,
2147 Register method, 2103 Register method,
2156 assert_different_registers(cache, method, itable_index); 2112 assert_different_registers(cache, method, itable_index);
2157 2113
2158 // determine constant pool cache field offsets 2114 // determine constant pool cache field offsets
2159 assert(is_invokevirtual == (byte_no == f2_byte), "is_invokevirtual flag redundant"); 2115 assert(is_invokevirtual == (byte_no == f2_byte), "is_invokevirtual flag redundant");
2160 const int method_offset = in_bytes( 2116 const int method_offset = in_bytes(
2161 constantPoolCacheOopDesc::base_offset() + 2117 ConstantPoolCache::base_offset() +
2162 ((byte_no == f2_byte) 2118 ((byte_no == f2_byte)
2163 ? ConstantPoolCacheEntry::f2_offset() 2119 ? ConstantPoolCacheEntry::f2_offset()
2164 : ConstantPoolCacheEntry::f1_offset() 2120 : ConstantPoolCacheEntry::f1_offset()
2165 ) 2121 )
2166 ); 2122 );
2167 const int flags_offset = in_bytes(constantPoolCacheOopDesc::base_offset() + 2123 const int flags_offset = in_bytes(ConstantPoolCache::base_offset() +
2168 ConstantPoolCacheEntry::flags_offset()); 2124 ConstantPoolCacheEntry::flags_offset());
2169 // access constant pool cache fields 2125 // access constant pool cache fields
2170 const int index_offset = in_bytes(constantPoolCacheOopDesc::base_offset() + 2126 const int index_offset = in_bytes(ConstantPoolCache::base_offset() +
2171 ConstantPoolCacheEntry::f2_offset()); 2127 ConstantPoolCacheEntry::f2_offset());
2172 2128
2173 if (is_invokevfinal) { 2129 if (is_invokevfinal) {
2174 __ get_cache_and_index_at_bcp(cache, index, 1); 2130 __ get_cache_and_index_at_bcp(cache, index, 1);
2175 __ ld_ptr(Address(cache, method_offset), method); 2131 __ ld_ptr(Address(cache, method_offset), method);
2176 } else if (byte_no == f12_oop) { 2132 } else {
2177 // Resolved f1_oop (CallSite, MethodType, etc.) goes into 'itable_index'.
2178 // Resolved f2_oop (methodOop invoker) will go into 'method' (at index_offset).
2179 // See ConstantPoolCacheEntry::set_dynamic_call and set_method_handle.
2180 size_t index_size = (is_invokedynamic ? sizeof(u4) : sizeof(u2)); 2133 size_t index_size = (is_invokedynamic ? sizeof(u4) : sizeof(u2));
2181 resolve_cache_and_index(byte_no, itable_index, cache, index, index_size); 2134 resolve_cache_and_index(byte_no, cache, index, index_size);
2182 __ ld_ptr(Address(cache, index_offset), method);
2183 itable_index = noreg; // hack to disable load below
2184 } else {
2185 resolve_cache_and_index(byte_no, noreg, cache, index, sizeof(u2));
2186 __ ld_ptr(Address(cache, method_offset), method); 2135 __ ld_ptr(Address(cache, method_offset), method);
2187 } 2136 }
2188 2137
2189 if (itable_index != noreg) { 2138 if (itable_index != noreg) {
2190 // pick up itable index from f2 also: 2139 // pick up itable or appendix index from f2 also:
2191 assert(byte_no == f1_byte, "already picked up f1");
2192 __ ld_ptr(Address(cache, index_offset), itable_index); 2140 __ ld_ptr(Address(cache, index_offset), itable_index);
2193 } 2141 }
2194 __ ld_ptr(Address(cache, flags_offset), flags); 2142 __ ld_ptr(Address(cache, flags_offset), flags);
2195 } 2143 }
2196 2144
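
The slot selection in `load_invoke_cp_cache_entry` reduces to the sketch below: only invokevirtual (`byte_no == f2_byte`) keeps its Method* in f2, while the other invokes use f1 and leave f2 for the itable or appendix index (illustrative types):

```cpp
struct InvokeEntryModel { void* f1; void* f2; intptr_t flags; };

// Mirrors the method_offset computation above.
inline void* method_slot(const InvokeEntryModel* e, bool is_f2_byte) {
  return is_f2_byte ? e->f2 : e->f1;
}
```
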
2201 Register Roffset, 2149 Register Roffset,
2202 Register Rflags, 2150 Register Rflags,
2203 bool is_static) { 2151 bool is_static) {
2204 assert_different_registers(Rcache, Rflags, Roffset); 2152 assert_different_registers(Rcache, Rflags, Roffset);
2205 2153
2206 ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset(); 2154 ByteSize cp_base_offset = ConstantPoolCache::base_offset();
2207 2155
2208 __ ld_ptr(Rcache, cp_base_offset + ConstantPoolCacheEntry::flags_offset(), Rflags); 2156 __ ld_ptr(Rcache, cp_base_offset + ConstantPoolCacheEntry::flags_offset(), Rflags);
2209 __ ld_ptr(Rcache, cp_base_offset + ConstantPoolCacheEntry::f2_offset(), Roffset); 2157 __ ld_ptr(Rcache, cp_base_offset + ConstantPoolCacheEntry::f2_offset(), Roffset);
2210 if (is_static) { 2158 if (is_static) {
2211 __ ld_ptr(Rcache, cp_base_offset + ConstantPoolCacheEntry::f1_offset(), Robj); 2159 __ ld_ptr(Rcache, cp_base_offset + ConstantPoolCacheEntry::f1_offset(), Robj);
2160 const int mirror_offset = in_bytes(Klass::java_mirror_offset());
2161 __ ld_ptr( Robj, mirror_offset, Robj);
2212 } 2162 }
2213 } 2163 }
2214 2164
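
The added loads for the static case reflect the new world: f1 now holds a native `Klass*` rather than a `klassOop`, and static fields are addressed through the klass's `java.lang.Class` mirror, hence the extra dereference at `Klass::java_mirror_offset()`. Sketched with a stand-in type:

```cpp
struct KlassModel { void* java_mirror; /* ... */ };

// The field offset from f2 is then applied relative to the mirror object.
inline void* static_field_base(const KlassModel* k) {
  return k->java_mirror;
}
```
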
2215 // The registers Rcache and index expected to be set before call. 2165 // The registers Rcache and index expected to be set before call.
2216 // Correct values of the Rcache and index registers are preserved. 2166 // Correct values of the Rcache and index registers are preserved.
2217 void TemplateTable::jvmti_post_field_access(Register Rcache, 2167 void TemplateTable::jvmti_post_field_access(Register Rcache,
2218 Register index, 2168 Register index,
2219 bool is_static, 2169 bool is_static,
2220 bool has_tos) { 2170 bool has_tos) {
2221 ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset(); 2171 ByteSize cp_base_offset = ConstantPoolCache::base_offset();
2222 2172
2223 if (JvmtiExport::can_post_field_access()) { 2173 if (JvmtiExport::can_post_field_access()) {
2224 // Check to see if a field access watch has been set before we take 2174 // Check to see if a field access watch has been set before we take
2225 // the time to call into the VM. 2175 // the time to call into the VM.
2226 Label Label1; 2176 Label Label1;
2262 Register Rcache = G3_scratch; 2212 Register Rcache = G3_scratch;
2263 Register index = G4_scratch; 2213 Register index = G4_scratch;
2264 Register Rclass = Rcache; 2214 Register Rclass = Rcache;
2265 Register Roffset= G4_scratch; 2215 Register Roffset= G4_scratch;
2266 Register Rflags = G1_scratch; 2216 Register Rflags = G1_scratch;
2267 ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset(); 2217 ByteSize cp_base_offset = ConstantPoolCache::base_offset();
2268 2218
2269 resolve_cache_and_index(byte_no, noreg, Rcache, index, sizeof(u2)); 2219 resolve_cache_and_index(byte_no, Rcache, index, sizeof(u2));
2270 jvmti_post_field_access(Rcache, index, is_static, false); 2220 jvmti_post_field_access(Rcache, index, is_static, false);
2271 load_field_cp_cache_entry(Rclass, Rcache, index, Roffset, Rflags, is_static); 2221 load_field_cp_cache_entry(Rclass, Rcache, index, Roffset, Rflags, is_static);
2272 2222
2273 if (!is_static) { 2223 if (!is_static) {
2274 pop_and_check_object(Rclass); 2224 pop_and_check_object(Rclass);
2437 transition(atos, state); 2387 transition(atos, state);
2438 Register Rcache = G3_scratch; 2388 Register Rcache = G3_scratch;
2439 Register index = G4_scratch; 2389 Register index = G4_scratch;
2440 Register Roffset = G4_scratch; 2390 Register Roffset = G4_scratch;
2441 Register Rflags = Rcache; 2391 Register Rflags = Rcache;
2442 ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset(); 2392 ByteSize cp_base_offset = ConstantPoolCache::base_offset();
2443 2393
2444 __ get_cache_and_index_at_bcp(Rcache, index, 1); 2394 __ get_cache_and_index_at_bcp(Rcache, index, 1);
2445 jvmti_post_field_access(Rcache, index, /*is_static*/false, /*has_tos*/true); 2395 jvmti_post_field_access(Rcache, index, /*is_static*/false, /*has_tos*/true);
2446 2396
2447 __ ld_ptr(Rcache, cp_base_offset + ConstantPoolCacheEntry::f2_offset(), Roffset); 2397 __ ld_ptr(Rcache, cp_base_offset + ConstantPoolCacheEntry::f2_offset(), Roffset);
2548 } 2498 }
2549 2499
2550 // The registers Rcache and index expected to be set before call. 2500 // The registers Rcache and index expected to be set before call.
2551 // The function may destroy various registers, just not the Rcache and index registers. 2501 // The function may destroy various registers, just not the Rcache and index registers.
2552 void TemplateTable::jvmti_post_field_mod(Register Rcache, Register index, bool is_static) { 2502 void TemplateTable::jvmti_post_field_mod(Register Rcache, Register index, bool is_static) {
2553 ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset(); 2503 ByteSize cp_base_offset = ConstantPoolCache::base_offset();
2554 2504
2555 if (JvmtiExport::can_post_field_modification()) { 2505 if (JvmtiExport::can_post_field_modification()) {
2556 // Check to see if a field modification watch has been set before we take 2506 // Check to see if a field modification watch has been set before we take
2557 // the time to call into the VM. 2507 // the time to call into the VM.
2558 Label Label1; 2508 Label Label1;
2622 Register Rcache = G3_scratch; 2572 Register Rcache = G3_scratch;
2623 Register index = G4_scratch; 2573 Register index = G4_scratch;
2624 Register Rclass = Rcache; 2574 Register Rclass = Rcache;
2625 Register Roffset= G4_scratch; 2575 Register Roffset= G4_scratch;
2626 Register Rflags = G1_scratch; 2576 Register Rflags = G1_scratch;
2627 ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset(); 2577 ByteSize cp_base_offset = ConstantPoolCache::base_offset();
2628 2578
2629 resolve_cache_and_index(byte_no, noreg, Rcache, index, sizeof(u2)); 2579 resolve_cache_and_index(byte_no, Rcache, index, sizeof(u2));
2630 jvmti_post_field_mod(Rcache, index, is_static); 2580 jvmti_post_field_mod(Rcache, index, is_static);
2631 load_field_cp_cache_entry(Rclass, Rcache, index, Roffset, Rflags, is_static); 2581 load_field_cp_cache_entry(Rclass, Rcache, index, Roffset, Rflags, is_static);
2632 2582
2633 Assembler::Membar_mask_bits read_bits = 2583 Assembler::Membar_mask_bits read_bits =
2634 Assembler::Membar_mask_bits(Assembler::LoadStore | Assembler::StoreStore); 2584 Assembler::Membar_mask_bits(Assembler::LoadStore | Assembler::StoreStore);
2829 transition(state, vtos); 2779 transition(state, vtos);
2830 Register Rcache = G3_scratch; 2780 Register Rcache = G3_scratch;
2831 Register Rclass = Rcache; 2781 Register Rclass = Rcache;
2832 Register Roffset= G4_scratch; 2782 Register Roffset= G4_scratch;
2833 Register Rflags = G1_scratch; 2783 Register Rflags = G1_scratch;
2834 ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset(); 2784 ByteSize cp_base_offset = ConstantPoolCache::base_offset();
2835 2785
2836 jvmti_post_fast_field_mod(); 2786 jvmti_post_fast_field_mod();
2837 2787
2838 __ get_cache_and_index_at_bcp(Rcache, G4_scratch, 1); 2788 __ get_cache_and_index_at_bcp(Rcache, G4_scratch, 1);
2839 2789
2901 2851
2902 __ ld_ptr(Llocals, 0, Rreceiver); 2852 __ ld_ptr(Llocals, 0, Rreceiver);
2903 2853
2904 // access constant pool cache (is resolved) 2854 // access constant pool cache (is resolved)
2905 __ get_cache_and_index_at_bcp(Rcache, G4_scratch, 2); 2855 __ get_cache_and_index_at_bcp(Rcache, G4_scratch, 2);
2906 __ ld_ptr(Rcache, constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::f2_offset(), Roffset); 2856 __ ld_ptr(Rcache, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::f2_offset(), Roffset);
2907 __ add(Lbcp, 1, Lbcp); // needed to report exception at the correct bcp 2857 __ add(Lbcp, 1, Lbcp); // needed to report exception at the correct bcp
2908 2858
2909 __ verify_oop(Rreceiver); 2859 __ verify_oop(Rreceiver);
2910 __ null_check(Rreceiver); 2860 __ null_check(Rreceiver);
2911 if (state == atos) { 2861 if (state == atos) {
2921 Assembler::Membar_mask_bits membar_bits = 2871 Assembler::Membar_mask_bits membar_bits =
2922 Assembler::Membar_mask_bits(Assembler::LoadLoad | Assembler::LoadStore); 2872 Assembler::Membar_mask_bits(Assembler::LoadLoad | Assembler::LoadStore);
2923 if (__ membar_has_effect(membar_bits)) { 2873 if (__ membar_has_effect(membar_bits)) {
2924 2874
2925 // Get is_volatile value in Rflags and check if membar is needed 2875 // Get is_volatile value in Rflags and check if membar is needed
2926 __ ld_ptr(Rcache, constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::flags_offset(), Rflags); 2876 __ ld_ptr(Rcache, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::flags_offset(), Rflags);
2927 2877
2928 // Test volatile 2878 // Test volatile
2929 Label notVolatile; 2879 Label notVolatile;
2930 __ set((1 << ConstantPoolCacheEntry::is_volatile_shift), Lscratch); 2880 __ set((1 << ConstantPoolCacheEntry::is_volatile_shift), Lscratch);
2931 __ btst(Rflags, Lscratch); 2881 __ btst(Rflags, Lscratch);
2944 2894
2945 void TemplateTable::count_calls(Register method, Register temp) { 2895 void TemplateTable::count_calls(Register method, Register temp) {
2946 // implemented elsewhere 2896 // implemented elsewhere
2947 ShouldNotReachHere(); 2897 ShouldNotReachHere();
2948 } 2898 }
2949
2950 2899
2951 void TemplateTable::prepare_invoke(int byte_no, 2900 void TemplateTable::prepare_invoke(int byte_no,
2952 Register method, // linked method (or i-klass) 2901 Register method, // linked method (or i-klass)
2953 Register ra, // return address 2902 Register ra, // return address
2954 Register index, // itable index, MethodType, etc. 2903 Register index, // itable index, MethodType, etc.
2978 __ mov(SP, O5_savedSP); // record SP that we wanted the callee to restore 2927 __ mov(SP, O5_savedSP); // record SP that we wanted the callee to restore
2979 2928
2980 // maybe push appendix to arguments 2929 // maybe push appendix to arguments
2981 if (is_invokedynamic || is_invokehandle) { 2930 if (is_invokedynamic || is_invokehandle) {
2982 Label L_no_push; 2931 Label L_no_push;
2983 __ verify_oop(index);
2984 __ set((1 << ConstantPoolCacheEntry::has_appendix_shift), temp); 2932 __ set((1 << ConstantPoolCacheEntry::has_appendix_shift), temp);
2985 __ btst(flags, temp); 2933 __ btst(flags, temp);
2986 __ br(Assembler::zero, false, Assembler::pt, L_no_push); 2934 __ br(Assembler::zero, false, Assembler::pt, L_no_push);
2987 __ delayed()->nop(); 2935 __ delayed()->nop();
2988 // Push the appendix as a trailing parameter. 2936 // Push the appendix as a trailing parameter.
2989 // This must be done before we get the receiver, 2937 // This must be done before we get the receiver,
2990 // since the parameter_size includes it. 2938 // since the parameter_size includes it.
2991 __ push_ptr(index); // push appendix (MethodType, CallSite, etc.) 2939 __ load_resolved_reference_at_index(temp, index);
2940 __ verify_oop(temp);
2941 __ push_ptr(temp); // push appendix (MethodType, CallSite, etc.)
2992 __ bind(L_no_push); 2942 __ bind(L_no_push);
2993 } 2943 }
2994 2944
2995 // load receiver if needed (after appendix is pushed so parameter size is correct) 2945 // load receiver if needed (after appendix is pushed so parameter size is correct)
2996 if (load_receiver) { 2946 if (load_receiver) {
3019 void TemplateTable::generate_vtable_call(Register Rrecv, Register Rindex, Register Rret) { 2969 void TemplateTable::generate_vtable_call(Register Rrecv, Register Rindex, Register Rret) {
3020 Register Rtemp = G4_scratch; 2970 Register Rtemp = G4_scratch;
3021 Register Rcall = Rindex; 2971 Register Rcall = Rindex;
3022 assert_different_registers(Rcall, G5_method, Gargs, Rret); 2972 assert_different_registers(Rcall, G5_method, Gargs, Rret);
3023 2973
3024 // get target methodOop & entry point 2974 // get target Method* & entry point
3025 __ lookup_virtual_method(Rrecv, Rindex, G5_method); 2975 __ lookup_virtual_method(Rrecv, Rindex, G5_method);
3026 __ call_from_interpreter(Rcall, Gargs, Rret); 2976 __ call_from_interpreter(Rcall, Gargs, Rret);
3027 } 2977 }
3028 2978
3029 void TemplateTable::invokevirtual(int byte_no) { 2979 void TemplateTable::invokevirtual(int byte_no) {
3086 } 3036 }
3087 3037
3088 void TemplateTable::invokevfinal_helper(Register Rscratch, Register Rret) { 3038 void TemplateTable::invokevfinal_helper(Register Rscratch, Register Rret) {
3089 Register Rtemp = G4_scratch; 3039 Register Rtemp = G4_scratch;
3090 3040
3091 __ verify_oop(G5_method);
3092
3093 // Load receiver from stack slot 3041 // Load receiver from stack slot
3094 __ lduh(G5_method, in_bytes(methodOopDesc::size_of_parameters_offset()), G4_scratch); 3042 __ lduh(G5_method, in_bytes(Method::size_of_parameters_offset()), G4_scratch);
3095 __ load_receiver(G4_scratch, O0); 3043 __ load_receiver(G4_scratch, O0);
3096 3044
3097 // receiver NULL check 3045 // receiver NULL check
3098 __ null_check(O0); 3046 __ null_check(O0);
3099 3047
3124 3072
3125 prepare_invoke(byte_no, G5_method, Rret, noreg, O0_recv); // get receiver also for null check 3073 prepare_invoke(byte_no, G5_method, Rret, noreg, O0_recv); // get receiver also for null check
3126 __ null_check(O0_recv); 3074 __ null_check(O0_recv);
3127 3075
3128 // do the call 3076 // do the call
3129 __ verify_oop(G5_method);
3130 __ profile_call(O4); 3077 __ profile_call(O4);
3131 __ call_from_interpreter(Rscratch, Gargs, Rret); 3078 __ call_from_interpreter(Rscratch, Gargs, Rret);
3132 } 3079 }
3133 3080
3134 3081
3137 assert(byte_no == f1_byte, "use this argument"); 3084 assert(byte_no == f1_byte, "use this argument");
3138 3085
3139 const Register Rret = Lscratch; 3086 const Register Rret = Lscratch;
3140 const Register Rscratch = G3_scratch; 3087 const Register Rscratch = G3_scratch;
3141 3088
3142 prepare_invoke(byte_no, G5_method, Rret); // get f1 methodOop 3089 prepare_invoke(byte_no, G5_method, Rret); // get f1 Method*
3143 3090
3144 // do the call 3091 // do the call
3145 __ verify_oop(G5_method);
3146 __ profile_call(O4); 3092 __ profile_call(O4);
3147 __ call_from_interpreter(Rscratch, Gargs, Rret); 3093 __ call_from_interpreter(Rscratch, Gargs, Rret);
3148 } 3094 }
3149 3095
3150 3096 void TemplateTable::invokeinterface_object_method(Register RKlass,
3151 void TemplateTable::invokeinterface_object_method(Register RklassOop,
3152 Register Rcall, 3097 Register Rcall,
3153 Register Rret, 3098 Register Rret,
3154 Register Rflags) { 3099 Register Rflags) {
3155 Register Rscratch = G4_scratch; 3100 Register Rscratch = G4_scratch;
3156 Register Rindex = Lscratch; 3101 Register Rindex = Lscratch;
3165 __ br(Assembler::zero, false, Assembler::pt, notFinal); 3110 __ br(Assembler::zero, false, Assembler::pt, notFinal);
3166 __ delayed()->nop(); 3111 __ delayed()->nop();
3167 3112
3168 __ profile_final_call(O4); 3113 __ profile_final_call(O4);
3169 3114
3170 // do the call - the index (f2) contains the methodOop 3115 // do the call - the index (f2) contains the Method*
3171 assert_different_registers(G5_method, Gargs, Rcall); 3116 assert_different_registers(G5_method, Gargs, Rcall);
3172 __ mov(Rindex, G5_method); 3117 __ mov(Rindex, G5_method);
3173 __ call_from_interpreter(Rcall, Gargs, Rret); 3118 __ call_from_interpreter(Rcall, Gargs, Rret);
3174 __ bind(notFinal); 3119 __ bind(notFinal);
3175 3120
3176 __ profile_virtual_call(RklassOop, O4); 3121 __ profile_virtual_call(RKlass, O4);
3177 generate_vtable_call(RklassOop, Rindex, Rret); 3122 generate_vtable_call(RKlass, Rindex, Rret);
3178 } 3123 }
3179 3124
3180 3125
3181 void TemplateTable::invokeinterface(int byte_no) { 3126 void TemplateTable::invokeinterface(int byte_no) {
3182 transition(vtos, vtos); 3127 transition(vtos, vtos);
3185 const Register Rinterface = G1_scratch; 3130 const Register Rinterface = G1_scratch;
3186 const Register Rret = G3_scratch; 3131 const Register Rret = G3_scratch;
3187 const Register Rindex = Lscratch; 3132 const Register Rindex = Lscratch;
3188 const Register O0_recv = O0; 3133 const Register O0_recv = O0;
3189 const Register O1_flags = O1; 3134 const Register O1_flags = O1;
3190 const Register O2_klassOop = O2; 3135 const Register O2_Klass = O2;
3191 const Register Rscratch = G4_scratch; 3136 const Register Rscratch = G4_scratch;
3192 assert_different_registers(Rscratch, G5_method); 3137 assert_different_registers(Rscratch, G5_method);
3193 3138
3194 prepare_invoke(byte_no, Rinterface, Rret, Rindex, O0_recv, O1_flags); 3139 prepare_invoke(byte_no, Rinterface, Rret, Rindex, O0_recv, O1_flags);
3195 3140
3196 // get receiver klass 3141 // get receiver klass
3197 __ null_check(O0_recv, oopDesc::klass_offset_in_bytes()); 3142 __ null_check(O0_recv, oopDesc::klass_offset_in_bytes());
3198 __ load_klass(O0_recv, O2_klassOop); 3143 __ load_klass(O0_recv, O2_Klass);
3199 __ verify_oop(O2_klassOop);
3200 3144
3201 // Special case of invokeinterface called for virtual method of 3145 // Special case of invokeinterface called for virtual method of
3202 // java.lang.Object. See cpCacheOop.cpp for details. 3146 // java.lang.Object. See cpCacheOop.cpp for details.
3203 // This code isn't produced by javac, but could be produced by 3147 // This code isn't produced by javac, but could be produced by
3204 // another compliant java compiler. 3148 // another compliant java compiler.
3206 __ set((1 << ConstantPoolCacheEntry::is_forced_virtual_shift), Rscratch); 3150 __ set((1 << ConstantPoolCacheEntry::is_forced_virtual_shift), Rscratch);
3207 __ btst(O1_flags, Rscratch); 3151 __ btst(O1_flags, Rscratch);
3208 __ br(Assembler::zero, false, Assembler::pt, notMethod); 3152 __ br(Assembler::zero, false, Assembler::pt, notMethod);
3209 __ delayed()->nop(); 3153 __ delayed()->nop();
3210 3154
3211 invokeinterface_object_method(O2_klassOop, Rinterface, Rret, O1_flags); 3155 invokeinterface_object_method(O2_Klass, Rinterface, Rret, O1_flags);
3212 3156
3213 __ bind(notMethod); 3157 __ bind(notMethod);
3214 3158
3215 __ profile_virtual_call(O2_klassOop, O4); 3159 __ profile_virtual_call(O2_Klass, O4);
3216 3160
3217 // 3161 //
3218 // find entry point to call 3162 // find entry point to call
3219 // 3163 //
3220 3164
3221 // compute start of first itableOffsetEntry (which is at end of vtable) 3165 // compute start of first itableOffsetEntry (which is at end of vtable)
3222 const int base = instanceKlass::vtable_start_offset() * wordSize; 3166 const int base = InstanceKlass::vtable_start_offset() * wordSize;
3223 Label search; 3167 Label search;
3224 Register Rtemp = O1_flags; 3168 Register Rtemp = O1_flags;
3225 3169
3226 __ ld(O2_klassOop, instanceKlass::vtable_length_offset() * wordSize, Rtemp); 3170 __ ld(O2_Klass, InstanceKlass::vtable_length_offset() * wordSize, Rtemp);
3227 if (align_object_offset(1) > 1) { 3171 if (align_object_offset(1) > 1) {
3228 __ round_to(Rtemp, align_object_offset(1)); 3172 __ round_to(Rtemp, align_object_offset(1));
3229 } 3173 }
3230 __ sll(Rtemp, LogBytesPerWord, Rtemp); // Rscratch *= 4; 3174 __ sll(Rtemp, LogBytesPerWord, Rtemp); // Rscratch *= 4;
3231 if (Assembler::is_simm13(base)) { 3175 if (Assembler::is_simm13(base)) {
3232 __ add(Rtemp, base, Rtemp); 3176 __ add(Rtemp, base, Rtemp);
3233 } else { 3177 } else {
3234 __ set(base, Rscratch); 3178 __ set(base, Rscratch);
3235 __ add(Rscratch, Rtemp, Rtemp); 3179 __ add(Rscratch, Rtemp, Rtemp);
3236 } 3180 }
3237 __ add(O2_klassOop, Rtemp, Rscratch); 3181 __ add(O2_Klass, Rtemp, Rscratch);
3238 3182
3239 __ bind(search); 3183 __ bind(search);
3240 3184
3241 __ ld_ptr(Rscratch, itableOffsetEntry::interface_offset_in_bytes(), Rtemp); 3185 __ ld_ptr(Rscratch, itableOffsetEntry::interface_offset_in_bytes(), Rtemp);
3242 { 3186 {
3249 // need to check again. 3193 // need to check again.
3250 __ br_notnull_short( Rtemp, Assembler::pt, ok); 3194 __ br_notnull_short( Rtemp, Assembler::pt, ok);
3251 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_IncompatibleClassChangeError)); 3195 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_IncompatibleClassChangeError));
3252 __ should_not_reach_here(); 3196 __ should_not_reach_here();
3253 __ bind(ok); 3197 __ bind(ok);
3254 __ verify_oop(Rtemp); 3198 }
3255 }
3256
3257 __ verify_oop(Rinterface);
3258 3199
3259 __ cmp(Rinterface, Rtemp); 3200 __ cmp(Rinterface, Rtemp);
3260 __ brx(Assembler::notEqual, true, Assembler::pn, search); 3201 __ brx(Assembler::notEqual, true, Assembler::pn, search);
3261 __ delayed()->add(Rscratch, itableOffsetEntry::size() * wordSize, Rscratch); 3202 __ delayed()->add(Rscratch, itableOffsetEntry::size() * wordSize, Rscratch);
3262 3203
3264 __ ld(Rscratch, itableOffsetEntry::offset_offset_in_bytes(), Rscratch); 3205 __ ld(Rscratch, itableOffsetEntry::offset_offset_in_bytes(), Rscratch);
3265 3206
3266 assert(itableMethodEntry::method_offset_in_bytes() == 0, "adjust instruction below"); 3207 assert(itableMethodEntry::method_offset_in_bytes() == 0, "adjust instruction below");
3267 __ sll(Rindex, exact_log2(itableMethodEntry::size() * wordSize), Rindex); // Rindex *= 8; 3208 __ sll(Rindex, exact_log2(itableMethodEntry::size() * wordSize), Rindex); // Rindex *= 8;
3268 __ add(Rscratch, Rindex, Rscratch); 3209 __ add(Rscratch, Rindex, Rscratch);
3269 __ ld_ptr(O2_klassOop, Rscratch, G5_method); 3210 __ ld_ptr(O2_Klass, Rscratch, G5_method);
3270 3211
3271 // Check for abstract method error. 3212 // Check for abstract method error.
3272 { 3213 {
3273 Label ok; 3214 Label ok;
3274 __ br_notnull_short(G5_method, Assembler::pt, ok); 3215 __ br_notnull_short(G5_method, Assembler::pt, ok);
3278 } 3219 }
3279 3220
3280 Register Rcall = Rinterface; 3221 Register Rcall = Rinterface;
3281 assert_different_registers(Rcall, G5_method, Gargs, Rret); 3222 assert_different_registers(Rcall, G5_method, Gargs, Rret);
3282 3223
3283 __ verify_oop(G5_method);
3284 __ call_from_interpreter(Rcall, Gargs, Rret); 3224 __ call_from_interpreter(Rcall, Gargs, Rret);
3285 } 3225 }
3286
3287 3226
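
The itable walk generated above behaves like the following loop (a hedged model, not the real lookup code): scan the `itableOffsetEntry` records placed after the vtable until the interface `Klass*` matches, treating a null entry as IncompatibleClassChangeError, then index that interface's method table by `Rindex`:

```cpp
struct OffsetEntryModel { const void* interface_klass; int offset; };

// Returns the Method** slot, or nullptr where the stub raises ICCE.
void** find_interface_method(char* klass_base, OffsetEntryModel* e,
                             const void* target, int method_index) {
  for (; e->interface_klass != target; ++e) {
    if (e->interface_klass == nullptr) return nullptr;  // not implemented
  }
  return reinterpret_cast<void**>(klass_base + e->offset) + method_index;
}
```
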
3288 void TemplateTable::invokehandle(int byte_no) { 3227 void TemplateTable::invokehandle(int byte_no) {
3289 transition(vtos, vtos); 3228 transition(vtos, vtos);
3290 assert(byte_no == f12_oop, "use this argument"); 3229 assert(byte_no == f1_byte, "use this argument");
3291 3230
3292 if (!EnableInvokeDynamic) { 3231 if (!EnableInvokeDynamic) {
3293 // rewriter does not generate this bytecode 3232 // rewriter does not generate this bytecode
3294 __ should_not_reach_here(); 3233 __ should_not_reach_here();
3295 return; 3234 return;
3301 const Register Rscratch = G3_scratch; 3240 const Register Rscratch = G3_scratch;
3302 3241
3303 prepare_invoke(byte_no, G5_method, Rret, G4_mtype, O0_recv); 3242 prepare_invoke(byte_no, G5_method, Rret, G4_mtype, O0_recv);
3304 __ null_check(O0_recv); 3243 __ null_check(O0_recv);
3305 3244
3306 // G4: MethodType object (from f1) 3245 // G4: MethodType object (from cpool->resolved_references[])
3307 // G5: MH.linkToCallSite method (from f2) 3246 // G5: MH.linkToCallSite method (from f2)
3308 3247
3309 // Note: G4_mtype is already pushed (if necessary) by prepare_invoke 3248 // Note: G4_mtype is already pushed (if necessary) by prepare_invoke
3310 3249
3311 // do the call 3250 // do the call
3312 __ verify_oop(G5_method); 3251 __ verify_oop(G4_mtype);
3313 __ profile_final_call(O4); // FIXME: profile the LambdaForm also 3252 __ profile_final_call(O4); // FIXME: profile the LambdaForm also
3314 __ call_from_interpreter(Rscratch, Gargs, Rret); 3253 __ call_from_interpreter(Rscratch, Gargs, Rret);
3315 } 3254 }
3316 3255
3317 3256
3318 void TemplateTable::invokedynamic(int byte_no) { 3257 void TemplateTable::invokedynamic(int byte_no) {
3319 transition(vtos, vtos); 3258 transition(vtos, vtos);
3320 assert(byte_no == f12_oop, "use this argument"); 3259 assert(byte_no == f1_byte, "use this argument");
3321 3260
3322 if (!EnableInvokeDynamic) { 3261 if (!EnableInvokeDynamic) {
3323 // We should not encounter this bytecode if !EnableInvokeDynamic. 3262 // We should not encounter this bytecode if !EnableInvokeDynamic.
3324 // The verifier will stop it. However, if we get past the verifier, 3263 // The verifier will stop it. However, if we get past the verifier,
3325 // this will stop the thread in a reasonable way, without crashing the JVM. 3264 // this will stop the thread in a reasonable way, without crashing the JVM.
3334 const Register G4_callsite = G4_scratch; 3273 const Register G4_callsite = G4_scratch;
3335 const Register Rscratch = G3_scratch; 3274 const Register Rscratch = G3_scratch;
3336 3275
3337 prepare_invoke(byte_no, G5_method, Rret, G4_callsite); 3276 prepare_invoke(byte_no, G5_method, Rret, G4_callsite);
3338 3277
3339 // G4: CallSite object (from f1) 3278 // G4: CallSite object (from cpool->resolved_references[])
3340 // G5: MH.linkToCallSite method (from f2) 3279 // G5: MH.linkToCallSite method (from f1)
3341 3280
3342 // Note: G4_callsite is already pushed by prepare_invoke 3281 // Note: G4_callsite is already pushed by prepare_invoke
3343 3282
3344 // %%% should make a type profile for any invokedynamic that takes a ref argument 3283 // %%% should make a type profile for any invokedynamic that takes a ref argument
3345 // profile this call 3284 // profile this call
3346 __ profile_call(O4); 3285 __ profile_call(O4);
3347 3286
3348 // do the call 3287 // do the call
3349 __ verify_oop(G5_method); 3288 __ verify_oop(G4_callsite);
3350 __ call_from_interpreter(Rscratch, Gargs, Rret); 3289 __ call_from_interpreter(Rscratch, Gargs, Rret);
3351 } 3290 }
3352 3291
3353 3292
3354 //---------------------------------------------------------------------------------------------------- 3293 //----------------------------------------------------------------------------------------------------
3368 Register Rscratch = O4; 3307 Register Rscratch = O4;
3369 3308
3370 __ get_2_byte_integer_at_bcp(1, Rscratch, Roffset, InterpreterMacroAssembler::Unsigned); 3309 __ get_2_byte_integer_at_bcp(1, Rscratch, Roffset, InterpreterMacroAssembler::Unsigned);
3371 __ get_cpool_and_tags(Rscratch, G3_scratch); 3310 __ get_cpool_and_tags(Rscratch, G3_scratch);
3372 // make sure the class we're about to instantiate has been resolved 3311 // make sure the class we're about to instantiate has been resolved
3373 // This is done before loading instanceKlass to be consistent with the order 3312 // This is done before loading InstanceKlass to be consistent with the order
3374 // how Constant Pool is updated (see constantPoolOopDesc::klass_at_put) 3313 // how Constant Pool is updated (see ConstantPool::klass_at_put)
3375 __ add(G3_scratch, typeArrayOopDesc::header_size(T_BYTE) * wordSize, G3_scratch); 3314 __ add(G3_scratch, Array<u1>::base_offset_in_bytes(), G3_scratch);
3376 __ ldub(G3_scratch, Roffset, G3_scratch); 3315 __ ldub(G3_scratch, Roffset, G3_scratch);
3377 __ cmp(G3_scratch, JVM_CONSTANT_Class); 3316 __ cmp(G3_scratch, JVM_CONSTANT_Class);
3378 __ br(Assembler::notEqual, false, Assembler::pn, slow_case); 3317 __ br(Assembler::notEqual, false, Assembler::pn, slow_case);
3379 __ delayed()->sll(Roffset, LogBytesPerWord, Roffset); 3318 __ delayed()->sll(Roffset, LogBytesPerWord, Roffset);
3380 // get instanceKlass 3319 // get InstanceKlass
3381 //__ sll(Roffset, LogBytesPerWord, Roffset); // executed in delay slot 3320 //__ sll(Roffset, LogBytesPerWord, Roffset); // executed in delay slot
3382 __ add(Roffset, sizeof(constantPoolOopDesc), Roffset); 3321 __ add(Roffset, sizeof(ConstantPool), Roffset);
3383 __ ld_ptr(Rscratch, Roffset, RinstanceKlass); 3322 __ ld_ptr(Rscratch, Roffset, RinstanceKlass);
3384 3323
3385 // make sure klass is fully initialized: 3324 // make sure klass is fully initialized:
3386 __ ldub(RinstanceKlass, in_bytes(instanceKlass::init_state_offset()), G3_scratch); 3325 __ ldub(RinstanceKlass, in_bytes(InstanceKlass::init_state_offset()), G3_scratch);
3387 __ cmp(G3_scratch, instanceKlass::fully_initialized); 3326 __ cmp(G3_scratch, InstanceKlass::fully_initialized);
3388 __ br(Assembler::notEqual, false, Assembler::pn, slow_case); 3327 __ br(Assembler::notEqual, false, Assembler::pn, slow_case);
3389 __ delayed()->ld(RinstanceKlass, in_bytes(Klass::layout_helper_offset()), Roffset); 3328 __ delayed()->ld(RinstanceKlass, in_bytes(Klass::layout_helper_offset()), Roffset);
3390 3329
3391 // get instance_size in instanceKlass (already aligned) 3330 // get instance_size in InstanceKlass (already aligned)
3392 //__ ld(RinstanceKlass, in_bytes(Klass::layout_helper_offset()), Roffset); 3331 //__ ld(RinstanceKlass, in_bytes(Klass::layout_helper_offset()), Roffset);
3393 3332
3394 // make sure klass does not have has_finalizer, or is abstract, or interface or java/lang/Class 3333 // make sure klass does not have has_finalizer, or is abstract, or interface or java/lang/Class
3395 __ btst(Klass::_lh_instance_slow_path_bit, Roffset); 3334 __ btst(Klass::_lh_instance_slow_path_bit, Roffset);
3396 __ br(Assembler::notZero, false, Assembler::pn, slow_case); 3335 __ br(Assembler::notZero, false, Assembler::pn, slow_case);
3582 // Get constant pool tag 3521 // Get constant pool tag
3583 __ get_2_byte_integer_at_bcp(1, Lscratch, Roffset, InterpreterMacroAssembler::Unsigned); 3522 __ get_2_byte_integer_at_bcp(1, Lscratch, Roffset, InterpreterMacroAssembler::Unsigned);
3584 3523
3585 // See if the checkcast has been quickened 3524 // See if the checkcast has been quickened
3586 __ get_cpool_and_tags(Lscratch, G3_scratch); 3525 __ get_cpool_and_tags(Lscratch, G3_scratch);
3587 __ add(G3_scratch, typeArrayOopDesc::header_size(T_BYTE) * wordSize, G3_scratch); 3526 __ add(G3_scratch, Array<u1>::base_offset_in_bytes(), G3_scratch);
3588 __ ldub(G3_scratch, Roffset, G3_scratch); 3527 __ ldub(G3_scratch, Roffset, G3_scratch);
3589 __ cmp(G3_scratch, JVM_CONSTANT_Class); 3528 __ cmp(G3_scratch, JVM_CONSTANT_Class);
3590 __ br(Assembler::equal, true, Assembler::pt, quicked); 3529 __ br(Assembler::equal, true, Assembler::pt, quicked);
3591 __ delayed()->sll(Roffset, LogBytesPerWord, Roffset); 3530 __ delayed()->sll(Roffset, LogBytesPerWord, Roffset);
3592 3531
3593 __ push_ptr(); // save receiver for result, and for GC 3532 __ push_ptr(); // save receiver for result, and for GC
3594 call_VM(RspecifiedKlass, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc) ); 3533 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc) );
3534 __ get_vm_result_2(RspecifiedKlass);
3595 __ pop_ptr(Otos_i, G3_scratch); // restore receiver 3535 __ pop_ptr(Otos_i, G3_scratch); // restore receiver
3596 3536
3597 __ ba_short(resolved); 3537 __ ba_short(resolved);
3598 3538
3599 // Extract target class from constant pool 3539 // Extract target class from constant pool
3600 __ bind(quicked); 3540 __ bind(quicked);
3601 __ add(Roffset, sizeof(constantPoolOopDesc), Roffset); 3541 __ add(Roffset, sizeof(ConstantPool), Roffset);
3602 __ ld_ptr(Lscratch, Roffset, RspecifiedKlass); 3542 __ ld_ptr(Lscratch, Roffset, RspecifiedKlass);
3603 __ bind(resolved); 3543 __ bind(resolved);
3604 __ load_klass(Otos_i, RobjKlass); // get value klass 3544 __ load_klass(Otos_i, RobjKlass); // get value klass
3605 3545
3606 // Generate a fast subtype check. Branch to cast_ok if no 3546 // Generate a fast subtype check. Branch to cast_ok if no
3637 // Get constant pool tag 3577 // Get constant pool tag
3638 __ get_2_byte_integer_at_bcp(1, Lscratch, Roffset, InterpreterMacroAssembler::Unsigned); 3578 __ get_2_byte_integer_at_bcp(1, Lscratch, Roffset, InterpreterMacroAssembler::Unsigned);
3639 3579
3640 // See if the checkcast has been quickened 3580 // See if the checkcast has been quickened
3641 __ get_cpool_and_tags(Lscratch, G3_scratch); 3581 __ get_cpool_and_tags(Lscratch, G3_scratch);
3642 __ add(G3_scratch, typeArrayOopDesc::header_size(T_BYTE) * wordSize, G3_scratch); 3582 __ add(G3_scratch, Array<u1>::base_offset_in_bytes(), G3_scratch);
3643 __ ldub(G3_scratch, Roffset, G3_scratch); 3583 __ ldub(G3_scratch, Roffset, G3_scratch);
3644 __ cmp(G3_scratch, JVM_CONSTANT_Class); 3584 __ cmp(G3_scratch, JVM_CONSTANT_Class);
3645 __ br(Assembler::equal, true, Assembler::pt, quicked); 3585 __ br(Assembler::equal, true, Assembler::pt, quicked);
3646 __ delayed()->sll(Roffset, LogBytesPerWord, Roffset); 3586 __ delayed()->sll(Roffset, LogBytesPerWord, Roffset);
3647 3587
3648 __ push_ptr(); // save receiver for result, and for GC 3588 __ push_ptr(); // save receiver for result, and for GC
3649 call_VM(RspecifiedKlass, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc) ); 3589 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc) );
3590 __ get_vm_result_2(RspecifiedKlass);
3650 __ pop_ptr(Otos_i, G3_scratch); // restore receiver 3591 __ pop_ptr(Otos_i, G3_scratch); // restore receiver
3651 3592
3652 __ ba_short(resolved); 3593 __ ba_short(resolved);
3653 3594
3654 // Extract target class from constant pool 3595 // Extract target class from constant pool
3655 __ bind(quicked); 3596 __ bind(quicked);
3656 __ add(Roffset, sizeof(constantPoolOopDesc), Roffset); 3597 __ add(Roffset, sizeof(ConstantPool), Roffset);
3657 __ get_constant_pool(Lscratch); 3598 __ get_constant_pool(Lscratch);
3658 __ ld_ptr(Lscratch, Roffset, RspecifiedKlass); 3599 __ ld_ptr(Lscratch, Roffset, RspecifiedKlass);
3659 __ bind(resolved); 3600 __ bind(resolved);
3660 __ load_klass(Otos_i, RobjKlass); // get value klass 3601 __ load_klass(Otos_i, RobjKlass); // get value klass
3661 3602
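
Both quickening sites above switched from receiving the klass in the `call_VM` result register to fetching it with `get_vm_result_2`, since a `Klass*` is metadata rather than an oop. A sketch of the assumed convention:

```cpp
struct ThreadModel { void* vm_result_2; };

// Models what get_vm_result_2 reads: the VM parks the resolved Klass* in a
// thread-local slot, which the stub consumes exactly once.
inline void* take_vm_result_2(ThreadModel* t) {
  void* metadata = t->vm_result_2;
  t->vm_result_2 = nullptr;   // one-shot slot
  return metadata;
}
```
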