comparison src/cpu/x86/vm/templateTable_x86_64.cpp @ 6725:da91efe96a93

6964458: Reimplement class meta-data storage to use native memory
Summary: Remove PermGen, allocate meta-data in metaspace linked to class loaders, rewrite GC walking, rewrite and rename metadata to be C++ classes
Reviewed-by: jmasa, stefank, never, coleenp, kvn, brutisso, mgerdin, dholmes, jrose, twisti, roland
Contributed-by: jmasa <jon.masamitsu@oracle.com>, stefank <stefan.karlsson@oracle.com>, mgerdin <mikael.gerdin@oracle.com>, never <tom.rodriguez@oracle.com>
author coleenp
date Sat, 01 Sep 2012 13:25:18 -0400
parents 1d7922586cf6
children 75f33eecc1b3
6724:36d1d483d5d6 6725:da91efe96a93
1 /* 1 /*
2 * Copyright (c) 2003, 2011, Oracle and/or its affiliates. All rights reserved. 2 * Copyright (c) 2003, 2012, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 * 4 *
5 * This code is free software; you can redistribute it and/or modify it 5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as 6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation. 7 * published by the Free Software Foundation.
25 #include "precompiled.hpp" 25 #include "precompiled.hpp"
26 #include "interpreter/interpreter.hpp" 26 #include "interpreter/interpreter.hpp"
27 #include "interpreter/interpreterRuntime.hpp" 27 #include "interpreter/interpreterRuntime.hpp"
28 #include "interpreter/templateTable.hpp" 28 #include "interpreter/templateTable.hpp"
29 #include "memory/universe.inline.hpp" 29 #include "memory/universe.inline.hpp"
30 #include "oops/methodDataOop.hpp" 30 #include "oops/methodData.hpp"
31 #include "oops/objArrayKlass.hpp" 31 #include "oops/objArrayKlass.hpp"
32 #include "oops/oop.inline.hpp" 32 #include "oops/oop.inline.hpp"
33 #include "prims/methodHandles.hpp" 33 #include "prims/methodHandles.hpp"
34 #include "runtime/sharedRuntime.hpp" 34 #include "runtime/sharedRuntime.hpp"
35 #include "runtime/stubRoutines.hpp" 35 #include "runtime/stubRoutines.hpp"
365 } else { 365 } else {
366 __ load_unsigned_byte(rbx, at_bcp(1)); 366 __ load_unsigned_byte(rbx, at_bcp(1));
367 } 367 }
368 368
369 __ get_cpool_and_tags(rcx, rax); 369 __ get_cpool_and_tags(rcx, rax);
370 const int base_offset = constantPoolOopDesc::header_size() * wordSize; 370 const int base_offset = ConstantPool::header_size() * wordSize;
371 const int tags_offset = typeArrayOopDesc::header_size(T_BYTE) * wordSize; 371 const int tags_offset = Array<u1>::base_offset_in_bytes();
372 372
373 // get type 373 // get type
374 __ movzbl(rdx, Address(rax, rbx, Address::times_1, tags_offset)); 374 __ movzbl(rdx, Address(rax, rbx, Address::times_1, tags_offset));
375
376 // unresolved string - get the resolved string
377 __ cmpl(rdx, JVM_CONSTANT_UnresolvedString);
378 __ jccb(Assembler::equal, call_ldc);
379 375
380 // unresolved class - get the resolved class 376 // unresolved class - get the resolved class
381 __ cmpl(rdx, JVM_CONSTANT_UnresolvedClass); 377 __ cmpl(rdx, JVM_CONSTANT_UnresolvedClass);
382 __ jccb(Assembler::equal, call_ldc); 378 __ jccb(Assembler::equal, call_ldc);
383 379
409 #ifdef ASSERT 405 #ifdef ASSERT
410 { 406 {
411 Label L; 407 Label L;
412 __ cmpl(rdx, JVM_CONSTANT_Integer); 408 __ cmpl(rdx, JVM_CONSTANT_Integer);
413 __ jcc(Assembler::equal, L); 409 __ jcc(Assembler::equal, L);
414 __ cmpl(rdx, JVM_CONSTANT_String); 410 // String and Object are rewritten to fast_aldc
415 __ jcc(Assembler::equal, L);
416 __ cmpl(rdx, JVM_CONSTANT_Object);
417 __ jcc(Assembler::equal, L);
418 __ stop("unexpected tag type in ldc"); 411 __ stop("unexpected tag type in ldc");
419 __ bind(L); 412 __ bind(L);
420 } 413 }
421 #endif 414 #endif
422 // atos and itos 415 // itos JVM_CONSTANT_Integer only
423 Label isOop;
424 __ cmpl(rdx, JVM_CONSTANT_Integer);
425 __ jcc(Assembler::notEqual, isOop);
426 __ movl(rax, Address(rcx, rbx, Address::times_8, base_offset)); 416 __ movl(rax, Address(rcx, rbx, Address::times_8, base_offset));
427 __ push_i(rax); 417 __ push_i(rax);
428 __ jmp(Done);
429
430 __ bind(isOop);
431 __ movptr(rax, Address(rcx, rbx, Address::times_8, base_offset));
432 __ push_ptr(rax);
433
434 if (VerifyOops) {
435 __ verify_oop(rax);
436 }
437
438 __ bind(Done); 418 __ bind(Done);
439 } 419 }
440 420
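
With PermGen gone, the constant-pool tags array is no longer a Java typeArrayOop but a native metaspace Array<u1>, which is why tags_offset changes above from typeArrayOopDesc::header_size(T_BYTE) * wordSize to Array<u1>::base_offset_in_bytes(). A minimal stand-alone model of the assumed layout — names and types here are illustrative, not the HotSpot definitions:

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    // Stand-in for metaspace Array<u1>: a length header followed directly
    // by the element payload.
    struct U1ArrayModel {
      int32_t length;
      uint8_t data[4];   // payload begins right after the header
      static size_t base_offset_in_bytes() { return offsetof(U1ArrayModel, data); }
    };

    int main() {
      U1ArrayModel tags = {4, {0, 0, 7, 0}};   // 7 == JVM_CONSTANT_Class
      int index = 2;
      // the generated movzbl(rdx, Address(rax, rbx, times_1, tags_offset))
      // performs this byte load:
      uint8_t tag = *(reinterpret_cast<uint8_t*>(&tags) +
                      U1ArrayModel::base_offset_in_bytes() + index);
      printf("tag at cp index %d = %u\n", index, tag);   // prints 7
      return 0;
    }
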
441 // Fast path for caching oop constants. 421 // Fast path for caching oop constants.
442 // %%% We should use this to handle Class and String constants also.
443 // %%% It will simplify the ldc/primitive path considerably.
444 void TemplateTable::fast_aldc(bool wide) { 422 void TemplateTable::fast_aldc(bool wide) {
445 transition(vtos, atos); 423 transition(vtos, atos);
446 424
447 if (!EnableInvokeDynamic) { 425 Register result = rax;
448 // We should not encounter this bytecode if !EnableInvokeDynamic. 426 Register tmp = rdx;
449 // The verifier will stop it. However, if we get past the verifier, 427 int index_size = wide ? sizeof(u2) : sizeof(u1);
450 // this will stop the thread in a reasonable way, without crashing the JVM. 428
451 __ call_VM(noreg, CAST_FROM_FN_PTR(address, 429 Label resolved;
452 InterpreterRuntime::throw_IncompatibleClassChangeError)); 430
453 // the call_VM checks for exception, so we should never return here. 431 // We are resolved if the resolved reference cache entry contains a
454 __ should_not_reach_here(); 432 // non-null object (String, MethodType, etc.)
455 return; 433 assert_different_registers(result, tmp);
456 } 434 __ get_cache_index_at_bcp(tmp, 1, index_size);
457 435 __ load_resolved_reference_at_index(result, tmp);
458 const Register cache = rcx; 436 __ testl(result, result);
459 const Register index = rdx; 437 __ jcc(Assembler::notZero, resolved);
460 438
461 resolve_cache_and_index(f12_oop, rax, cache, index, wide ? sizeof(u2) : sizeof(u1)); 439 address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_ldc);
440
441 // first time invocation - must resolve first
442 __ movl(tmp, (int)bytecode());
443 __ call_VM(result, entry, tmp);
444
445 __ bind(resolved);
446
462 if (VerifyOops) { 447 if (VerifyOops) {
463 __ verify_oop(rax); 448 __ verify_oop(result);
464 } 449 }
465
466 Label L_done, L_throw_exception;
467 const Register con_klass_temp = rcx; // same as cache
468 const Register array_klass_temp = rdx; // same as index
469 __ load_klass(con_klass_temp, rax);
470 __ lea(array_klass_temp, ExternalAddress((address)Universe::systemObjArrayKlassObj_addr()));
471 __ cmpptr(con_klass_temp, Address(array_klass_temp, 0));
472 __ jcc(Assembler::notEqual, L_done);
473 __ cmpl(Address(rax, arrayOopDesc::length_offset_in_bytes()), 0);
474 __ jcc(Assembler::notEqual, L_throw_exception);
475 __ xorptr(rax, rax);
476 __ jmp(L_done);
477
478 // Load the exception from the system-array which wraps it:
479 __ bind(L_throw_exception);
480 __ load_heap_oop(rax, Address(rax, arrayOopDesc::base_offset_in_bytes(T_OBJECT)));
481 __ jump(ExternalAddress(Interpreter::throw_exception_entry()));
482
483 __ bind(L_done);
484 } 450 }
485 451
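
The rewritten fast_aldc above replaces the old f12_oop cp-cache probe with a lookup in the constant pool's resolved-references array: a non-null entry means the constant is already resolved, otherwise InterpreterRuntime::resolve_ldc is called once and the result is cached. A stand-alone sketch of that pattern, with the cache array and slow path modeled rather than taken from HotSpot:

    #include <cstdio>

    // Models cpool->resolved_references(), a Java object array on the heap;
    // the real lookup goes ConstantPool -> resolved_references -> element.
    static const void* resolved_refs[16];

    // Models InterpreterRuntime::resolve_ldc: resolve, cache, and return.
    static const void* resolve_ldc_slow(int index) {
      static const char* strings[16] = { "hello", "world" };
      return resolved_refs[index] = strings[index];
    }

    const void* fast_aldc(int cache_index) {
      const void* result = resolved_refs[cache_index];  // load_resolved_reference_at_index
      if (result != nullptr)                            // testl(result, result)
        return result;                                  // jcc(notZero, resolved)
      return resolve_ldc_slow(cache_index);             // first time: call_VM into the runtime
    }

    int main() {
      printf("%s\n", static_cast<const char*>(fast_aldc(0)));  // slow path, then cached
      printf("%s\n", static_cast<const char*>(fast_aldc(0)));  // fast path
      return 0;
    }
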
486 void TemplateTable::ldc2_w() { 452 void TemplateTable::ldc2_w() {
487 transition(vtos, vtos); 453 transition(vtos, vtos);
488 Label Long, Done; 454 Label Long, Done;
489 __ get_unsigned_2_byte_index_at_bcp(rbx, 1); 455 __ get_unsigned_2_byte_index_at_bcp(rbx, 1);
490 456
491 __ get_cpool_and_tags(rcx, rax); 457 __ get_cpool_and_tags(rcx, rax);
492 const int base_offset = constantPoolOopDesc::header_size() * wordSize; 458 const int base_offset = ConstantPool::header_size() * wordSize;
493 const int tags_offset = typeArrayOopDesc::header_size(T_BYTE) * wordSize; 459 const int tags_offset = Array<u1>::base_offset_in_bytes();
494 460
495 // get type 461 // get type
496 __ cmpb(Address(rax, rbx, Address::times_1, tags_offset), 462 __ cmpb(Address(rax, rbx, Address::times_1, tags_offset),
497 JVM_CONSTANT_Double); 463 JVM_CONSTANT_Double);
498 __ jccb(Assembler::notEqual, Long); 464 __ jccb(Assembler::notEqual, Long);
1594 void TemplateTable::branch(bool is_jsr, bool is_wide) { 1560 void TemplateTable::branch(bool is_jsr, bool is_wide) {
1595 __ get_method(rcx); // rcx holds method 1561 __ get_method(rcx); // rcx holds method
1596 __ profile_taken_branch(rax, rbx); // rax holds updated MDP, rbx 1562 __ profile_taken_branch(rax, rbx); // rax holds updated MDP, rbx
1597 // holds bumped taken count 1563 // holds bumped taken count
1598 1564
1599 const ByteSize be_offset = methodOopDesc::backedge_counter_offset() + 1565 const ByteSize be_offset = Method::backedge_counter_offset() +
1600 InvocationCounter::counter_offset(); 1566 InvocationCounter::counter_offset();
1601 const ByteSize inv_offset = methodOopDesc::invocation_counter_offset() + 1567 const ByteSize inv_offset = Method::invocation_counter_offset() +
1602 InvocationCounter::counter_offset(); 1568 InvocationCounter::counter_offset();
1603 const int method_offset = frame::interpreter_frame_method_offset * wordSize; 1569 const int method_offset = frame::interpreter_frame_method_offset * wordSize;
1604 1570
1605 // Load up edx with the branch displacement 1571 // Load up edx with the branch displacement
1606 __ movl(rdx, at_bcp(1)); 1572 __ movl(rdx, at_bcp(1));
1618 // Pre-load the next target bytecode into rbx 1584 // Pre-load the next target bytecode into rbx
1619 __ load_unsigned_byte(rbx, Address(r13, rdx, Address::times_1, 0)); 1585 __ load_unsigned_byte(rbx, Address(r13, rdx, Address::times_1, 0));
1620 1586
1621 // compute return address as bci in rax 1587 // compute return address as bci in rax
1622 __ lea(rax, at_bcp((is_wide ? 5 : 3) - 1588 __ lea(rax, at_bcp((is_wide ? 5 : 3) -
1623 in_bytes(constMethodOopDesc::codes_offset()))); 1589 in_bytes(ConstMethod::codes_offset())));
1624 __ subptr(rax, Address(rcx, methodOopDesc::const_offset())); 1590 __ subptr(rax, Address(rcx, Method::const_offset()));
1625 // Adjust the bcp in r13 by the displacement in rdx 1591 // Adjust the bcp in r13 by the displacement in rdx
1626 __ addptr(r13, rdx); 1592 __ addptr(r13, rdx);
1627 // jsr returns atos that is not an oop 1593 // jsr returns atos that is not an oop
1628 __ push_i(rax); 1594 __ push_i(rax);
1629 __ dispatch_only(vtos); 1595 __ dispatch_only(vtos);
1654 Label no_mdo; 1620 Label no_mdo;
1655 int increment = InvocationCounter::count_increment; 1621 int increment = InvocationCounter::count_increment;
1656 int mask = ((1 << Tier0BackedgeNotifyFreqLog) - 1) << InvocationCounter::count_shift; 1622 int mask = ((1 << Tier0BackedgeNotifyFreqLog) - 1) << InvocationCounter::count_shift;
1657 if (ProfileInterpreter) { 1623 if (ProfileInterpreter) {
1658 // Are we profiling? 1624 // Are we profiling?
1659 __ movptr(rbx, Address(rcx, in_bytes(methodOopDesc::method_data_offset()))); 1625 __ movptr(rbx, Address(rcx, in_bytes(Method::method_data_offset())));
1660 __ testptr(rbx, rbx); 1626 __ testptr(rbx, rbx);
1661 __ jccb(Assembler::zero, no_mdo); 1627 __ jccb(Assembler::zero, no_mdo);
1662 // Increment the MDO backedge counter 1628 // Increment the MDO backedge counter
1663 const Address mdo_backedge_counter(rbx, in_bytes(methodDataOopDesc::backedge_counter_offset()) + 1629 const Address mdo_backedge_counter(rbx, in_bytes(MethodData::backedge_counter_offset()) +
1664 in_bytes(InvocationCounter::counter_offset())); 1630 in_bytes(InvocationCounter::counter_offset()));
1665 __ increment_mask_and_jump(mdo_backedge_counter, increment, mask, 1631 __ increment_mask_and_jump(mdo_backedge_counter, increment, mask,
1666 rax, false, Assembler::zero, &backedge_counter_overflow); 1632 rax, false, Assembler::zero, &backedge_counter_overflow);
1667 __ jmp(dispatch); 1633 __ jmp(dispatch);
1668 } 1634 }
1669 __ bind(no_mdo); 1635 __ bind(no_mdo);
1670 // Increment backedge counter in methodOop 1636 // Increment backedge counter in Method*
1671 __ increment_mask_and_jump(Address(rcx, be_offset), increment, mask, 1637 __ increment_mask_and_jump(Address(rcx, be_offset), increment, mask,
1672 rax, false, Assembler::zero, &backedge_counter_overflow); 1638 rax, false, Assembler::zero, &backedge_counter_overflow);
1673 } else { 1639 } else {
1674 // increment counter 1640 // increment counter
1675 __ movl(rax, Address(rcx, be_offset)); // load backedge counter 1641 __ movl(rax, Address(rcx, be_offset)); // load backedge counter
1694 __ cmp32(rbx, 1660 __ cmp32(rbx,
1695 ExternalAddress((address) &InvocationCounter::InterpreterBackwardBranchLimit)); 1661 ExternalAddress((address) &InvocationCounter::InterpreterBackwardBranchLimit));
1696 __ jcc(Assembler::below, dispatch); 1662 __ jcc(Assembler::below, dispatch);
1697 1663
1698 // When ProfileInterpreter is on, the backedge_count comes 1664 // When ProfileInterpreter is on, the backedge_count comes
1699 // from the methodDataOop, which value does not get reset on 1665 // from the MethodData*, which value does not get reset on
1700 // the call to frequency_counter_overflow(). To avoid 1666 // the call to frequency_counter_overflow(). To avoid
1701 // excessive calls to the overflow routine while the method is 1667 // excessive calls to the overflow routine while the method is
1702 // being compiled, add a second test to make sure the overflow 1668 // being compiled, add a second test to make sure the overflow
1703 // function is called only once every overflow_frequency. 1669 // function is called only once every overflow_frequency.
1704 const int overflow_frequency = 1024; 1670 const int overflow_frequency = 1024;
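
The increment_mask_and_jump calls above implement the throttling the surrounding comments describe: the counter is kept pre-shifted, bumped by count_increment, and the overflow routine runs only when the masked bits wrap to zero. A sketch under assumed constants — the real encoding lives in InvocationCounter:

    #include <cstdio>

    // Illustrative constants, not the exact InvocationCounter encoding.
    const int count_shift     = 3;                 // low bits reserved for state
    const int count_increment = 1 << count_shift;  // InvocationCounter::count_increment
    const int notify_freq_log = 10;                // stands in for Tier0BackedgeNotifyFreqLog
    const int mask = ((1 << notify_freq_log) - 1) << count_shift;

    // Models MacroAssembler::increment_mask_and_jump on the backedge counter:
    // bump, then branch to the overflow path only when the masked bits wrap
    // to zero, i.e. once every 2^notify_freq_log backedges.
    bool backedge_overflow(int& counter) {
      counter += count_increment;          // addl(counter_addr, increment)
      return (counter & mask) == 0;        // andl + jcc(zero, overflow)
    }

    int main() {
      int counter = 0, notifications = 0;
      for (int i = 0; i < 3000; i++)
        if (backedge_overflow(counter)) notifications++;
      printf("overflow path taken %d times\n", notifications);  // 3000 / 1024 -> 2
      return 0;
    }
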
1852 transition(vtos, vtos); 1818 transition(vtos, vtos);
1853 locals_index(rbx); 1819 locals_index(rbx);
1854 __ movslq(rbx, iaddress(rbx)); // get return bci, compute return bcp 1820 __ movslq(rbx, iaddress(rbx)); // get return bci, compute return bcp
1855 __ profile_ret(rbx, rcx); 1821 __ profile_ret(rbx, rcx);
1856 __ get_method(rax); 1822 __ get_method(rax);
1857 __ movptr(r13, Address(rax, methodOopDesc::const_offset())); 1823 __ movptr(r13, Address(rax, Method::const_offset()));
1858 __ lea(r13, Address(r13, rbx, Address::times_1, 1824 __ lea(r13, Address(r13, rbx, Address::times_1,
1859 constMethodOopDesc::codes_offset())); 1825 ConstMethod::codes_offset()));
1860 __ dispatch_next(vtos); 1826 __ dispatch_next(vtos);
1861 } 1827 }
1862 1828
1863 void TemplateTable::wide_ret() { 1829 void TemplateTable::wide_ret() {
1864 transition(vtos, vtos); 1830 transition(vtos, vtos);
1865 locals_index_wide(rbx); 1831 locals_index_wide(rbx);
1866 __ movptr(rbx, aaddress(rbx)); // get return bci, compute return bcp 1832 __ movptr(rbx, aaddress(rbx)); // get return bci, compute return bcp
1867 __ profile_ret(rbx, rcx); 1833 __ profile_ret(rbx, rcx);
1868 __ get_method(rax); 1834 __ get_method(rax);
1869 __ movptr(r13, Address(rax, methodOopDesc::const_offset())); 1835 __ movptr(r13, Address(rax, Method::const_offset()));
1870 __ lea(r13, Address(r13, rbx, Address::times_1, constMethodOopDesc::codes_offset())); 1836 __ lea(r13, Address(r13, rbx, Address::times_1, ConstMethod::codes_offset()));
1871 __ dispatch_next(vtos); 1837 __ dispatch_next(vtos);
1872 } 1838 }
1873 1839
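
ret and wide_ret rebuild the bytecode pointer from the saved bci by adding ConstMethod::codes_offset() to the method's constants base, which is exactly what the lea above encodes; this commit only renames the metadata classes involved. A stand-alone model with an illustrative layout:

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    // Stand-in for ConstMethod: header fields, then the bytecodes inline.
    struct ConstMethodModel {
      uint64_t header;
      uint8_t  codes[16];                  // method bytecodes start here
      static size_t codes_offset() { return offsetof(ConstMethodModel, codes); }
    };

    int main() {
      ConstMethodModel cm = {};
      cm.codes[5] = 0xb1;                  // 'return' opcode parked at bci 5
      int bci = 5;                         // return bci loaded from the local by ret
      // __ movptr(r13, Address(rax, Method::const_offset()));
      // __ lea(r13, Address(r13, rbx, times_1, ConstMethod::codes_offset()));
      const uint8_t* bcp = reinterpret_cast<const uint8_t*>(&cm) +
                           ConstMethodModel::codes_offset() + bci;
      printf("opcode at reconstructed bcp: 0x%02x\n", *bcp);
      return 0;
    }
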
1874 void TemplateTable::tableswitch() { 1840 void TemplateTable::tableswitch() {
1875 Label default_case, continue_execution; 1841 Label default_case, continue_execution;
2115 __ membar(order_constraint); 2081 __ membar(order_constraint);
2116 } 2082 }
2117 } 2083 }
2118 2084
2119 void TemplateTable::resolve_cache_and_index(int byte_no, 2085 void TemplateTable::resolve_cache_and_index(int byte_no,
2120 Register result,
2121 Register Rcache, 2086 Register Rcache,
2122 Register index, 2087 Register index,
2123 size_t index_size) { 2088 size_t index_size) {
2124 const Register temp = rbx; 2089 const Register temp = rbx;
2125 assert_different_registers(result, Rcache, index, temp); 2090 assert_different_registers(Rcache, index, temp);
2126 2091
2127 Label resolved; 2092 Label resolved;
2128 if (byte_no == f12_oop) {
2129 // We are resolved if the f1 field contains a non-null object (CallSite, MethodType, etc.)
2130 // This kind of CP cache entry does not need to match bytecode_1 or bytecode_2, because
2131 // there is a 1-1 relation between bytecode type and CP entry type.
2132 // The caller will also load a methodOop from f2.
2133 assert(result != noreg, ""); //else do cmpptr(Address(...), (int32_t) NULL_WORD)
2134 __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size);
2135 __ movptr(result, Address(Rcache, index, Address::times_ptr, constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::f1_offset()));
2136 __ testptr(result, result);
2137 __ jcc(Assembler::notEqual, resolved);
2138 } else {
2139 assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range"); 2093 assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
2140 assert(result == noreg, ""); //else change code for setting result
2141 __ get_cache_and_index_and_bytecode_at_bcp(Rcache, index, temp, byte_no, 1, index_size); 2094 __ get_cache_and_index_and_bytecode_at_bcp(Rcache, index, temp, byte_no, 1, index_size);
2142 __ cmpl(temp, (int) bytecode()); // have we resolved this bytecode? 2095 __ cmpl(temp, (int) bytecode()); // have we resolved this bytecode?
2143 __ jcc(Assembler::equal, resolved); 2096 __ jcc(Assembler::equal, resolved);
2144 }
2145 2097
2146 // resolve first time through 2098 // resolve first time through
2147 address entry; 2099 address entry;
2148 switch (bytecode()) { 2100 switch (bytecode()) {
2149 case Bytecodes::_getstatic: 2101 case Bytecodes::_getstatic:
2162 entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokehandle); 2114 entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokehandle);
2163 break; 2115 break;
2164 case Bytecodes::_invokedynamic: 2116 case Bytecodes::_invokedynamic:
2165 entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokedynamic); 2117 entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokedynamic);
2166 break; 2118 break;
2167 case Bytecodes::_fast_aldc:
2168 entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_ldc);
2169 break;
2170 case Bytecodes::_fast_aldc_w:
2171 entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_ldc);
2172 break;
2173 default: 2119 default:
2174 fatal(err_msg("unexpected bytecode: %s", Bytecodes::name(bytecode()))); 2120 fatal(err_msg("unexpected bytecode: %s", Bytecodes::name(bytecode())));
2175 break; 2121 break;
2176 } 2122 }
2177 __ movl(temp, (int) bytecode()); 2123 __ movl(temp, (int) bytecode());
2178 __ call_VM(noreg, entry, temp); 2124 __ call_VM(noreg, entry, temp);
2179 2125
2180 // Update registers with resolved info 2126 // Update registers with resolved info
2181 __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size); 2127 __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size);
2182 if (result != noreg)
2183 __ movptr(result, Address(Rcache, index, Address::times_ptr, constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::f1_offset()));
2184 __ bind(resolved); 2128 __ bind(resolved);
2185 } 2129 }
2186 2130
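
After this change resolve_cache_and_index has a single resolved test: the cp-cache entry records the bytecode that resolved it, and a mismatch falls into the runtime resolver, which fills the entry before it is re-read. A sketch with a toy entry layout (in HotSpot, bytecode_1/bytecode_2 are bitfields packed into _indices and published with ordered stores):

    #include <cstdio>

    struct CpCacheEntryModel {
      int resolved_bytecode;   // models the bytecode_1/bytecode_2 slot
      int f1, f2, flags;
    };

    void resolve_in_vm(CpCacheEntryModel& e, int bytecode) {
      e.f2 = 42;                          // the runtime fills in the entry...
      e.resolved_bytecode = bytecode;     // ...and publishes the resolving bytecode last
    }

    int resolved_f2(CpCacheEntryModel& e, int bytecode) {
      if (e.resolved_bytecode != bytecode)  // cmpl(temp, (int)bytecode()); jcc(equal, resolved)
        resolve_in_vm(e, bytecode);         // call_VM(noreg, entry, temp)
      return e.f2;
    }

    int main() {
      CpCacheEntryModel e = {};
      int first  = resolved_f2(e, 0xb6);    // resolves via the runtime
      int second = resolved_f2(e, 0xb6);    // matches the recorded bytecode, no call
      printf("%d %d\n", first, second);
      return 0;
    }
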
2187 // The cache and index registers must be set before call 2131 // The cache and index registers must be set before call
2188 void TemplateTable::load_field_cp_cache_entry(Register obj, 2132 void TemplateTable::load_field_cp_cache_entry(Register obj,
2191 Register off, 2135 Register off,
2192 Register flags, 2136 Register flags,
2193 bool is_static = false) { 2137 bool is_static = false) {
2194 assert_different_registers(cache, index, flags, off); 2138 assert_different_registers(cache, index, flags, off);
2195 2139
2196 ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset(); 2140 ByteSize cp_base_offset = ConstantPoolCache::base_offset();
2197 // Field offset 2141 // Field offset
2198 __ movptr(off, Address(cache, index, Address::times_ptr, 2142 __ movptr(off, Address(cache, index, Address::times_ptr,
2199 in_bytes(cp_base_offset + 2143 in_bytes(cp_base_offset +
2200 ConstantPoolCacheEntry::f2_offset()))); 2144 ConstantPoolCacheEntry::f2_offset())));
2201 // Flags 2145 // Flags
2206 // klass overwrite register 2150 // klass overwrite register
2207 if (is_static) { 2151 if (is_static) {
2208 __ movptr(obj, Address(cache, index, Address::times_ptr, 2152 __ movptr(obj, Address(cache, index, Address::times_ptr,
2209 in_bytes(cp_base_offset + 2153 in_bytes(cp_base_offset +
2210 ConstantPoolCacheEntry::f1_offset()))); 2154 ConstantPoolCacheEntry::f1_offset())));
2155 const int mirror_offset = in_bytes(Klass::java_mirror_offset());
2156 __ movptr(obj, Address(obj, mirror_offset));
2211 } 2157 }
2212 } 2158 }
2213 2159
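
The two lines added to load_field_cp_cache_entry are a direct consequence of Klass no longer being a heap oop: for static fields f1 now yields a Klass*, and the static storage lives in the class's java.lang.Class mirror, so the base object is re-loaded through Klass::java_mirror_offset(). Modeled stand-alone with illustrative structures:

    #include <cstdio>

    struct MirrorModel { int static_fields[4]; };      // models the java.lang.Class instance
    struct KlassModel  { MirrorModel* java_mirror; };  // models Klass and its _java_mirror

    // Models the extra indirection added above for static fields.
    int load_static_int(KlassModel* f1_klass, int field_index) {
      MirrorModel* obj = f1_klass->java_mirror;  // movptr(obj, Address(obj, mirror_offset))
      return obj->static_fields[field_index];    // normal field access off the mirror
    }

    int main() {
      MirrorModel m = {{7, 8, 9, 10}};
      KlassModel  k = {&m};
      printf("%d\n", load_static_int(&k, 2));    // prints 9
      return 0;
    }
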
2214 void TemplateTable::load_invoke_cp_cache_entry(int byte_no, 2160 void TemplateTable::load_invoke_cp_cache_entry(int byte_no,
2215 Register method, 2161 Register method,
2226 assert_different_registers(itable_index, flags); 2172 assert_different_registers(itable_index, flags);
2227 assert_different_registers(itable_index, cache, index); 2173 assert_different_registers(itable_index, cache, index);
2228 // determine constant pool cache field offsets 2174 // determine constant pool cache field offsets
2229 assert(is_invokevirtual == (byte_no == f2_byte), "is_invokevirtual flag redundant"); 2175 assert(is_invokevirtual == (byte_no == f2_byte), "is_invokevirtual flag redundant");
2230 const int method_offset = in_bytes( 2176 const int method_offset = in_bytes(
2231 constantPoolCacheOopDesc::base_offset() + 2177 ConstantPoolCache::base_offset() +
2232 ((byte_no == f2_byte) 2178 ((byte_no == f2_byte)
2233 ? ConstantPoolCacheEntry::f2_offset() 2179 ? ConstantPoolCacheEntry::f2_offset()
2234 : ConstantPoolCacheEntry::f1_offset())); 2180 : ConstantPoolCacheEntry::f1_offset()));
2235 const int flags_offset = in_bytes(constantPoolCacheOopDesc::base_offset() + 2181 const int flags_offset = in_bytes(ConstantPoolCache::base_offset() +
2236 ConstantPoolCacheEntry::flags_offset()); 2182 ConstantPoolCacheEntry::flags_offset());
2237 // access constant pool cache fields 2183 // access constant pool cache fields
2238 const int index_offset = in_bytes(constantPoolCacheOopDesc::base_offset() + 2184 const int index_offset = in_bytes(ConstantPoolCache::base_offset() +
2239 ConstantPoolCacheEntry::f2_offset()); 2185 ConstantPoolCacheEntry::f2_offset());
2240 2186
2241 if (byte_no == f12_oop) {
2242 // Resolved f1_oop (CallSite, MethodType, etc.) goes into 'itable_index'.
2243 // Resolved f2_oop (methodOop invoker) will go into 'method' (at index_offset).
2244 // See ConstantPoolCacheEntry::set_dynamic_call and set_method_handle.
2245 size_t index_size = (is_invokedynamic ? sizeof(u4) : sizeof(u2)); 2187 size_t index_size = (is_invokedynamic ? sizeof(u4) : sizeof(u2));
2246 resolve_cache_and_index(byte_no, itable_index, cache, index, index_size); 2188 resolve_cache_and_index(byte_no, cache, index, index_size);
2247 __ movptr(method, Address(cache, index, Address::times_ptr, index_offset));
2248 itable_index = noreg; // hack to disable load below
2249 } else {
2250 resolve_cache_and_index(byte_no, noreg, cache, index, sizeof(u2));
2251 __ movptr(method, Address(cache, index, Address::times_ptr, method_offset)); 2189 __ movptr(method, Address(cache, index, Address::times_ptr, method_offset));
2252 } 2190
2253 if (itable_index != noreg) { 2191 if (itable_index != noreg) {
2254 // pick up itable index from f2 also: 2192 // pick up itable or appendix index from f2 also:
2255 assert(byte_no == f1_byte, "already picked up f1");
2256 __ movptr(itable_index, Address(cache, index, Address::times_ptr, index_offset)); 2193 __ movptr(itable_index, Address(cache, index, Address::times_ptr, index_offset));
2257 } 2194 }
2258 __ movl(flags, Address(cache, index, Address::times_ptr, flags_offset)); 2195 __ movl(flags, Address(cache, index, Address::times_ptr, flags_offset));
2259 } 2196 }
2260 2197
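
With the f12_oop kind removed, load_invoke_cp_cache_entry above always loads the method slot selected by byte_no — f2 for invokevirtual, f1 for the other invokes. A toy model of that selection:

    #include <cstdio>

    // Toy cp-cache entry; the real offsets are picked via byte_no.
    struct CpCacheEntryModel { unsigned long f1, f2; int flags; };

    unsigned long load_method_slot(const CpCacheEntryModel& e, bool is_invokevirtual) {
      return is_invokevirtual ? e.f2 : e.f1;  // f2_offset vs f1_offset
    }

    int main() {
      CpCacheEntryModel e = {0x1111, 0x2222, 0};
      printf("%lx %lx\n", load_method_slot(e, false), load_method_slot(e, true));
      return 0;
    }
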
2261
2262 // The registers cache and index expected to be set before call.
2263 // Correct values of the cache and index registers are preserved. 2198 // Correct values of the cache and index registers are preserved.
2264 void TemplateTable::jvmti_post_field_access(Register cache, Register index, 2199 void TemplateTable::jvmti_post_field_access(Register cache, Register index,
2265 bool is_static, bool has_tos) { 2200 bool is_static, bool has_tos) {
2266 // do the JVMTI work here to avoid disturbing the register state below 2201 // do the JVMTI work here to avoid disturbing the register state below
2267 // We use c_rarg registers here because we want to use the register used in 2202 // We use c_rarg registers here because we want to use the register used in
2276 __ jcc(Assembler::zero, L1); 2211 __ jcc(Assembler::zero, L1);
2277 2212
2278 __ get_cache_and_index_at_bcp(c_rarg2, c_rarg3, 1); 2213 __ get_cache_and_index_at_bcp(c_rarg2, c_rarg3, 1);
2279 2214
2280 // cache entry pointer 2215 // cache entry pointer
2281 __ addptr(c_rarg2, in_bytes(constantPoolCacheOopDesc::base_offset())); 2216 __ addptr(c_rarg2, in_bytes(ConstantPoolCache::base_offset()));
2282 __ shll(c_rarg3, LogBytesPerWord); 2217 __ shll(c_rarg3, LogBytesPerWord);
2283 __ addptr(c_rarg2, c_rarg3); 2218 __ addptr(c_rarg2, c_rarg3);
2284 if (is_static) { 2219 if (is_static) {
2285 __ xorl(c_rarg1, c_rarg1); // NULL object reference 2220 __ xorl(c_rarg1, c_rarg1); // NULL object reference
2286 } else { 2221 } else {
2312 const Register obj = c_rarg3; 2247 const Register obj = c_rarg3;
2313 const Register off = rbx; 2248 const Register off = rbx;
2314 const Register flags = rax; 2249 const Register flags = rax;
2315 const Register bc = c_rarg3; // uses same reg as obj, so don't mix them 2250 const Register bc = c_rarg3; // uses same reg as obj, so don't mix them
2316 2251
2317 resolve_cache_and_index(byte_no, noreg, cache, index, sizeof(u2)); 2252 resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
2318 jvmti_post_field_access(cache, index, is_static, false); 2253 jvmti_post_field_access(cache, index, is_static, false);
2319 load_field_cp_cache_entry(obj, cache, index, off, flags, is_static); 2254 load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
2320 2255
2321 if (!is_static) { 2256 if (!is_static) {
2322 // obj is on the stack 2257 // obj is on the stack
2451 // The registers cache and index expected to be set before call. 2386 // The registers cache and index expected to be set before call.
2452 // The function may destroy various registers, just not the cache and index registers. 2387 // The function may destroy various registers, just not the cache and index registers.
2453 void TemplateTable::jvmti_post_field_mod(Register cache, Register index, bool is_static) { 2388 void TemplateTable::jvmti_post_field_mod(Register cache, Register index, bool is_static) {
2454 transition(vtos, vtos); 2389 transition(vtos, vtos);
2455 2390
2456 ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset(); 2391 ByteSize cp_base_offset = ConstantPoolCache::base_offset();
2457 2392
2458 if (JvmtiExport::can_post_field_modification()) { 2393 if (JvmtiExport::can_post_field_modification()) {
2459 // Check to see if a field modification watch has been set before 2394 // Check to see if a field modification watch has been set before
2460 // we take the time to call into the VM. 2395 // we take the time to call into the VM.
2461 Label L1; 2396 Label L1;
2515 const Register obj = rcx; 2450 const Register obj = rcx;
2516 const Register off = rbx; 2451 const Register off = rbx;
2517 const Register flags = rax; 2452 const Register flags = rax;
2518 const Register bc = c_rarg3; 2453 const Register bc = c_rarg3;
2519 2454
2520 resolve_cache_and_index(byte_no, noreg, cache, index, sizeof(u2)); 2455 resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
2521 jvmti_post_field_mod(cache, index, is_static); 2456 jvmti_post_field_mod(cache, index, is_static);
2522 load_field_cp_cache_entry(obj, cache, index, off, flags, is_static); 2457 load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
2523 2458
2524 // [jk] not needed currently 2459 // [jk] not needed currently
2525 // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore | 2460 // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
2739 } 2674 }
2740 2675
2741 void TemplateTable::fast_storefield(TosState state) { 2676 void TemplateTable::fast_storefield(TosState state) {
2742 transition(state, vtos); 2677 transition(state, vtos);
2743 2678
2744 ByteSize base = constantPoolCacheOopDesc::base_offset(); 2679 ByteSize base = ConstantPoolCache::base_offset();
2745 2680
2746 jvmti_post_fast_field_mod(); 2681 jvmti_post_fast_field_mod();
2747 2682
2748 // access constant pool cache 2683 // access constant pool cache
2749 __ get_cache_and_index_at_bcp(rcx, rbx, 1); 2684 __ get_cache_and_index_at_bcp(rcx, rbx, 1);
2839 __ get_cache_and_index_at_bcp(rcx, rbx, 1); 2774 __ get_cache_and_index_at_bcp(rcx, rbx, 1);
2840 // replace index with field offset from cache entry 2775 // replace index with field offset from cache entry
2841 // [jk] not needed currently 2776 // [jk] not needed currently
2842 // if (os::is_MP()) { 2777 // if (os::is_MP()) {
2843 // __ movl(rdx, Address(rcx, rbx, Address::times_8, 2778 // __ movl(rdx, Address(rcx, rbx, Address::times_8,
2844 // in_bytes(constantPoolCacheOopDesc::base_offset() + 2779 // in_bytes(ConstantPoolCache::base_offset() +
2845 // ConstantPoolCacheEntry::flags_offset()))); 2780 // ConstantPoolCacheEntry::flags_offset())));
2846 // __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift); 2781 // __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
2847 // __ andl(rdx, 0x1); 2782 // __ andl(rdx, 0x1);
2848 // } 2783 // }
2849 __ movptr(rbx, Address(rcx, rbx, Address::times_8, 2784 __ movptr(rbx, Address(rcx, rbx, Address::times_8,
2850 in_bytes(constantPoolCacheOopDesc::base_offset() + 2785 in_bytes(ConstantPoolCache::base_offset() +
2851 ConstantPoolCacheEntry::f2_offset()))); 2786 ConstantPoolCacheEntry::f2_offset())));
2852 2787
2853 // rax: object 2788 // rax: object
2854 __ verify_oop(rax); 2789 __ verify_oop(rax);
2855 __ null_check(rax); 2790 __ null_check(rax);
2902 __ movptr(rax, aaddress(0)); 2837 __ movptr(rax, aaddress(0));
2903 // access constant pool cache 2838 // access constant pool cache
2904 __ get_cache_and_index_at_bcp(rcx, rdx, 2); 2839 __ get_cache_and_index_at_bcp(rcx, rdx, 2);
2905 __ movptr(rbx, 2840 __ movptr(rbx,
2906 Address(rcx, rdx, Address::times_8, 2841 Address(rcx, rdx, Address::times_8,
2907 in_bytes(constantPoolCacheOopDesc::base_offset() + 2842 in_bytes(ConstantPoolCache::base_offset() +
2908 ConstantPoolCacheEntry::f2_offset()))); 2843 ConstantPoolCacheEntry::f2_offset())));
2909 // make sure exception is reported in correct bcp range (getfield is 2844 // make sure exception is reported in correct bcp range (getfield is
2910 // next instruction) 2845 // next instruction)
2911 __ increment(r13); 2846 __ increment(r13);
2912 __ null_check(rax); 2847 __ null_check(rax);
2927 2862
2928 // [jk] not needed currently 2863 // [jk] not needed currently
2929 // if (os::is_MP()) { 2864 // if (os::is_MP()) {
2930 // Label notVolatile; 2865 // Label notVolatile;
2931 // __ movl(rdx, Address(rcx, rdx, Address::times_8, 2866 // __ movl(rdx, Address(rcx, rdx, Address::times_8,
2932 // in_bytes(constantPoolCacheOopDesc::base_offset() + 2867 // in_bytes(ConstantPoolCache::base_offset() +
2933 // ConstantPoolCacheEntry::flags_offset()))); 2868 // ConstantPoolCacheEntry::flags_offset())));
2934 // __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift); 2869 // __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
2935 // __ testl(rdx, 0x1); 2870 // __ testl(rdx, 0x1);
2936 // __ jcc(Assembler::zero, notVolatile); 2871 // __ jcc(Assembler::zero, notVolatile);
2937 // __ membar(Assembler::LoadLoad); 2872 // __ membar(Assembler::LoadLoad);
2982 load_invoke_cp_cache_entry(byte_no, method, index, flags, is_invokevirtual, false, is_invokedynamic); 2917 load_invoke_cp_cache_entry(byte_no, method, index, flags, is_invokevirtual, false, is_invokedynamic);
2983 2918
2984 // maybe push appendix to arguments (just before return address) 2919 // maybe push appendix to arguments (just before return address)
2985 if (is_invokedynamic || is_invokehandle) { 2920 if (is_invokedynamic || is_invokehandle) {
2986 Label L_no_push; 2921 Label L_no_push;
2987 __ verify_oop(index);
2988 __ testl(flags, (1 << ConstantPoolCacheEntry::has_appendix_shift)); 2922 __ testl(flags, (1 << ConstantPoolCacheEntry::has_appendix_shift));
2989 __ jccb(Assembler::zero, L_no_push); 2923 __ jcc(Assembler::zero, L_no_push);
2990 // Push the appendix as a trailing parameter. 2924 // Push the appendix as a trailing parameter.
2991 // This must be done before we get the receiver, 2925 // This must be done before we get the receiver,
2992 // since the parameter_size includes it. 2926 // since the parameter_size includes it.
2927 __ push(rbx);
2928 __ mov(rbx, index);
2929 __ load_resolved_reference_at_index(index, rbx);
2930 __ pop(rbx);
2993 __ push(index); // push appendix (MethodType, CallSite, etc.) 2931 __ push(index); // push appendix (MethodType, CallSite, etc.)
2994 __ bind(L_no_push); 2932 __ bind(L_no_push);
2995 } 2933 }
2996 2934
2997 // load receiver if needed (after appendix is pushed so parameter size is correct) 2935 // load receiver if needed (after appendix is pushed so parameter size is correct)
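
The push/pop of rbx added above exists because load_resolved_reference_at_index needs a scratch register while the appendix index arrives in 'index' (the f2 value): the index is translated through the resolved-references array and the resulting MethodType or CallSite is pushed as a trailing parameter. Sketched below with an assumed flag-bit position:

    #include <cstdio>

    const int has_appendix_bit = 1 << 26;   // stands in for has_appendix_shift's bit

    // Models cpool->resolved_references(); the appendix now lives here, and
    // f2 holds its index rather than the object itself.
    static const char* resolved_refs[8] = { "MethodType", "CallSite" };

    void push_parameter(const char* v) { printf("pushed appendix: %s\n", v); }

    void maybe_push_appendix(int flags, int f2_index) {
      if ((flags & has_appendix_bit) == 0)      // testl(flags, ...); jcc(zero, L_no_push)
        return;
      push_parameter(resolved_refs[f2_index]);  // load_resolved_reference_at_index + push
    }

    int main() {
      maybe_push_appendix(has_appendix_bit, 1);  // pushes "CallSite"
      maybe_push_appendix(0, 1);                 // flag clear: nothing pushed
      return 0;
    }
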
3050 __ andl(rax, (1 << ConstantPoolCacheEntry::is_vfinal_shift)); 2988 __ andl(rax, (1 << ConstantPoolCacheEntry::is_vfinal_shift));
3051 __ jcc(Assembler::zero, notFinal); 2989 __ jcc(Assembler::zero, notFinal);
3052 2990
3053 const Register method = index; // method must be rbx 2991 const Register method = index; // method must be rbx
3054 assert(method == rbx, 2992 assert(method == rbx,
3055 "methodOop must be rbx for interpreter calling convention"); 2993 "Method* must be rbx for interpreter calling convention");
3056 2994
3057 // do the call - the index is actually the method to call 2995 // do the call - the index is actually the method to call
3058 // that is, f2 is a vtable index if !is_vfinal, else f2 is a methodOop 2996 // that is, f2 is a vtable index if !is_vfinal, else f2 is a Method*
3059 __ verify_oop(method);
3060 2997
3061 // It's final, need a null check here! 2998 // It's final, need a null check here!
3062 __ null_check(recv); 2999 __ null_check(recv);
3063 3000
3064 // profile this call 3001 // profile this call
3069 __ bind(notFinal); 3006 __ bind(notFinal);
3070 3007
3071 // get receiver klass 3008 // get receiver klass
3072 __ null_check(recv, oopDesc::klass_offset_in_bytes()); 3009 __ null_check(recv, oopDesc::klass_offset_in_bytes());
3073 __ load_klass(rax, recv); 3010 __ load_klass(rax, recv);
3074 __ verify_oop(rax);
3075 3011
3076 // profile this call 3012 // profile this call
3077 __ profile_virtual_call(rax, r14, rdx); 3013 __ profile_virtual_call(rax, r14, rdx);
3078 3014
3079 // get target methodOop & entry point 3015 // get target Method* & entry point
3080 __ lookup_virtual_method(rax, index, method); 3016 __ lookup_virtual_method(rax, index, method);
3081 __ jump_from_interpreted(method, rdx); 3017 __ jump_from_interpreted(method, rdx);
3082 } 3018 }
3083 3019
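
invokevirtual_helper branches on is_vfinal: when set, f2 already holds the Method* and only a receiver null check is needed; otherwise f2 is a vtable index resolved through the receiver's klass by lookup_virtual_method. A stand-alone model, layouts illustrative:

    #include <cstdint>
    #include <cstdio>

    typedef void (*MethodFn)();
    struct KlassModel { MethodFn vtable[4]; };   // vtable embedded in the klass

    void a_final_method()   { printf("vfinal: direct Method*\n"); }
    void a_virtual_method() { printf("vtable dispatch\n"); }

    void invokevirtual(bool is_vfinal, uintptr_t f2, KlassModel* recv_klass) {
      MethodFn m = is_vfinal ? reinterpret_cast<MethodFn>(f2)  // f2 is the Method*
                             : recv_klass->vtable[f2];         // lookup_virtual_method
      m();                                                     // jump_from_interpreted
    }

    int main() {
      KlassModel k = {{nullptr, a_virtual_method}};
      invokevirtual(false, 1, &k);
      invokevirtual(true, reinterpret_cast<uintptr_t>(&a_final_method), &k);
      return 0;
    }
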
3084 3020
3099 3035
3100 3036
3101 void TemplateTable::invokespecial(int byte_no) { 3037 void TemplateTable::invokespecial(int byte_no) {
3102 transition(vtos, vtos); 3038 transition(vtos, vtos);
3103 assert(byte_no == f1_byte, "use this argument"); 3039 assert(byte_no == f1_byte, "use this argument");
3104 prepare_invoke(byte_no, rbx, noreg, // get f1 methodOop 3040 prepare_invoke(byte_no, rbx, noreg, // get f1 Method*
3105 rcx); // get receiver also for null check 3041 rcx); // get receiver also for null check
3106 __ verify_oop(rcx); 3042 __ verify_oop(rcx);
3107 __ null_check(rcx); 3043 __ null_check(rcx);
3108 // do the call 3044 // do the call
3109 __ verify_oop(rbx);
3110 __ profile_call(rax); 3045 __ profile_call(rax);
3111 __ jump_from_interpreted(rbx, rax); 3046 __ jump_from_interpreted(rbx, rax);
3112 } 3047 }
3113 3048
3114 3049
3115 void TemplateTable::invokestatic(int byte_no) { 3050 void TemplateTable::invokestatic(int byte_no) {
3116 transition(vtos, vtos); 3051 transition(vtos, vtos);
3117 assert(byte_no == f1_byte, "use this argument"); 3052 assert(byte_no == f1_byte, "use this argument");
3118 prepare_invoke(byte_no, rbx); // get f1 methodOop 3053 prepare_invoke(byte_no, rbx); // get f1 Method*
3119 // do the call 3054 // do the call
3120 __ verify_oop(rbx);
3121 __ profile_call(rax); 3055 __ profile_call(rax);
3122 __ jump_from_interpreted(rbx, rax); 3056 __ jump_from_interpreted(rbx, rax);
3123 } 3057 }
3124 3058
3125 void TemplateTable::fast_invokevfinal(int byte_no) { 3059 void TemplateTable::fast_invokevfinal(int byte_no) {
3129 } 3063 }
3130 3064
3131 void TemplateTable::invokeinterface(int byte_no) { 3065 void TemplateTable::invokeinterface(int byte_no) {
3132 transition(vtos, vtos); 3066 transition(vtos, vtos);
3133 assert(byte_no == f1_byte, "use this argument"); 3067 assert(byte_no == f1_byte, "use this argument");
3134 prepare_invoke(byte_no, rax, rbx, // get f1 klassOop, f2 itable index 3068 prepare_invoke(byte_no, rax, rbx, // get f1 Klass*, f2 itable index
3135 rcx, rdx); // recv, flags 3069 rcx, rdx); // recv, flags
3136 3070
3137 // rax: interface klass (from f1) 3071 // rax: interface klass (from f1)
3138 // rbx: itable index (from f2) 3072 // rbx: itable index (from f2)
3139 // rcx: receiver 3073 // rcx: receiver
3153 3087
3154 // Get receiver klass into rdx - also a null check 3088 // Get receiver klass into rdx - also a null check
3155 __ restore_locals(); // restore r14 3089 __ restore_locals(); // restore r14
3156 __ null_check(rcx, oopDesc::klass_offset_in_bytes()); 3090 __ null_check(rcx, oopDesc::klass_offset_in_bytes());
3157 __ load_klass(rdx, rcx); 3091 __ load_klass(rdx, rcx);
3158 __ verify_oop(rdx);
3159 3092
3160 // profile this call 3093 // profile this call
3161 __ profile_virtual_call(rdx, r13, r14); 3094 __ profile_virtual_call(rdx, r13, r14);
3162 3095
3163 Label no_such_interface, no_such_method; 3096 Label no_such_interface, no_such_method;
3166 rdx, rax, rbx, 3099 rdx, rax, rbx,
3167 // outputs: method, scan temp. reg 3100 // outputs: method, scan temp. reg
3168 rbx, r13, 3101 rbx, r13,
3169 no_such_interface); 3102 no_such_interface);
3170 3103
3171 // rbx: methodOop to call 3104 // rbx: Method* to call
3172 // rcx: receiver 3105 // rcx: receiver
3173 // Check for abstract method error 3106 // Check for abstract method error
3174 // Note: This should be done more efficiently via a throw_abstract_method_error 3107 // Note: This should be done more efficiently via a throw_abstract_method_error
3175 // interpreter entry point and a conditional jump to it in case of a null 3108 // interpreter entry point and a conditional jump to it in case of a null
3176 // method. 3109 // method.
3177 __ testptr(rbx, rbx); 3110 __ testptr(rbx, rbx);
3178 __ jcc(Assembler::zero, no_such_method); 3111 __ jcc(Assembler::zero, no_such_method);
3179 3112
3180 // do the call 3113 // do the call
3181 // rcx: receiver 3114 // rcx: receiver
3182 // rbx,: methodOop 3115 // rbx,: Method*
3183 __ jump_from_interpreted(rbx, rdx); 3116 __ jump_from_interpreted(rbx, rdx);
3184 __ should_not_reach_here(); 3117 __ should_not_reach_here();
3185 3118
3186 // exception handling code follows... 3119 // exception handling code follows...
3187 // note: must restore interpreter registers to canonical 3120 // note: must restore interpreter registers to canonical
3208 } 3141 }
3209 3142
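
lookup_interface_method above scans the receiver klass's itable for the interface klass from f1, then indexes the per-interface method block with the f2 itable index; a null method falls through to AbstractMethodError and a missing interface to IncompatibleClassChangeError. A toy model with an entirely illustrative itable layout:

    #include <cstdio>

    typedef void (*MethodFn)();
    struct ItableEntry { const void* interface_klass; MethodFn* methods; };

    MethodFn lookup_interface_method(ItableEntry* itable, int n,
                                     const void* iface, int itable_index) {
      for (int i = 0; i < n; i++)
        if (itable[i].interface_klass == iface)
          return itable[i].methods[itable_index];  // may be null -> AbstractMethodError
      return nullptr;                              // no_such_interface -> ICCE
    }

    void impl() { printf("interface method\n"); }

    int main() {
      static MethodFn methods[2] = { nullptr, impl };
      static int iface_id;                         // stand-in interface Klass
      ItableEntry itable[1] = {{ &iface_id, methods }};
      MethodFn m = lookup_interface_method(itable, 1, &iface_id, 1);
      if (m) m();
      return 0;
    }
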
3210 3143
3211 void TemplateTable::invokehandle(int byte_no) { 3144 void TemplateTable::invokehandle(int byte_no) {
3212 transition(vtos, vtos); 3145 transition(vtos, vtos);
3213 assert(byte_no == f12_oop, "use this argument"); 3146 assert(byte_no == f1_byte, "use this argument");
3214 const Register rbx_method = rbx; // f2 3147 const Register rbx_method = rbx; // f2
3215 const Register rax_mtype = rax; // f1 3148 const Register rax_mtype = rax; // f1
3216 const Register rcx_recv = rcx; 3149 const Register rcx_recv = rcx;
3217 const Register rdx_flags = rdx; 3150 const Register rdx_flags = rdx;
3218 3151
3221 __ should_not_reach_here(); 3154 __ should_not_reach_here();
3222 return; 3155 return;
3223 } 3156 }
3224 3157
3225 prepare_invoke(byte_no, 3158 prepare_invoke(byte_no,
3226 rbx_method, rax_mtype, // get f2 methodOop, f1 MethodType 3159 rbx_method, rax_mtype, // get f2 Method*, f1 MethodType
3227 rcx_recv); 3160 rcx_recv);
3228 __ verify_oop(rbx_method); 3161 __ verify_oop(rbx_method);
3229 __ verify_oop(rcx_recv); 3162 __ verify_oop(rcx_recv);
3230 __ null_check(rcx_recv); 3163 __ null_check(rcx_recv);
3231 3164
3238 } 3171 }
3239 3172
3240 3173
3241 void TemplateTable::invokedynamic(int byte_no) { 3174 void TemplateTable::invokedynamic(int byte_no) {
3242 transition(vtos, vtos); 3175 transition(vtos, vtos);
3243 assert(byte_no == f12_oop, "use this argument"); 3176 assert(byte_no == f1_byte, "use this argument");
3244 3177
3245 if (!EnableInvokeDynamic) { 3178 if (!EnableInvokeDynamic) {
3246 // We should not encounter this bytecode if !EnableInvokeDynamic. 3179 // We should not encounter this bytecode if !EnableInvokeDynamic.
3247 // The verifier will stop it. However, if we get past the verifier, 3180 // The verifier will stop it. However, if we get past the verifier,
3248 // this will stop the thread in a reasonable way, without crashing the JVM. 3181 // this will stop the thread in a reasonable way, without crashing the JVM.
3256 const Register rbx_method = rbx; 3189 const Register rbx_method = rbx;
3257 const Register rax_callsite = rax; 3190 const Register rax_callsite = rax;
3258 3191
3259 prepare_invoke(byte_no, rbx_method, rax_callsite); 3192 prepare_invoke(byte_no, rbx_method, rax_callsite);
3260 3193
3261 // rax: CallSite object (from f1) 3194 // rax: CallSite object (from cpool->resolved_references[])
3262 // rbx: MH.linkToCallSite method (from f2) 3195 // rbx: MH.linkToCallSite method (from f2)
3263 3196
3264 // Note: rax_callsite is already pushed by prepare_invoke 3197 // Note: rax_callsite is already pushed by prepare_invoke
3265 3198
3266 // %%% should make a type profile for any invokedynamic that takes a ref argument 3199 // %%% should make a type profile for any invokedynamic that takes a ref argument
3285 Label initialize_object; // including clearing the fields 3218 Label initialize_object; // including clearing the fields
3286 Label allocate_shared; 3219 Label allocate_shared;
3287 3220
3288 __ get_cpool_and_tags(rsi, rax); 3221 __ get_cpool_and_tags(rsi, rax);
3289 // Make sure the class we're about to instantiate has been resolved. 3222 // Make sure the class we're about to instantiate has been resolved.
3290 // This is done before loading instanceKlass to be consistent with the order 3223 // This is done before loading InstanceKlass to be consistent with the order
3291 // how Constant Pool is updated (see constantPoolOopDesc::klass_at_put) 3224 // how Constant Pool is updated (see ConstantPool::klass_at_put)
3292 const int tags_offset = typeArrayOopDesc::header_size(T_BYTE) * wordSize; 3225 const int tags_offset = Array<u1>::base_offset_in_bytes();
3293 __ cmpb(Address(rax, rdx, Address::times_1, tags_offset), 3226 __ cmpb(Address(rax, rdx, Address::times_1, tags_offset),
3294 JVM_CONSTANT_Class); 3227 JVM_CONSTANT_Class);
3295 __ jcc(Assembler::notEqual, slow_case); 3228 __ jcc(Assembler::notEqual, slow_case);
3296 3229
3297 // get instanceKlass 3230 // get InstanceKlass
3298 __ movptr(rsi, Address(rsi, rdx, 3231 __ movptr(rsi, Address(rsi, rdx,
3299 Address::times_8, sizeof(constantPoolOopDesc))); 3232 Address::times_8, sizeof(ConstantPool)));
3300 3233
3301 // make sure klass is initialized & doesn't have finalizer 3234 // make sure klass is initialized & doesn't have finalizer
3302 // make sure klass is fully initialized 3235 // make sure klass is fully initialized
3303 __ cmpb(Address(rsi, 3236 __ cmpb(Address(rsi,
3304 instanceKlass::init_state_offset()), 3237 InstanceKlass::init_state_offset()),
3305 instanceKlass::fully_initialized); 3238 InstanceKlass::fully_initialized);
3306 __ jcc(Assembler::notEqual, slow_case); 3239 __ jcc(Assembler::notEqual, slow_case);
3307 3240
3308 // get instance_size in instanceKlass (scaled to a count of bytes) 3241 // get instance_size in InstanceKlass (scaled to a count of bytes)
3309 __ movl(rdx, 3242 __ movl(rdx,
3310 Address(rsi, 3243 Address(rsi,
3311 Klass::layout_helper_offset())); 3244 Klass::layout_helper_offset()));
3312 // test to see if it has a finalizer or is malformed in some way 3245 // test to see if it has a finalizer or is malformed in some way
3313 __ testl(rdx, Klass::_lh_instance_slow_path_bit); 3246 __ testl(rdx, Klass::_lh_instance_slow_path_bit);
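
The layout-helper test above relies on the Klass encoding for instances: the helper is the instance allocation size in bytes, with a reserved low bit that forces the slow path (finalizer or otherwise malformed cases). Simplified:

    #include <cstdio>

    const int lh_instance_slow_path_bit = 1;  // Klass::_lh_instance_slow_path_bit

    bool must_take_slow_path(int layout_helper) {
      return (layout_helper & lh_instance_slow_path_bit) != 0;  // testl + jcc
    }

    int main() {
      int lh_plain     = 24;                               // 24-byte instance, fast path
      int lh_finalizer = 24 | lh_instance_slow_path_bit;   // forced slow path
      printf("%d %d\n", must_take_slow_path(lh_plain), must_take_slow_path(lh_finalizer));
      return 0;
    }
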
3468 __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array 3401 __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
3469 __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index 3402 __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
3470 // See if bytecode has already been quicked 3403 // See if bytecode has already been quicked
3471 __ cmpb(Address(rdx, rbx, 3404 __ cmpb(Address(rdx, rbx,
3472 Address::times_1, 3405 Address::times_1,
3473 typeArrayOopDesc::header_size(T_BYTE) * wordSize), 3406 Array<u1>::base_offset_in_bytes()),
3474 JVM_CONSTANT_Class); 3407 JVM_CONSTANT_Class);
3475 __ jcc(Assembler::equal, quicked); 3408 __ jcc(Assembler::equal, quicked);
3476 __ push(atos); // save receiver for result, and for GC 3409 __ push(atos); // save receiver for result, and for GC
3477 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc)); 3410 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
3411 // vm_result_2 has metadata result
3412 __ get_vm_result_2(rax, r15_thread);
3478 __ pop_ptr(rdx); // restore receiver 3413 __ pop_ptr(rdx); // restore receiver
3479 __ jmpb(resolved); 3414 __ jmpb(resolved);
3480 3415
3481 // Get superklass in rax and subklass in rbx 3416 // Get superklass in rax and subklass in rbx
3482 __ bind(quicked); 3417 __ bind(quicked);
3483 __ mov(rdx, rax); // Save object in rdx; rax needed for subtype check 3418 __ mov(rdx, rax); // Save object in rdx; rax needed for subtype check
3484 __ movptr(rax, Address(rcx, rbx, 3419 __ movptr(rax, Address(rcx, rbx,
3485 Address::times_8, sizeof(constantPoolOopDesc))); 3420 Address::times_8, sizeof(ConstantPool)));
3486 3421
3487 __ bind(resolved); 3422 __ bind(resolved);
3488 __ load_klass(rbx, rdx); 3423 __ load_klass(rbx, rdx);
3489 3424
3490 // Generate subtype check. Blows rcx, rdi. Object in rdx. 3425 // Generate subtype check. Blows rcx, rdi. Object in rdx.
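
The unquickened checkcast path changes because InterpreterRuntime::quicken_io_cc now hands back a Klass*, which is metadata rather than an oop: the value returns through the thread's vm_result_2 slot (fetched by get_vm_result_2) instead of the oop result register. A stand-alone model of that convention:

    #include <cstdio>

    struct ThreadModel {
      void* vm_result;     // oop results (get_vm_result)
      void* vm_result_2;   // metadata results (get_vm_result_2)
    };

    static int a_klass;    // stand-in for the resolved Klass

    // Models the call_VM(noreg, quicken_io_cc) / get_vm_result_2 pair above.
    void quicken_io_cc(ThreadModel* t) { t->vm_result_2 = &a_klass; }

    int main() {
      ThreadModel t = {};
      quicken_io_cc(&t);              // call into the runtime, no oop result
      void* klass = t.vm_result_2;    // __ get_vm_result_2(rax, r15_thread)
      printf("resolved klass = %p\n", klass);
      return 0;
    }
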
3521 __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array 3456 __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
3522 __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index 3457 __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
3523 // See if bytecode has already been quicked 3458 // See if bytecode has already been quicked
3524 __ cmpb(Address(rdx, rbx, 3459 __ cmpb(Address(rdx, rbx,
3525 Address::times_1, 3460 Address::times_1,
3526 typeArrayOopDesc::header_size(T_BYTE) * wordSize), 3461 Array<u1>::base_offset_in_bytes()),
3527 JVM_CONSTANT_Class); 3462 JVM_CONSTANT_Class);
3528 __ jcc(Assembler::equal, quicked); 3463 __ jcc(Assembler::equal, quicked);
3529 3464
3530 __ push(atos); // save receiver for result, and for GC 3465 __ push(atos); // save receiver for result, and for GC
3531 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc)); 3466 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
3467 // vm_result_2 has metadata result
3468 __ get_vm_result_2(rax, r15_thread);
3532 __ pop_ptr(rdx); // restore receiver 3469 __ pop_ptr(rdx); // restore receiver
3533 __ verify_oop(rdx); 3470 __ verify_oop(rdx);
3534 __ load_klass(rdx, rdx); 3471 __ load_klass(rdx, rdx);
3535 __ jmpb(resolved); 3472 __ jmpb(resolved);
3536 3473
3537 // Get superklass in rax and subklass in rdx 3474 // Get superklass in rax and subklass in rdx
3538 __ bind(quicked); 3475 __ bind(quicked);
3539 __ load_klass(rdx, rax); 3476 __ load_klass(rdx, rax);
3540 __ movptr(rax, Address(rcx, rbx, 3477 __ movptr(rax, Address(rcx, rbx,
3541 Address::times_8, sizeof(constantPoolOopDesc))); 3478 Address::times_8, sizeof(ConstantPool)));
3542 3479
3543 __ bind(resolved); 3480 __ bind(resolved);
3544 3481
3545 // Generate subtype check. Blows rcx, rdi 3482 // Generate subtype check. Blows rcx, rdi
3546 // Superklass in rax. Subklass in rdx. 3483 // Superklass in rax. Subklass in rdx.