comparison src/cpu/x86/vm/templateTable_x86_32.cpp @ 6725:da91efe96a93

6964458: Reimplement class meta-data storage to use native memory
Summary: Remove PermGen, allocate meta-data in metaspace linked to class loaders, rewrite GC walking, rewrite and rename metadata to be C++ classes
Reviewed-by: jmasa, stefank, never, coleenp, kvn, brutisso, mgerdin, dholmes, jrose, twisti, roland
Contributed-by: jmasa <jon.masamitsu@oracle.com>, stefank <stefan.karlsson@oracle.com>, mgerdin <mikael.gerdin@oracle.com>, never <tom.rodriguez@oracle.com>
author coleenp
date Sat, 01 Sep 2012 13:25:18 -0400
parents 1d7922586cf6
children 75f33eecc1b3
comparing 6724:36d1d483d5d6 with 6725:da91efe96a93
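The summary above is the whole story of this change: class metadata that used to live in the Java heap's PermGen as oops (methodOop, constantPoolOop) becomes ordinary C++ objects (Method, ConstantPool) allocated in native metaspace owned by the defining class loader, so unloading the loader releases its metadata without garbage-collecting a permanent generation. A minimal standalone sketch of that ownership model, assuming nothing beyond the commit summary (all names here are illustrative stand-ins, not the real HotSpot declarations):

    #include <cstdint>
    #include <vector>

    // Stand-in for the new common base of metaspace-allocated metadata.
    class Metadata {
     public:
      virtual ~Metadata() {}
    };

    class ConstantPool;

    // Was methodOopDesc, a GC-managed heap object; now a plain C++ class.
    class Method : public Metadata {
     public:
      ConstantPool* constants = nullptr;  // raw native pointer, not an oop
      uint32_t access_flags = 0;
    };

    // Each class loader owns a metaspace; when the loader is unloaded the
    // whole arena is released, so individual metadata needs no GC walking.
    class ClassLoaderMetaspace {
      std::vector<Metadata*> _allocated;  // stand-in for arena allocation
     public:
      template <typename T>
      T* allocate() {
        T* m = new T();
        _allocated.push_back(m);
        return m;
      }
      ~ClassLoaderMetaspace() {
        for (Metadata* m : _allocated) delete m;
      }
    };

Most hunks below follow mechanically from this: oop-flavored type names (methodOopDesc, constantPoolOopDesc, instanceKlass) become C++ class names (Method, ConstantPool, InstanceKlass), verify_oop calls on metadata disappear, and oop constants move into a per-pool resolved_references array.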
@@ -1 +1 @@
 /*
- * Copyright (c) 1997, 2011, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1997, 2012, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
  * under the terms of the GNU General Public License version 2 only, as
  * published by the Free Software Foundation.
@@ -26 +26 @@
 #include "asm/assembler.hpp"
 #include "interpreter/interpreter.hpp"
 #include "interpreter/interpreterRuntime.hpp"
 #include "interpreter/templateTable.hpp"
 #include "memory/universe.inline.hpp"
-#include "oops/methodDataOop.hpp"
+#include "oops/methodData.hpp"
 #include "oops/objArrayKlass.hpp"
 #include "oops/oop.inline.hpp"
 #include "prims/methodHandles.hpp"
 #include "runtime/sharedRuntime.hpp"
 #include "runtime/stubRoutines.hpp"
@@ -355 +355 @@
     __ get_unsigned_2_byte_index_at_bcp(rbx, 1);
   } else {
     __ load_unsigned_byte(rbx, at_bcp(1));
   }
   __ get_cpool_and_tags(rcx, rax);
-  const int base_offset = constantPoolOopDesc::header_size() * wordSize;
-  const int tags_offset = typeArrayOopDesc::header_size(T_BYTE) * wordSize;
+  const int base_offset = ConstantPool::header_size() * wordSize;
+  const int tags_offset = Array<u1>::base_offset_in_bytes();
 
   // get type
   __ xorptr(rdx, rdx);
   __ movb(rdx, Address(rax, rbx, Address::times_1, tags_offset));
-
-  // unresolved string - get the resolved string
-  __ cmpl(rdx, JVM_CONSTANT_UnresolvedString);
-  __ jccb(Assembler::equal, call_ldc);
 
   // unresolved class - get the resolved class
   __ cmpl(rdx, JVM_CONSTANT_UnresolvedClass);
   __ jccb(Assembler::equal, call_ldc);
 
@@ -398 +394 @@
   __ bind(notFloat);
 #ifdef ASSERT
   { Label L;
     __ cmpl(rdx, JVM_CONSTANT_Integer);
     __ jcc(Assembler::equal, L);
-    __ cmpl(rdx, JVM_CONSTANT_String);
-    __ jcc(Assembler::equal, L);
-    __ cmpl(rdx, JVM_CONSTANT_Object);
-    __ jcc(Assembler::equal, L);
+    // String and Object are rewritten to fast_aldc
     __ stop("unexpected tag type in ldc");
     __ bind(L);
   }
 #endif
-  Label isOop;
-  // atos and itos
-  // Integer is only non-oop type we will see here
-  __ cmpl(rdx, JVM_CONSTANT_Integer);
-  __ jccb(Assembler::notEqual, isOop);
+  // itos JVM_CONSTANT_Integer only
   __ movl(rax, Address(rcx, rbx, Address::times_ptr, base_offset));
   __ push(itos);
-  __ jmp(Done);
-  __ bind(isOop);
-  __ movptr(rax, Address(rcx, rbx, Address::times_ptr, base_offset));
-  __ push(atos);
-
-  if (VerifyOops) {
-    __ verify_oop(rax);
-  }
   __ bind(Done);
 }
 
 // Fast path for caching oop constants.
-// %%% We should use this to handle Class and String constants also.
-// %%% It will simplify the ldc/primitive path considerably.
 void TemplateTable::fast_aldc(bool wide) {
   transition(vtos, atos);
 
-  if (!EnableInvokeDynamic) {
-    // We should not encounter this bytecode if !EnableInvokeDynamic.
-    // The verifier will stop it. However, if we get past the verifier,
-    // this will stop the thread in a reasonable way, without crashing the JVM.
-    __ call_VM(noreg, CAST_FROM_FN_PTR(address,
-                      InterpreterRuntime::throw_IncompatibleClassChangeError));
-    // the call_VM checks for exception, so we should never return here.
-    __ should_not_reach_here();
-    return;
-  }
-
-  const Register cache = rcx;
-  const Register index = rdx;
-
-  resolve_cache_and_index(f12_oop, rax, cache, index, wide ? sizeof(u2) : sizeof(u1));
+  Register result = rax;
+  Register tmp = rdx;
+  int index_size = wide ? sizeof(u2) : sizeof(u1);
+
+  Label resolved;
+
+  // We are resolved if the resolved reference cache entry contains a
+  // non-null object (String, MethodType, etc.)
+  assert_different_registers(result, tmp);
+  __ get_cache_index_at_bcp(tmp, 1, index_size);
+  __ load_resolved_reference_at_index(result, tmp);
+  __ testl(result, result);
+  __ jcc(Assembler::notZero, resolved);
+
+  address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_ldc);
+
+  // first time invocation - must resolve first
+  __ movl(tmp, (int)bytecode());
+  __ call_VM(result, entry, tmp);
+
+  __ bind(resolved);
+
   if (VerifyOops) {
-    __ verify_oop(rax);
+    __ verify_oop(result);
   }
-
-  Label L_done, L_throw_exception;
-  const Register con_klass_temp = rcx; // same as cache
-  __ load_klass(con_klass_temp, rax);
-  __ cmpptr(con_klass_temp, ExternalAddress((address)Universe::systemObjArrayKlassObj_addr()));
-  __ jcc(Assembler::notEqual, L_done);
-  __ cmpl(Address(rax, arrayOopDesc::length_offset_in_bytes()), 0);
-  __ jcc(Assembler::notEqual, L_throw_exception);
-  __ xorptr(rax, rax);
-  __ jmp(L_done);
-
-  // Load the exception from the system-array which wraps it:
-  __ bind(L_throw_exception);
-  __ load_heap_oop(rax, Address(rax, arrayOopDesc::base_offset_in_bytes(T_OBJECT)));
-  __ jump(ExternalAddress(Interpreter::throw_exception_entry()));
-
-  __ bind(L_done);
 }
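The rewritten fast_aldc above replaces the old f12_oop cp-cache probing and the exception-wrapper dance with one check of the constant pool's resolved_references cache: a non-null entry means resolved, otherwise call InterpreterRuntime::resolve_ldc once. A compilable toy model of that control flow (everything except the resolve_ldc name is an illustrative stand-in):

    struct oop { void* obj; };  // stand-in for a Java reference

    // Stand-in for the VM entry InterpreterRuntime::resolve_ldc: resolves
    // the constant and installs it in resolved_references[index].
    static oop resolve_ldc(oop* resolved_references, int index) {
      resolved_references[index].obj = resolved_references + index;  // dummy
      return resolved_references[index];
    }

    oop fast_aldc_model(oop* resolved_references, int cache_index) {
      // load_resolved_reference_at_index + testl/jcc(notZero, resolved)
      oop result = resolved_references[cache_index];
      if (result.obj == nullptr) {
        // first time invocation - must resolve first (the call_VM path)
        result = resolve_ldc(resolved_references, cache_index);
      }
      return result;  // non-null object (String, MethodType, etc.)
    }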
@@ -471 +440 @@
 
 void TemplateTable::ldc2_w() {
   transition(vtos, vtos);
   Label Long, Done;
   __ get_unsigned_2_byte_index_at_bcp(rbx, 1);
 
   __ get_cpool_and_tags(rcx, rax);
-  const int base_offset = constantPoolOopDesc::header_size() * wordSize;
-  const int tags_offset = typeArrayOopDesc::header_size(T_BYTE) * wordSize;
+  const int base_offset = ConstantPool::header_size() * wordSize;
+  const int tags_offset = Array<u1>::base_offset_in_bytes();
 
   // get type
   __ cmpb(Address(rax, rbx, Address::times_1, tags_offset), JVM_CONSTANT_Double);
   __ jccb(Assembler::notEqual, Long);
   // dtos
@@ -1326 +1295 @@
     case add: __ fadd_d (at_rsp()); break;
     case sub: __ fsubr_d(at_rsp()); break;
     case mul: {
       Label L_strict;
       Label L_join;
-      const Address access_flags (rcx, methodOopDesc::access_flags_offset());
+      const Address access_flags (rcx, Method::access_flags_offset());
       __ get_method(rcx);
       __ movl(rcx, access_flags);
       __ testl(rcx, JVM_ACC_STRICT);
       __ jccb(Assembler::notZero, L_strict);
       __ fmul_d (at_rsp());
@@ -1345 +1314 @@
       break;
     }
     case div: {
       Label L_strict;
       Label L_join;
-      const Address access_flags (rcx, methodOopDesc::access_flags_offset());
+      const Address access_flags (rcx, Method::access_flags_offset());
       __ get_method(rcx);
       __ movl(rcx, access_flags);
       __ testl(rcx, JVM_ACC_STRICT);
       __ jccb(Assembler::notZero, L_strict);
       __ fdivr_d(at_rsp());
@@ -1574 +1543 @@
 
 void TemplateTable::branch(bool is_jsr, bool is_wide) {
   __ get_method(rcx); // ECX holds method
   __ profile_taken_branch(rax,rbx); // EAX holds updated MDP, EBX holds bumped taken count
 
-  const ByteSize be_offset = methodOopDesc::backedge_counter_offset() + InvocationCounter::counter_offset();
-  const ByteSize inv_offset = methodOopDesc::invocation_counter_offset() + InvocationCounter::counter_offset();
+  const ByteSize be_offset = Method::backedge_counter_offset() + InvocationCounter::counter_offset();
+  const ByteSize inv_offset = Method::invocation_counter_offset() + InvocationCounter::counter_offset();
   const int method_offset = frame::interpreter_frame_method_offset * wordSize;
 
   // Load up EDX with the branch displacement
   __ movl(rdx, at_bcp(1));
   __ bswapl(rdx);
@@ -1593 +1562 @@
   if (is_jsr) {
     // Pre-load the next target bytecode into EBX
     __ load_unsigned_byte(rbx, Address(rsi, rdx, Address::times_1, 0));
 
     // compute return address as bci in rax,
-    __ lea(rax, at_bcp((is_wide ? 5 : 3) - in_bytes(constMethodOopDesc::codes_offset())));
-    __ subptr(rax, Address(rcx, methodOopDesc::const_offset()));
+    __ lea(rax, at_bcp((is_wide ? 5 : 3) - in_bytes(ConstMethod::codes_offset())));
+    __ subptr(rax, Address(rcx, Method::const_offset()));
     // Adjust the bcp in RSI by the displacement in EDX
     __ addptr(rsi, rdx);
     // Push return address
     __ push_i(rax);
     // jsr returns vtos
@@ -1630 +1599 @@
       Label no_mdo;
       int increment = InvocationCounter::count_increment;
       int mask = ((1 << Tier0BackedgeNotifyFreqLog) - 1) << InvocationCounter::count_shift;
       if (ProfileInterpreter) {
         // Are we profiling?
-        __ movptr(rbx, Address(rcx, in_bytes(methodOopDesc::method_data_offset())));
+        __ movptr(rbx, Address(rcx, in_bytes(Method::method_data_offset())));
         __ testptr(rbx, rbx);
         __ jccb(Assembler::zero, no_mdo);
         // Increment the MDO backedge counter
-        const Address mdo_backedge_counter(rbx, in_bytes(methodDataOopDesc::backedge_counter_offset()) +
+        const Address mdo_backedge_counter(rbx, in_bytes(MethodData::backedge_counter_offset()) +
                                                 in_bytes(InvocationCounter::counter_offset()));
         __ increment_mask_and_jump(mdo_backedge_counter, increment, mask,
                                    rax, false, Assembler::zero, &backedge_counter_overflow);
         __ jmp(dispatch);
       }
       __ bind(no_mdo);
-      // Increment backedge counter in methodOop
+      // Increment backedge counter in Method*
       __ increment_mask_and_jump(Address(rcx, be_offset), increment, mask,
                                  rax, false, Assembler::zero, &backedge_counter_overflow);
     } else {
       // increment counter
       __ movl(rax, Address(rcx, be_offset)); // load backedge counter
@@ -1670 +1639 @@
       __ cmp32(rbx,
                ExternalAddress((address) &InvocationCounter::InterpreterBackwardBranchLimit));
       __ jcc(Assembler::below, dispatch);
 
       // When ProfileInterpreter is on, the backedge_count comes from the
-      // methodDataOop, which value does not get reset on the call to
+      // MethodData*, which value does not get reset on the call to
       // frequency_counter_overflow(). To avoid excessive calls to the overflow
       // routine while the method is being compiled, add a second test to make
       // sure the overflow function is called only once every overflow_frequency.
       const int overflow_frequency = 1024;
       __ andptr(rbx, overflow_frequency-1);
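The comment block above describes simple throttling: because the MDO backedge count is not reset by frequency_counter_overflow(), the interpreter masks the count so the overflow routine runs at most once per overflow_frequency backedges while compilation is pending. The andptr plus zero test is equivalent to this C++ (a sketch of the masking only):

    // Mirrors "__ andptr(rbx, overflow_frequency-1)" followed by a zero test.
    bool should_notify_overflow(unsigned backedge_count) {
      const unsigned overflow_frequency = 1024;  // a power of two, so the
      return (backedge_count & (overflow_frequency - 1)) == 0;  // mask works
    }

So counts 1024, 2048, 3072, and so on trigger the notification; everything in between falls through to dispatch.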
@@ -1820 +1789 @@
   transition(vtos, vtos);
   locals_index(rbx);
   __ movptr(rbx, iaddress(rbx)); // get return bci, compute return bcp
   __ profile_ret(rbx, rcx);
   __ get_method(rax);
-  __ movptr(rsi, Address(rax, methodOopDesc::const_offset()));
+  __ movptr(rsi, Address(rax, Method::const_offset()));
   __ lea(rsi, Address(rsi, rbx, Address::times_1,
-                      constMethodOopDesc::codes_offset()));
+                      ConstMethod::codes_offset()));
   __ dispatch_next(vtos);
 }
 
 
 void TemplateTable::wide_ret() {
   transition(vtos, vtos);
   locals_index_wide(rbx);
   __ movptr(rbx, iaddress(rbx)); // get return bci, compute return bcp
   __ profile_ret(rbx, rcx);
   __ get_method(rax);
-  __ movptr(rsi, Address(rax, methodOopDesc::const_offset()));
-  __ lea(rsi, Address(rsi, rbx, Address::times_1, constMethodOopDesc::codes_offset()));
+  __ movptr(rsi, Address(rax, Method::const_offset()));
+  __ lea(rsi, Address(rsi, rbx, Address::times_1, ConstMethod::codes_offset()));
   __ dispatch_next(vtos);
 }
 
 
 void TemplateTable::tableswitch() {
@@ -2078 +2047 @@
   if( !os::is_MP() ) return; // Not needed on single CPU
   __ membar(order_constraint);
 }
 
 void TemplateTable::resolve_cache_and_index(int byte_no,
-                                            Register result,
                                             Register Rcache,
                                             Register index,
                                             size_t index_size) {
   const Register temp = rbx;
-  assert_different_registers(result, Rcache, index, temp);
+  assert_different_registers(Rcache, index, temp);
 
   Label resolved;
-  if (byte_no == f12_oop) {
-    // We are resolved if the f1 field contains a non-null object (CallSite, MethodType, etc.)
-    // This kind of CP cache entry does not need to match bytecode_1 or bytecode_2, because
-    // there is a 1-1 relation between bytecode type and CP entry type.
-    // The caller will also load a methodOop from f2.
-    assert(result != noreg, ""); //else do cmpptr(Address(...), (int32_t) NULL_WORD)
-    __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size);
-    __ movptr(result, Address(Rcache, index, Address::times_ptr, constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::f1_offset()));
-    __ testptr(result, result);
-    __ jcc(Assembler::notEqual, resolved);
-  } else {
-    assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
-    assert(result == noreg, ""); //else change code for setting result
-    __ get_cache_and_index_and_bytecode_at_bcp(Rcache, index, temp, byte_no, 1, index_size);
-    __ cmpl(temp, (int) bytecode()); // have we resolved this bytecode?
-    __ jcc(Assembler::equal, resolved);
-  }
+  assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
+  __ get_cache_and_index_and_bytecode_at_bcp(Rcache, index, temp, byte_no, 1, index_size);
+  __ cmpl(temp, (int) bytecode()); // have we resolved this bytecode?
+  __ jcc(Assembler::equal, resolved);
 
   // resolve first time through
   address entry;
   switch (bytecode()) {
     case Bytecodes::_getstatic : // fall through
@@ -2117 +2072 @@
     case Bytecodes::_invokespecial : // fall through
     case Bytecodes::_invokestatic : // fall through
     case Bytecodes::_invokeinterface: entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invoke); break;
     case Bytecodes::_invokehandle : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokehandle); break;
     case Bytecodes::_invokedynamic : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokedynamic); break;
-    case Bytecodes::_fast_aldc : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_ldc); break;
-    case Bytecodes::_fast_aldc_w : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_ldc); break;
     default:
       fatal(err_msg("unexpected bytecode: %s", Bytecodes::name(bytecode())));
       break;
   }
   __ movl(temp, (int)bytecode());
   __ call_VM(noreg, entry, temp);
   // Update registers with resolved info
   __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size);
-  if (result != noreg)
-    __ movptr(result, Address(Rcache, index, Address::times_ptr, constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::f1_offset()));
   __ bind(resolved);
 }
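With the f12_oop branch gone, resolve_cache_and_index is back to a single shape: read the bytecode recorded in the cp-cache entry, and only call the matching InterpreterRuntime resolver when it does not match the bytecode being executed. A toy model of that fast/slow split (types and the resolver are illustrative stand-ins):

    struct CpCacheEntry {
      int resolved_bytecode;  // which bytecode this entry was resolved for
    };

    static void resolve_in_vm(CpCacheEntry& e, int bytecode) {  // call_VM stand-in
      e.resolved_bytecode = bytecode;
    }

    void resolve_cache_and_index_model(CpCacheEntry& e, int bytecode) {
      if (e.resolved_bytecode == bytecode) return;  // "have we resolved this bytecode?"
      resolve_in_vm(e, bytecode);                   // resolve first time through
    }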
@@ -2136 +2087 @@
 
 
 // The cache and index registers must be set before call
@@ -2142 +2093 @@
                                               Register off,
                                               Register flags,
                                               bool is_static = false) {
   assert_different_registers(cache, index, flags, off);
 
-  ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset();
+  ByteSize cp_base_offset = ConstantPoolCache::base_offset();
   // Field offset
   __ movptr(off, Address(cache, index, Address::times_ptr,
                          in_bytes(cp_base_offset + ConstantPoolCacheEntry::f2_offset())));
   // Flags
   __ movl(flags, Address(cache, index, Address::times_ptr,
@@ -2154 +2105 @@
 
   // klass overwrite register
   if (is_static) {
     __ movptr(obj, Address(cache, index, Address::times_ptr,
                            in_bytes(cp_base_offset + ConstantPoolCacheEntry::f1_offset())));
+    const int mirror_offset = in_bytes(Klass::java_mirror_offset());
+    __ movptr(obj, Address(obj, mirror_offset));
   }
 }
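The two added lines are the visible cost of PermGen removal here: for a static field, f1 now holds a Klass* (metadata) rather than the holder oop, and the static fields themselves live in the class's java.lang.Class mirror, so the generated code must take one extra load through Klass::java_mirror_offset(). A sketch of what the obj register ends up pointing at (types are illustrative):

    struct oop_obj {};  // a Java object; static fields live inside the mirror

    struct KlassModel {
      oop_obj* java_mirror;  // the java.lang.Class instance for this klass
    };

    // f1 gives us the Klass*; the field address is computed off the mirror.
    oop_obj* static_field_holder(KlassModel* f1_klass) {
      return f1_klass->java_mirror;  // __ movptr(obj, Address(obj, mirror_offset))
    }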
@@ -2161 +2114 @@
 
 void TemplateTable::load_invoke_cp_cache_entry(int byte_no,
                                                Register method,
@@ -2174 +2127 @@
   assert_different_registers(itable_index, flags);
   assert_different_registers(itable_index, cache, index);
   // determine constant pool cache field offsets
   assert(is_invokevirtual == (byte_no == f2_byte), "is_invokevirtual flag redundant");
   const int method_offset = in_bytes(
-    constantPoolCacheOopDesc::base_offset() +
+    ConstantPoolCache::base_offset() +
       ((byte_no == f2_byte)
        ? ConstantPoolCacheEntry::f2_offset()
        : ConstantPoolCacheEntry::f1_offset()));
-  const int flags_offset = in_bytes(constantPoolCacheOopDesc::base_offset() +
+  const int flags_offset = in_bytes(ConstantPoolCache::base_offset() +
                                     ConstantPoolCacheEntry::flags_offset());
   // access constant pool cache fields
-  const int index_offset = in_bytes(constantPoolCacheOopDesc::base_offset() +
+  const int index_offset = in_bytes(ConstantPoolCache::base_offset() +
                                     ConstantPoolCacheEntry::f2_offset());
 
-  if (byte_no == f12_oop) {
-    // Resolved f1_oop (CallSite, MethodType, etc.) goes into 'itable_index'.
-    // Resolved f2_oop (methodOop invoker) will go into 'method' (at index_offset).
-    // See ConstantPoolCacheEntry::set_dynamic_call and set_method_handle.
-    size_t index_size = (is_invokedynamic ? sizeof(u4) : sizeof(u2));
-    resolve_cache_and_index(byte_no, itable_index, cache, index, index_size);
-    __ movptr(method, Address(cache, index, Address::times_ptr, index_offset));
-    itable_index = noreg; // hack to disable load below
-  } else {
-    resolve_cache_and_index(byte_no, noreg, cache, index, sizeof(u2));
-    __ movptr(method, Address(cache, index, Address::times_ptr, method_offset));
-  }
+  size_t index_size = (is_invokedynamic ? sizeof(u4) : sizeof(u2));
+  resolve_cache_and_index(byte_no, cache, index, index_size);
+  __ movptr(method, Address(cache, index, Address::times_ptr, method_offset));
+
   if (itable_index != noreg) {
-    // pick up itable index from f2 also:
-    assert(byte_no == f1_byte, "already picked up f1");
     __ movptr(itable_index, Address(cache, index, Address::times_ptr, index_offset));
   }
   __ movl(flags, Address(cache, index, Address::times_ptr, flags_offset));
 }
 
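The entry layout logic is unchanged, the method comes from f1 or f2 depending on byte_no, and f2 doubles as the itable index for invokeinterface, but both slots now hold metadata pointers instead of oops, which is what lets the f12_oop special case and its oop loads disappear. A sketch of the slot selection (field names mirror f1/f2/flags; the enum values are illustrative):

    #include <cstdint>

    struct InvokeCacheEntry {
      void*    f1;     // Klass* or Method*, depending on the invoke kind
      intptr_t f2;     // Method* if vfinal, else a vtable/itable index
      int      flags;
    };

    enum ByteNo { f1_byte = 1, f2_byte = 2 };  // illustrative values

    // Mirrors method_offset above: byte_no picks the word the method is in.
    intptr_t method_word(const InvokeCacheEntry& e, int byte_no) {
      return (byte_no == f2_byte) ? e.f2 : (intptr_t)e.f1;
    }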
@@ -2221 +2164 @@
     __ mov32(rax, ExternalAddress((address) JvmtiExport::get_field_access_count_addr()));
     __ testl(rax,rax);
     __ jcc(Assembler::zero, L1);
 
     // cache entry pointer
-    __ addptr(cache, in_bytes(constantPoolCacheOopDesc::base_offset()));
+    __ addptr(cache, in_bytes(ConstantPoolCache::base_offset()));
     __ shll(index, LogBytesPerWord);
     __ addptr(cache, index);
     if (is_static) {
       __ xorptr(rax, rax); // NULL object reference
     } else {
@@ -2255 +2198 @@
   const Register index = rdx;
   const Register obj = rcx;
   const Register off = rbx;
   const Register flags = rax;
 
-  resolve_cache_and_index(byte_no, noreg, cache, index, sizeof(u2));
+  resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
   jvmti_post_field_access(cache, index, is_static, false);
   load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
 
   if (!is_static) pop_and_check_object(obj);
 
@@ -2393 +2336 @@
 
 // The registers cache and index expected to be set before call.
 // The function may destroy various registers, just not the cache and index registers.
 void TemplateTable::jvmti_post_field_mod(Register cache, Register index, bool is_static) {
 
-  ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset();
+  ByteSize cp_base_offset = ConstantPoolCache::base_offset();
 
   if (JvmtiExport::can_post_field_modification()) {
     // Check to see if a field modification watch has been set before we take
     // the time to call into the VM.
     Label L1;
@@ -2464 +2407 @@
   const Register index = rdx;
   const Register obj = rcx;
   const Register off = rbx;
   const Register flags = rax;
 
-  resolve_cache_and_index(byte_no, noreg, cache, index, sizeof(u2));
+  resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
   jvmti_post_field_mod(cache, index, is_static);
   load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
 
   // Doug Lea believes this is not needed with current Sparcs (TSO) and Intel (PSO).
   // volatile_barrier( );
@@ -2706 +2649 @@
 }
 
 void TemplateTable::fast_storefield(TosState state) {
   transition(state, vtos);
 
-  ByteSize base = constantPoolCacheOopDesc::base_offset();
+  ByteSize base = ConstantPoolCache::base_offset();
 
   jvmti_post_fast_field_mod();
 
   // access constant pool cache
   __ get_cache_and_index_at_bcp(rcx, rbx, 1);
@@ -2825 +2768 @@
   __ get_cache_and_index_at_bcp(rcx, rbx, 1);
   // replace index with field offset from cache entry
   __ movptr(rbx, Address(rcx,
                          rbx,
                          Address::times_ptr,
-                         in_bytes(constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::f2_offset())));
+                         in_bytes(ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::f2_offset())));
 
 
   // rax,: object
   __ verify_oop(rax);
   __ null_check(rax);
@@ -2862 +2805 @@
   // access constant pool cache
   __ get_cache_and_index_at_bcp(rcx, rdx, 2);
   __ movptr(rbx, Address(rcx,
                          rdx,
                          Address::times_ptr,
-                         in_bytes(constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::f2_offset())));
+                         in_bytes(ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::f2_offset())));
   // make sure exception is reported in correct bcp range (getfield is next instruction)
   __ increment(rsi);
   __ null_check(rax);
   const Address lo = Address(rax, rbx, Address::times_1, 0*wordSize);
   if (state == itos) {
@@ -2924 +2867 @@
   load_invoke_cp_cache_entry(byte_no, method, index, flags, is_invokevirtual, false, is_invokedynamic);
 
   // maybe push appendix to arguments (just before return address)
   if (is_invokedynamic || is_invokehandle) {
     Label L_no_push;
-    __ verify_oop(index);
     __ testl(flags, (1 << ConstantPoolCacheEntry::has_appendix_shift));
     __ jccb(Assembler::zero, L_no_push);
     // Push the appendix as a trailing parameter.
     // This must be done before we get the receiver,
     // since the parameter_size includes it.
+    __ push(rbx);
+    __ mov(rbx, index);
+    __ load_resolved_reference_at_index(index, rbx);
+    __ pop(rbx);
     __ push(index); // push appendix (MethodType, CallSite, etc.)
     __ bind(L_no_push);
   }
 
   // load receiver if needed (note: no return address pushed yet)
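The appendix (MethodType, CallSite, and friends) is an oop, so after this change it too comes out of resolved_references rather than out of the cp-cache entry: the rbx save/restore around load_resolved_reference_at_index just frees a register to turn the stored index into the object that gets pushed as the trailing argument. As a compact model (has_appendix_shift's value here is illustrative):

    #include <vector>

    struct oop { void* obj; };

    const int has_appendix_shift = 0;  // illustrative; the real constant differs

    // Returns true and sets *out if the call site has an appendix argument.
    bool maybe_load_appendix(int flags, int index,
                             const std::vector<oop>& resolved_references,
                             oop* out) {
      if ((flags & (1 << has_appendix_shift)) == 0) return false;  // L_no_push
      *out = resolved_references[index];  // load_resolved_reference_at_index
      return true;  // caller pushes *out just before the return address
    }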
@@ -2990 +2936 @@
   __ andl(rax, (1 << ConstantPoolCacheEntry::is_vfinal_shift));
   __ jcc(Assembler::zero, notFinal);
 
   const Register method = index; // method must be rbx
   assert(method == rbx,
-         "methodOop must be rbx for interpreter calling convention");
+         "Method* must be rbx for interpreter calling convention");
 
   // do the call - the index is actually the method to call
-  // that is, f2 is a vtable index if !is_vfinal, else f2 is a methodOop
-  __ verify_oop(method);
+  // that is, f2 is a vtable index if !is_vfinal, else f2 is a Method*
 
   // It's final, need a null check here!
   __ null_check(recv);
 
   // profile this call
@@ -3009 +2954 @@
   __ bind(notFinal);
 
   // get receiver klass
   __ null_check(recv, oopDesc::klass_offset_in_bytes());
   __ load_klass(rax, recv);
-  __ verify_oop(rax);
 
   // profile this call
   __ profile_virtual_call(rax, rdi, rdx);
 
-  // get target methodOop & entry point
+  // get target Method* & entry point
   __ lookup_virtual_method(rax, index, method);
   __ jump_from_interpreted(method, rdx);
 }
 
 
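invokevirtual_helper keeps its two-way split: if is_vfinal is set, f2 already is the target Method* and the call is direct after a receiver null check; otherwise f2 is a vtable index and lookup_virtual_method indexes the receiver klass's vtable. The dropped verify_oop calls are correct, not cosmetic, since a Method* is no longer an oop and would fail oop verification. The decision, as a toy model (is_vfinal_shift's value here is illustrative):

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    struct MethodModel {};
    struct KlassModel { std::vector<MethodModel*> vtable; };
    struct Receiver   { KlassModel* klass; };

    const int is_vfinal_shift = 1;  // illustrative; the real constant differs

    MethodModel* select_virtual_target(int flags, intptr_t f2, Receiver* recv) {
      if (flags & (1 << is_vfinal_shift)) {
        return (MethodModel*)f2;  // f2 is the Method* itself (final method)
      }
      // f2 is a vtable index: lookup_virtual_method on the receiver's klass.
      return recv->klass->vtable[(size_t)f2];
    }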
@@ -3039 +2983 @@
 
 
 void TemplateTable::invokespecial(int byte_no) {
   transition(vtos, vtos);
   assert(byte_no == f1_byte, "use this argument");
-  prepare_invoke(byte_no, rbx, noreg, // get f1 methodOop
+  prepare_invoke(byte_no, rbx, noreg, // get f1 Method*
                  rcx); // get receiver also for null check
   __ verify_oop(rcx);
   __ null_check(rcx);
   // do the call
-  __ verify_oop(rbx);
   __ profile_call(rax);
   __ jump_from_interpreted(rbx, rax);
 }
 
 
 void TemplateTable::invokestatic(int byte_no) {
   transition(vtos, vtos);
   assert(byte_no == f1_byte, "use this argument");
-  prepare_invoke(byte_no, rbx); // get f1 methodOop
+  prepare_invoke(byte_no, rbx); // get f1 Method*
   // do the call
-  __ verify_oop(rbx);
   __ profile_call(rax);
   __ jump_from_interpreted(rbx, rax);
 }
 
 
@@ -3071 +3013 @@
 
 
 void TemplateTable::invokeinterface(int byte_no) {
   transition(vtos, vtos);
   assert(byte_no == f1_byte, "use this argument");
-  prepare_invoke(byte_no, rax, rbx, // get f1 klassOop, f2 itable index
+  prepare_invoke(byte_no, rax, rbx, // get f1 Klass*, f2 itable index
                  rcx, rdx); // recv, flags
 
   // rax: interface klass (from f1)
   // rbx: itable index (from f2)
   // rcx: receiver
@@ -3095 +3037 @@
 
   // Get receiver klass into rdx - also a null check
   __ restore_locals(); // restore rdi
   __ null_check(rcx, oopDesc::klass_offset_in_bytes());
   __ load_klass(rdx, rcx);
-  __ verify_oop(rdx);
 
   // profile this call
   __ profile_virtual_call(rdx, rsi, rdi);
 
   Label no_such_interface, no_such_method;
@@ -3108 +3049 @@
                              rdx, rax, rbx,
                              // outputs: method, scan temp. reg
                              rbx, rsi,
                              no_such_interface);
 
-  // rbx: methodOop to call
+  // rbx: Method* to call
   // rcx: receiver
   // Check for abstract method error
   // Note: This should be done more efficiently via a throw_abstract_method_error
   // interpreter entry point and a conditional jump to it in case of a null
   // method.
   __ testptr(rbx, rbx);
   __ jcc(Assembler::zero, no_such_method);
 
   // do the call
   // rcx: receiver
-  // rbx,: methodOop
+  // rbx,: Method*
   __ jump_from_interpreted(rbx, rdx);
   __ should_not_reach_here();
 
   // exception handling code follows...
   // note: must restore interpreter registers to canonical
@@ -3149 +3090 @@
   __ should_not_reach_here();
 }
 
 void TemplateTable::invokehandle(int byte_no) {
   transition(vtos, vtos);
-  assert(byte_no == f12_oop, "use this argument");
+  assert(byte_no == f1_byte, "use this argument");
   const Register rbx_method = rbx; // (from f2)
   const Register rax_mtype = rax; // (from f1)
   const Register rcx_recv = rcx;
   const Register rdx_flags = rdx;
 
@@ -3162 +3103 @@
     __ should_not_reach_here();
     return;
   }
 
   prepare_invoke(byte_no,
-                 rbx_method, rax_mtype, // get f2 methodOop, f1 MethodType
+                 rbx_method, rax_mtype, // get f2 Method*, f1 MethodType
                  rcx_recv);
   __ verify_oop(rbx_method);
   __ verify_oop(rcx_recv);
   __ null_check(rcx_recv);
 
@@ -3179 +3120 @@
 }
 
 
 void TemplateTable::invokedynamic(int byte_no) {
   transition(vtos, vtos);
-  assert(byte_no == f12_oop, "use this argument");
+  assert(byte_no == f1_byte, "use this argument");
 
   if (!EnableInvokeDynamic) {
     // We should not encounter this bytecode if !EnableInvokeDynamic.
     // The verifier will stop it. However, if we get past the verifier,
     // this will stop the thread in a reasonable way, without crashing the JVM.
@@ -3197 +3138 @@
   const Register rbx_method = rbx;
   const Register rax_callsite = rax;
 
   prepare_invoke(byte_no, rbx_method, rax_callsite);
 
-  // rax: CallSite object (from f1)
+  // rax: CallSite object (from cpool->resolved_references[])
   // rbx: MH.linkToCallSite method (from f2)
 
   // Note: rax_callsite is already pushed by prepare_invoke
 
   // %%% should make a type profile for any invokedynamic that takes a ref argument
@@ -3227 +3168 @@
   Label allocate_shared;
 
   __ get_cpool_and_tags(rcx, rax);
 
   // Make sure the class we're about to instantiate has been resolved.
-  // This is done before loading instanceKlass to be consistent with the order
-  // how Constant Pool is updated (see constantPoolOopDesc::klass_at_put)
-  const int tags_offset = typeArrayOopDesc::header_size(T_BYTE) * wordSize;
+  // This is done before loading InstanceKlass to be consistent with the order
+  // how Constant Pool is updated (see ConstantPool::klass_at_put)
+  const int tags_offset = Array<u1>::base_offset_in_bytes();
   __ cmpb(Address(rax, rdx, Address::times_1, tags_offset), JVM_CONSTANT_Class);
   __ jcc(Assembler::notEqual, slow_case_no_pop);
 
-  // get instanceKlass
-  __ movptr(rcx, Address(rcx, rdx, Address::times_ptr, sizeof(constantPoolOopDesc)));
+  // get InstanceKlass
+  __ movptr(rcx, Address(rcx, rdx, Address::times_ptr, sizeof(ConstantPool)));
   __ push(rcx); // save the contexts of klass for initializing the header
 
   // make sure klass is initialized & doesn't have finalizer
   // make sure klass is fully initialized
-  __ cmpb(Address(rcx, instanceKlass::init_state_offset()), instanceKlass::fully_initialized);
+  __ cmpb(Address(rcx, InstanceKlass::init_state_offset()), InstanceKlass::fully_initialized);
   __ jcc(Assembler::notEqual, slow_case);
 
-  // get instance_size in instanceKlass (scaled to a count of bytes)
+  // get instance_size in InstanceKlass (scaled to a count of bytes)
   __ movl(rdx, Address(rcx, Klass::layout_helper_offset()));
   // test to see if it has a finalizer or is malformed in some way
   __ testl(rdx, Klass::_lh_instance_slow_path_bit);
   __ jcc(Assembler::notZero, slow_case);
 
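The allocation fast path works because Klass::layout_helper packs the instance size (already a byte count) together with a low tag bit, _lh_instance_slow_path_bit, that is set for classes needing the slow path (for example, ones with a finalizer), so one testl answers both questions. A sketch of the decode under that assumption (the bit value is illustrative):

    #include <cstdint>

    const int32_t lh_instance_slow_path_bit = 0x01;  // illustrative encoding

    // Returns true if inline allocation may proceed, storing the instance
    // size in *size_in_bytes; false sends the caller to the slow case.
    bool fast_alloc_size(int32_t layout_helper, int32_t* size_in_bytes) {
      if (layout_helper & lh_instance_slow_path_bit) return false;  // slow_case
      *size_in_bytes = layout_helper;  // scaled to a count of bytes
      return true;
    }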
@@ -3412 +3353 @@
 
   // Get cpool & tags index
   __ get_cpool_and_tags(rcx, rdx); // ECX=cpool, EDX=tags array
   __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // EBX=index
   // See if bytecode has already been quicked
-  __ cmpb(Address(rdx, rbx, Address::times_1, typeArrayOopDesc::header_size(T_BYTE) * wordSize), JVM_CONSTANT_Class);
+  __ cmpb(Address(rdx, rbx, Address::times_1, Array<u1>::base_offset_in_bytes()), JVM_CONSTANT_Class);
   __ jcc(Assembler::equal, quicked);
 
   __ push(atos);
-  call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc) );
+  call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc) );
+  // vm_result_2 has metadata result
+  // borrow rdi from locals
+  __ get_thread(rdi);
+  __ get_vm_result_2(rax, rdi);
+  __ restore_locals();
   __ pop_ptr(rdx);
   __ jmpb(resolved);
 
   // Get superklass in EAX and subklass in EBX
   __ bind(quicked);
   __ mov(rdx, rax); // Save object in EDX; EAX needed for subtype check
-  __ movptr(rax, Address(rcx, rbx, Address::times_ptr, sizeof(constantPoolOopDesc)));
+  __ movptr(rax, Address(rcx, rbx, Address::times_ptr, sizeof(ConstantPool)));
 
   __ bind(resolved);
   __ load_klass(rbx, rdx);
 
   // Generate subtype check. Blows ECX. Resets EDI. Object in EDX.
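The five added lines exist because quicken_io_cc now hands back a Klass*, which is metadata, not an oop, so it cannot travel through the ordinary oop result protocol; instead the VM parks it in the thread-local vm_result_2 slot and the interpreter fetches it with get_vm_result_2 (which also clears the slot), borrowing rdi to reach the thread. A toy model of that handoff (types are illustrative stand-ins):

    struct KlassModel {};

    struct ThreadModel {                  // stand-in for JavaThread
      KlassModel* vm_result_2 = nullptr;  // metadata result slot
    };

    // Mirrors get_vm_result_2: read the slot, then clear it.
    KlassModel* get_vm_result_2_model(ThreadModel* thread) {
      KlassModel* k = thread->vm_result_2;
      thread->vm_result_2 = nullptr;      // the slot is consumed on read
      return k;
    }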
@@ -3463 +3409 @@
 
   // Get cpool & tags index
   __ get_cpool_and_tags(rcx, rdx); // ECX=cpool, EDX=tags array
   __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // EBX=index
   // See if bytecode has already been quicked
-  __ cmpb(Address(rdx, rbx, Address::times_1, typeArrayOopDesc::header_size(T_BYTE) * wordSize), JVM_CONSTANT_Class);
+  __ cmpb(Address(rdx, rbx, Address::times_1, Array<u1>::base_offset_in_bytes()), JVM_CONSTANT_Class);
   __ jcc(Assembler::equal, quicked);
 
   __ push(atos);
-  call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc) );
+  call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc) );
+  // vm_result_2 has metadata result
+  // borrow rdi from locals
+  __ get_thread(rdi);
+  __ get_vm_result_2(rax, rdi);
+  __ restore_locals();
   __ pop_ptr(rdx);
   __ load_klass(rdx, rdx);
   __ jmp(resolved);
 
   // Get superklass in EAX and subklass in EDX
   __ bind(quicked);
   __ load_klass(rdx, rax);
-  __ movptr(rax, Address(rcx, rbx, Address::times_ptr, sizeof(constantPoolOopDesc)));
+  __ movptr(rax, Address(rcx, rbx, Address::times_ptr, sizeof(ConstantPool)));
 
   __ bind(resolved);
 
   // Generate subtype check. Blows ECX. Resets EDI.
   // Superklass in EAX. Subklass in EDX.