comparison src/share/vm/interpreter/bytecodeInterpreter.cpp @ 11198:1e6d5dec4a4e

Merge.
author Christian Humer <christian.humer@gmail.com>
date Mon, 05 Aug 2013 13:20:06 +0200
parents a023ec3452c7
children de6a9e811145 6cc7093e1341
comparing 11197:3479ab380552 with 11198:1e6d5dec4a4e

@@ -479,13 +479,13 @@
     // account for extra_stack_entries() anymore because at the time when it is called
     // EnableInvokeDynamic was already set to false.
     // So we have a second version of the assertion which handles the case where EnableInvokeDynamic was
     // switched off because of the wrong classes.
     if (EnableInvokeDynamic || FLAG_IS_CMDLINE(EnableInvokeDynamic)) {
-      assert(abs(istate->_stack_base - istate->_stack_limit) == (istate->_method->max_stack() + 1), "bad stack limit");
+      assert(labs(istate->_stack_base - istate->_stack_limit) == (istate->_method->max_stack() + 1), "bad stack limit");
     } else {
-      const int extra_stack_entries = Method::extra_stack_entries_for_indy;
+      const int extra_stack_entries = Method::extra_stack_entries_for_jsr292;
       assert(labs(istate->_stack_base - istate->_stack_limit) == (istate->_method->max_stack() + extra_stack_entries
                                                                   + 1), "bad stack limit");
     }
 #ifndef SHARK
     IA32_ONLY(assert(istate->_stack_limit == istate->_thread->last_Java_sp() + 1, "wrong"));
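
The abs -> labs switch in the hunk above matters because istate->_stack_base - istate->_stack_limit is a pointer difference, which is 64 bits wide on an LP64 platform, while the C library's abs() takes and returns int. The difference here is expected to be small (it is compared against max_stack() + 1), so the change reads mainly as using the correctly typed call and avoiding the implicit narrowing. A minimal standalone sketch of the underlying hazard, with values invented purely for illustration (this is not HotSpot code):

    #include <cstddef>
    #include <cstdio>
    #include <cstdlib>   // abs(int), labs(long)

    int main() {
      // Stand-in for a pointer difference such as _stack_base - _stack_limit:
      // ptrdiff_t is 64 bits wide on LP64, so it can hold values int cannot.
      ptrdiff_t diff = 0x100000001LL;   // deliberately wider than 32 bits

      // Routing the value through the int-only abs() narrows it first and
      // silently drops the high bits (most compilers also warn about this).
      printf("abs  : %d\n",  abs(static_cast<int>(diff)));    // typically 1
      // labs() keeps the full long width, which is what the assert needs.
      printf("labs : %ld\n", labs(static_cast<long>(diff)));  // 4294967297
      return 0;
    }
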
@@ -1579,23 +1579,23 @@
 
       /* 32-bit loads. These handle conversion from < 32-bit types */
 #define ARRAY_LOADTO32(T, T2, format, stackRes, extra) \
       { \
           ARRAY_INTRO(-2); \
-          extra; \
+          (void)extra; \
           SET_ ## stackRes(*(T2 *)(((address) arrObj->base(T)) + index * sizeof(T2)), \
                            -2); \
           UPDATE_PC_AND_TOS_AND_CONTINUE(1, -1); \
       }
 
       /* 64-bit loads */
 #define ARRAY_LOADTO64(T,T2, stackRes, extra) \
       { \
           ARRAY_INTRO(-2); \
           SET_ ## stackRes(*(T2 *)(((address) arrObj->base(T)) + index * sizeof(T2)), -1); \
-          extra; \
+          (void)extra; \
           UPDATE_PC_AND_CONTINUE(1); \
       }
 
       CASE(_iaload):
           ARRAY_LOADTO32(T_INT, jint, "%d", STACK_INT, 0);
       CASE(_faload):
@@ -1615,20 +1615,20 @@
 
       /* 32-bit stores. These handle conversion to < 32-bit types */
 #define ARRAY_STOREFROM32(T, T2, format, stackSrc, extra) \
       { \
           ARRAY_INTRO(-3); \
-          extra; \
+          (void)extra; \
           *(T2 *)(((address) arrObj->base(T)) + index * sizeof(T2)) = stackSrc( -1); \
           UPDATE_PC_AND_TOS_AND_CONTINUE(1, -3); \
       }
 
       /* 64-bit stores */
 #define ARRAY_STOREFROM64(T, T2, stackSrc, extra) \
       { \
           ARRAY_INTRO(-4); \
-          extra; \
+          (void)extra; \
           *(T2 *)(((address) arrObj->base(T)) + index * sizeof(T2)) = stackSrc( -1); \
           UPDATE_PC_AND_TOS_AND_CONTINUE(1, -4); \
       }
 
       CASE(_iastore):
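
The extra; -> (void)extra; edits in the array load/store macros keep the parameter's side effects while discarding its value explicitly. Several call sites pass a bare literal as extra, for example ARRAY_LOADTO32(T_INT, jint, "%d", STACK_INT, 0), and a statement consisting only of 0 draws an unused-value warning from some compilers, which the void cast silences. A simplified, self-contained illustration of the pattern (the macro below is a toy, not the interpreter's):

    #include <cstdio>

    // Toy stand-in for the ARRAY_LOADTO32-style macros: 'extra' may be a real
    // expression or just a literal 0, as at the interpreter call sites.
    #define LOAD_ELEMENT(arr, idx, extra)                                       \
      {                                                                         \
        (void)extra;  /* evaluated, value discarded, no unused-value warning */ \
        printf("loaded %d\n", (arr)[(idx)]);                                    \
      }

    int main() {
      int a[] = { 10, 20, 30 };
      LOAD_ELEMENT(a, 0, 0);                  // 'extra' is only a literal 0
      LOAD_ELEMENT(a, 2, puts("extra ran"));  // 'extra' with a side effect still executes
      return 0;
    }
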
@@ -2231,11 +2231,11 @@
                   handle_exception);
           cache = cp->constant_pool()->invokedynamic_cp_cache_entry_at(index);
         }
 
         Method* method = cache->f1_as_method();
-        VERIFY_OOP(method);
+        if (VerifyOops) method->verify();
 
         if (cache->has_appendix()) {
           ConstantPool* constants = METHOD->constants();
           SET_STACK_OBJECT(cache->appendix_if_resolved(constants), 0);
           MORE_STACK(1);
@@ -2263,12 +2263,11 @@
                   handle_exception);
           cache = cp->entry_at(index);
         }
 
         Method* method = cache->f1_as_method();
-
-        VERIFY_OOP(method);
+        if (VerifyOops) method->verify();
 
         if (cache->has_appendix()) {
           ConstantPool* constants = METHOD->constants();
           SET_STACK_OBJECT(cache->appendix_if_resolved(constants), 0);
           MORE_STACK(1);
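
The last two hunks replace VERIFY_OOP(method) with an explicit, flag-guarded call to the method's own verifier. In this codebase a Method* is metadata rather than a Java heap object (oop), so routing it through an oop-verification macro is a type mismatch; the new code checks the callee through its own verify() entry point, and only when VerifyOops is enabled. A sketch of the calling pattern with toy stand-in types (nothing below is HotSpot code; the names and fields are invented):

    #include <cassert>
    #include <cstdio>

    // Toy stand-ins only: real HotSpot types look nothing like this.
    struct Method {                // metadata describing a method, not an oop
      int max_stack;
      void verify() const {        // the type checks its own invariants
        assert(max_stack >= 0 && "bad Method metadata");
      }
    };

    bool VerifyOops = true;        // stand-in for the develop-time flag

    void invoke(const Method* m) {
      // Pattern from the diff: guard the verification behind the runtime
      // flag and call the type's own verifier instead of an oop-only macro.
      if (VerifyOops) m->verify();
      printf("invoking, max_stack = %d\n", m->max_stack);
    }

    int main() {
      Method m = { 8 };
      invoke(&m);
      return 0;
    }
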