comparison src/share/vm/opto/output.cpp @ 18041:52b4284cb496

Merge with jdk8u20-b26
author Gilles Duboscq <duboscq@ssw.jku.at>
date Wed, 15 Oct 2014 16:02:50 +0200
parents 562643f42b65 0bf37f737702
children 7848fc12602b
comparing 17606:45d7b2c7029d with 18041:52b4284cb496
40 #include "opto/subnode.hpp" 40 #include "opto/subnode.hpp"
41 #include "opto/type.hpp" 41 #include "opto/type.hpp"
42 #include "runtime/handles.inline.hpp" 42 #include "runtime/handles.inline.hpp"
43 #include "utilities/xmlstream.hpp" 43 #include "utilities/xmlstream.hpp"
44 44
45 extern uint size_exception_handler();
46 extern uint size_deopt_handler();
47
48 #ifndef PRODUCT 45 #ifndef PRODUCT
49 #define DEBUG_ARG(x) , x 46 #define DEBUG_ARG(x) , x
50 #else 47 #else
51 #define DEBUG_ARG(x) 48 #define DEBUG_ARG(x)
52 #endif 49 #endif
53
54 extern int emit_exception_handler(CodeBuffer &cbuf);
55 extern int emit_deopt_handler(CodeBuffer &cbuf);
56 50
57 // Convert Nodes to instruction bits and pass off to the VM 51 // Convert Nodes to instruction bits and pass off to the VM
58 void Compile::Output() { 52 void Compile::Output() {
59 // RootNode goes 53 // RootNode goes
60 assert( _cfg->get_root_block()->number_of_nodes() == 0, "" ); 54 assert( _cfg->get_root_block()->number_of_nodes() == 0, "" );
169 163
170 bool Compile::need_stack_bang(int frame_size_in_bytes) const { 164 bool Compile::need_stack_bang(int frame_size_in_bytes) const {
171 // Determine if we need to generate a stack overflow check. 165 // Determine if we need to generate a stack overflow check.
172 // Do it if the method is not a stub function and 166 // Do it if the method is not a stub function and
173 // has java calls or has frame size > vm_page_size/8. 167 // has java calls or has frame size > vm_page_size/8.
168 // The debug VM checks that deoptimization doesn't trigger an
169 // unexpected stack overflow (compiled method stack banging should
170 // guarantee it doesn't happen) so we always need the stack bang in
171 // a debug VM.
174 return (UseStackBanging && stub_function() == NULL && 172 return (UseStackBanging && stub_function() == NULL &&
175 (has_java_calls() || frame_size_in_bytes > os::vm_page_size()>>3)); 173 (has_java_calls() || frame_size_in_bytes > os::vm_page_size()>>3
174 DEBUG_ONLY(|| true)));
176 } 175 }
177 176
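Note on the need_stack_bang() change above: the inserted DEBUG_ONLY(|| true) makes the check unconditionally true in debug (ASSERT) builds, as the new comment explains: the debug VM verifies that deoptimization never hits an unexpected stack overflow, so compiled code must always bang the stack there. A minimal standalone sketch of the idiom, assuming a DEBUG_ONLY macro like the one in utilities/macros.hpp; the fields are invented stand-ins for the real Compile/os state:

    #include <iostream>

    #ifdef ASSERT
    #define DEBUG_ONLY(code) code
    #else
    #define DEBUG_ONLY(code)
    #endif

    // Invented stand-ins for the real Compile/os state.
    static bool has_java_calls_ = false;
    static int  vm_page_size_   = 4096;

    static bool need_stack_bang(int frame_size_in_bytes) {
      // In an ASSERT build the trailing "|| true" forces the bang.
      return (has_java_calls_ || frame_size_in_bytes > (vm_page_size_ >> 3)
              DEBUG_ONLY(|| true));
    }

    int main() {
      // Prints 0 in a product-style build, 1 when compiled with -DASSERT.
      std::cout << need_stack_bang(64) << std::endl;
      return 0;
    }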
178 bool Compile::need_register_stack_bang() const { 177 bool Compile::need_register_stack_bang() const {
179 // Determine if we need to generate a register stack overflow check. 178 // Determine if we need to generate a register stack overflow check.
180 // This is only used on architectures which have split register 179 // This is only used on architectures which have split register
392 if (nj->is_Mach()) { 391 if (nj->is_Mach()) {
393 MachNode *mach = nj->as_Mach(); 392 MachNode *mach = nj->as_Mach();
394 blk_size += (mach->alignment_required() - 1) * relocInfo::addr_unit(); // assume worst case padding 393 blk_size += (mach->alignment_required() - 1) * relocInfo::addr_unit(); // assume worst case padding
395 reloc_size += mach->reloc(); 394 reloc_size += mach->reloc();
396 if (mach->is_MachCall()) { 395 if (mach->is_MachCall()) {
396 // add size information for trampoline stub
397 // class CallStubImpl is platform-specific and defined in the *.ad files.
398 stub_size += CallStubImpl::size_call_trampoline();
399 reloc_size += CallStubImpl::reloc_call_trampoline();
400
397 MachCallNode *mcall = mach->as_MachCall(); 401 MachCallNode *mcall = mach->as_MachCall();
398 // This destination address is NOT PC-relative 402 // This destination address is NOT PC-relative
399 403
400 mcall->method_set((intptr_t)mcall->entry_point()); 404 mcall->method_set((intptr_t)mcall->entry_point());
401 405
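The new lines above reserve space for call trampolines: on platforms with limited branch reach (this code arrives with the PPC64 port), a call may need a trampoline stub in the stub section, so worst-case stub and relocation space is claimed per MachCall through the platform-defined CallStubImpl from the *.ad file. A toy sketch of the accumulation, with invented per-platform constants:

    #include <cstdio>

    // Invented stand-in for the platform-specific CallStubImpl from the
    // .ad file; the constants are illustrative, not any platform's real values.
    struct CallStubImplSketch {
      static int size_call_trampoline()  { return 16; } // worst-case stub bytes
      static int reloc_call_trampoline() { return 1;  } // relocation entries
    };

    int main() {
      int stub_size = 0, reloc_size = 0;
      const int n_calls = 3; // pretend the block contains three MachCalls
      for (int i = 0; i < n_calls; i++) {
        // Reserve worst-case space up front: an unused trampoline only wastes
        // stub-section bytes, while underestimating would overflow the buffer.
        stub_size  += CallStubImplSketch::size_call_trampoline();
        reloc_size += CallStubImplSketch::reloc_call_trampoline();
      }
      printf("stub_size=%d reloc_size=%d\n", stub_size, reloc_size);
      return 0;
    }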
410 // check for all offsets inside this block. 414 // check for all offsets inside this block.
411 if (last_call_adr >= blk_starts[i]) { 415 if (last_call_adr >= blk_starts[i]) {
412 blk_size += nop_size; 416 blk_size += nop_size;
413 } 417 }
414 } 418 }
415 if (mach->avoid_back_to_back()) { 419 if (mach->avoid_back_to_back(MachNode::AVOID_BEFORE)) {
416 // Nop is inserted between "avoid back to back" instructions. 420 // Nop is inserted between "avoid back to back" instructions.
417 // ScheduleAndBundle() can rearrange nodes in a block, 421 // ScheduleAndBundle() can rearrange nodes in a block,
418 // check for all offsets inside this block. 422 // check for all offsets inside this block.
419 if (last_avoid_back_to_back_adr >= blk_starts[i]) { 423 if (last_avoid_back_to_back_adr >= blk_starts[i]) {
420 blk_size += nop_size; 424 blk_size += nop_size;
438 // Remember end of call offset 442 // Remember end of call offset
439 if (nj->is_MachCall() && !nj->is_MachCallLeaf()) { 443 if (nj->is_MachCall() && !nj->is_MachCallLeaf()) {
440 last_call_adr = blk_starts[i]+blk_size; 444 last_call_adr = blk_starts[i]+blk_size;
441 } 445 }
442 // Remember end of avoid_back_to_back offset 446 // Remember end of avoid_back_to_back offset
443 if (nj->is_Mach() && nj->as_Mach()->avoid_back_to_back()) { 447 if (nj->is_Mach() && nj->as_Mach()->avoid_back_to_back(MachNode::AVOID_AFTER)) {
444 last_avoid_back_to_back_adr = blk_starts[i]+blk_size; 448 last_avoid_back_to_back_adr = blk_starts[i]+blk_size;
445 } 449 }
446 } 450 }
447 451
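The avoid_back_to_back() query above gains a direction argument: a node can demand separation from the instruction before it, after it, or both. Accordingly, shorten_branches pads ahead of a node answering true for AVOID_BEFORE and records the end offset of one answering true for AVOID_AFTER. A self-contained sketch of the flag scheme; the enum values mirror the ones this change introduces on MachNode, while the node struct is an invented stand-in:

    #include <cstdio>

    // Direction flags as introduced by this change; NodeSketch is an
    // invented stand-in for MachNode.
    enum AvoidBackToBackFlag {
      AVOID_NONE             = 0,
      AVOID_BEFORE           = 1, // needs a gap before itself
      AVOID_AFTER            = 2, // needs a gap after itself
      AVOID_BEFORE_AND_AFTER = AVOID_BEFORE | AVOID_AFTER
    };

    struct NodeSketch {
      int flags;
      bool avoid_back_to_back(AvoidBackToBackFlag f) const {
        return (flags & f) != 0;
      }
    };

    int main() {
      NodeSketch branch = { AVOID_BEFORE_AND_AFTER };
      // shorten_branches pads before a node answering true for AVOID_BEFORE
      // and remembers the end offset of one answering true for AVOID_AFTER.
      printf("before=%d after=%d\n",
             branch.avoid_back_to_back(AVOID_BEFORE),
             branch.avoid_back_to_back(AVOID_AFTER));
      return 0;
    }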
448 // When the next block starts a loop, we may insert pad NOP 452 // When the next block starts a loop, we may insert pad NOP
524 528
525 // Update the jmp_size. 529 // Update the jmp_size.
526 int new_size = replacement->size(_regalloc); 530 int new_size = replacement->size(_regalloc);
527 int diff = br_size - new_size; 531 int diff = br_size - new_size;
528 assert(diff >= (int)nop_size, "short_branch size should be smaller"); 532 assert(diff >= (int)nop_size, "short_branch size should be smaller");
529 // Conservatively take into accound padding between 533 // Conservatively take into account padding between
530 // avoid_back_to_back branches. Previous branch could be 534 // avoid_back_to_back branches. Previous branch could be
531 // converted into avoid_back_to_back branch during next 535 // converted into avoid_back_to_back branch during next
532 // rounds. 536 // rounds.
533 if (needs_padding && replacement->avoid_back_to_back()) { 537 if (needs_padding && replacement->avoid_back_to_back(MachNode::AVOID_BEFORE)) {
534 jmp_offset[i] += nop_size; 538 jmp_offset[i] += nop_size;
535 diff -= nop_size; 539 diff -= nop_size;
536 } 540 }
537 adjust_block_start += diff; 541 adjust_block_start += diff;
538 block->map_node(replacement, idx); 542 block->map_node(replacement, idx);
547 // The jump distance is not short, try again during next iteration. 551 // The jump distance is not short, try again during next iteration.
548 has_short_branch_candidate = true; 552 has_short_branch_candidate = true;
549 } 553 }
550 } // (mach->may_be_short_branch()) 554 } // (mach->may_be_short_branch())
551 if (mach != NULL && (mach->may_be_short_branch() || 555 if (mach != NULL && (mach->may_be_short_branch() ||
552 mach->avoid_back_to_back())) { 556 mach->avoid_back_to_back(MachNode::AVOID_AFTER))) {
553 last_may_be_short_branch_adr = blk_starts[i] + jmp_offset[i] + jmp_size[i]; 557 last_may_be_short_branch_adr = blk_starts[i] + jmp_offset[i] + jmp_size[i];
554 } 558 }
555 blk_starts[i+1] -= adjust_block_start; 559 blk_starts[i+1] -= adjust_block_start;
556 } 560 }
557 } 561 }
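shorten_branches as a whole runs to a fixed point: replacing a long branch with its short form shrinks the block, which can bring other branches into short range, so the pass repeats while has_short_branch_candidate stays set. A toy fixed-point model of that convergence, with invented sizes and a single end-of-code target:

    #include <vector>
    #include <cstdio>

    // Toy model: four branches all target the end of the code. The long form
    // is 5 bytes, the short form 2, and the short encoding reaches 8 bytes.
    // All numbers are invented.
    int main() {
      std::vector<int> size(4, 5);           // all branches long initially
      bool changed = true;
      while (changed) {                      // iterate to a fixed point
        changed = false;
        int total = 0;
        for (size_t i = 0; i < size.size(); i++) total += size[i];
        int offset = 0;
        for (size_t i = 0; i < size.size(); i++) {
          int distance = total - offset;     // forward branch to end of code
          if (size[i] == 5 && distance <= 8) {
            size[i] = 2;                     // shrink; a later pass may now
            changed = true;                  // bring earlier branches in range
          }
          offset += size[i];
        }
      }
      for (size_t i = 0; i < size.size(); i++)
        printf("branch %u: %d bytes\n", (unsigned)i, size[i]);
      return 0;
    }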
1079 } 1083 }
1080 1084
1081 // Compute prolog code size 1085 // Compute prolog code size
1082 _method_size = 0; 1086 _method_size = 0;
1083 _frame_slots = OptoReg::reg2stack(_matcher->_old_SP)+_regalloc->_framesize; 1087 _frame_slots = OptoReg::reg2stack(_matcher->_old_SP)+_regalloc->_framesize;
1084 #ifdef IA64 1088 #if defined(IA64) && !defined(AIX)
1085 if (save_argument_registers()) { 1089 if (save_argument_registers()) {
1086 // 4815101: this is a stub with implicit and unknown precision fp args. 1090 // 4815101: this is a stub with implicit and unknown precision fp args.
1087 // The usual spill mechanism can only generate stfd's in this case, which 1091 // The usual spill mechanism can only generate stfd's in this case, which
1088 // doesn't work if the fp reg to spill contains a single-precision denorm. 1092 // doesn't work if the fp reg to spill contains a single-precision denorm.
1089 // Instead, we hack around the normal spill mechanism using stfspill's and 1093 // Instead, we hack around the normal spill mechanism using stfspill's and
1097 } 1101 }
1098 #endif 1102 #endif
1099 assert(_frame_slots >= 0 && _frame_slots < 1000000, "sanity check"); 1103 assert(_frame_slots >= 0 && _frame_slots < 1000000, "sanity check");
1100 1104
1101 if (has_mach_constant_base_node()) { 1105 if (has_mach_constant_base_node()) {
1106 uint add_size = 0;
1102 // Fill the constant table. 1107 // Fill the constant table.
1103 // Note: This must happen before shorten_branches. 1108 // Note: This must happen before shorten_branches.
1104 for (uint i = 0; i < _cfg->number_of_blocks(); i++) { 1109 for (uint i = 0; i < _cfg->number_of_blocks(); i++) {
1105 Block* b = _cfg->get_block(i); 1110 Block* b = _cfg->get_block(i);
1106 1111
1110 // If the node is a MachConstantNode evaluate the constant 1115 // If the node is a MachConstantNode evaluate the constant
1111 // value section. 1116 // value section.
1112 if (n->is_MachConstant()) { 1117 if (n->is_MachConstant()) {
1113 MachConstantNode* machcon = n->as_MachConstant(); 1118 MachConstantNode* machcon = n->as_MachConstant();
1114 machcon->eval_constant(C); 1119 machcon->eval_constant(C);
1120 } else if (n->is_Mach()) {
1121 // On Power there are more nodes that issue constants.
1122 add_size += (n->as_Mach()->ins_num_consts() * 8);
1115 } 1123 }
1116 } 1124 }
1117 } 1125 }
1118 1126
1119 // Calculate the offsets of the constants and the size of the 1127 // Calculate the offsets of the constants and the size of the
1120 // constant table (including the padding to the next section). 1128 // constant table (including the padding to the next section).
1121 constant_table().calculate_offsets_and_size(); 1129 constant_table().calculate_offsets_and_size();
1122 const_req = constant_table().size(); 1130 const_req = constant_table().size() + add_size;
1123 } 1131 }
1124 1132
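The add_size bookkeeping above handles Mach nodes that issue constants without being MachConstantNodes (the comment names Power): each such node is asked via ins_num_consts() and 8 bytes per extra constant are added to the requested constant-section size. A sketch of the accounting, with invented node data:

    #include <cstdio>

    // Invented stand-ins: a node either is a MachConstant (evaluated into the
    // table directly) or reports how many extra constants it will emit.
    struct NodeSketch {
      bool is_mach_constant;
      int  num_extra_consts;   // what ins_num_consts() would answer
    };

    int main() {
      NodeSketch nodes[] = { {true, 0}, {false, 2}, {false, 0}, {false, 1} };
      unsigned add_size = 0;
      for (int i = 0; i < 4; i++) {
        if (!nodes[i].is_mach_constant) {
          // 8 bytes reserved per extra constant a node may issue.
          add_size += nodes[i].num_extra_consts * 8;
        }
      }
      const unsigned table_size = 64;        // pretend calculated table size
      unsigned const_req = table_size + add_size;
      printf("const_req=%u (table=%u, extra=%u)\n",
             const_req, table_size, add_size);
      return 0;
    }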
1125 // Initialize the space for the BufferBlob used to find and verify 1133 // Initialize the space for the BufferBlob used to find and verify
1126 // instruction size in MachNode::emit_size() 1134 // instruction size in MachNode::emit_size()
1127 init_scratch_buffer_blob(const_req); 1135 init_scratch_buffer_blob(const_req);
1130 // Pre-compute the length of blocks and replace 1138 // Pre-compute the length of blocks and replace
1131 // long branches with short if machine supports it. 1139 // long branches with short if machine supports it.
1132 shorten_branches(blk_starts, code_req, locs_req, stub_req); 1140 shorten_branches(blk_starts, code_req, locs_req, stub_req);
1133 1141
1134 // nmethod and CodeBuffer count stubs & constants as part of method's code. 1142 // nmethod and CodeBuffer count stubs & constants as part of method's code.
1135 int exception_handler_req = size_exception_handler(); 1143 // class HandlerImpl is platform-specific and defined in the *.ad files.
1136 int deopt_handler_req = size_deopt_handler(); 1144 int exception_handler_req = HandlerImpl::size_exception_handler() + MAX_stubs_size; // add marginal slop for handler
1137 exception_handler_req += MAX_stubs_size; // add marginal slop for handler 1145 int deopt_handler_req = HandlerImpl::size_deopt_handler() + MAX_stubs_size; // add marginal slop for handler
1138 deopt_handler_req += MAX_stubs_size; // add marginal slop for handler
1139 stub_req += MAX_stubs_size; // ensure per-stub margin 1146 stub_req += MAX_stubs_size; // ensure per-stub margin
1140 code_req += MAX_inst_size; // ensure per-instruction margin 1147 code_req += MAX_inst_size; // ensure per-instruction margin
1141 1148
1142 if (StressCodeBuffers) 1149 if (StressCodeBuffers)
1143 code_req = const_req = stub_req = exception_handler_req = deopt_handler_req = 0x10; // force expansion 1150 code_req = const_req = stub_req = exception_handler_req = deopt_handler_req = 0x10; // force expansion
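The free functions size_exception_handler()/size_deopt_handler() and emit_exception_handler()/emit_deopt_handler() (their extern declarations are deleted at the top of this diff) move into the platform-specific class HandlerImpl defined in each *.ad file. A declaration-only sketch of the interface shape, with signatures inferred from the call sites in this file and CodeBuffer left opaque:

    // Shape of the platform interface as used by output.cpp in this change.
    class CodeBuffer;

    class HandlerImpl {
     public:
      // Worst-case sizes, queried while buffer space is being budgeted.
      static int size_exception_handler();
      static int size_deopt_handler();
      // Emitters return the code offset of the handler they generate.
      static int emit_exception_handler(CodeBuffer& cbuf);
      static int emit_deopt_handler(CodeBuffer& cbuf);
    };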
1312 // Make sure safepoint node for polling is distinct from a call's 1319 // Make sure safepoint node for polling is distinct from a call's
1313 // return by adding a nop if needed. 1320 // return by adding a nop if needed.
1314 if (is_sfn && !is_mcall && padding == 0 && current_offset == last_call_offset) { 1321 if (is_sfn && !is_mcall && padding == 0 && current_offset == last_call_offset) {
1315 padding = nop_size; 1322 padding = nop_size;
1316 } 1323 }
1317 if (padding == 0 && mach->avoid_back_to_back() && 1324 if (padding == 0 && mach->avoid_back_to_back(MachNode::AVOID_BEFORE) &&
1318 current_offset == last_avoid_back_to_back_offset) { 1325 current_offset == last_avoid_back_to_back_offset) {
1319 // Avoid back to back some instructions. 1326 // Avoid back to back some instructions.
1320 padding = nop_size; 1327 padding = nop_size;
1321 } 1328 }
1322 1329
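The padding above keeps pcs unambiguous: a safepoint poll starting at the same offset as the preceding call's return would give two debug-info entries the same pc key, so one nop is inserted; the same one-nop rule now also separates an AVOID_BEFORE node from the last avoid_back_to_back boundary. A toy sketch of the collision check, with invented offsets:

    #include <cstdio>

    int main() {
      const int nop_size = 1;          // invented; 1 byte on x86
      int last_call_offset = 24;       // pc just after the previous call
      int current_offset   = 24;       // where the safepoint would start
      int padding = 0;
      // Two debug-info entries may not share a pc: pad the poll forward.
      if (padding == 0 && current_offset == last_call_offset) {
        padding = nop_size;
      }
      printf("padding=%d, safepoint pc=%d\n", padding, current_offset + padding);
      return 0;
    }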
1388 assert(delay_slot == NULL, "not expecting delay slot node"); 1395 assert(delay_slot == NULL, "not expecting delay slot node");
1389 int br_size = n->size(_regalloc); 1396 int br_size = n->size(_regalloc);
1390 int offset = blk_starts[block_num] - current_offset; 1397 int offset = blk_starts[block_num] - current_offset;
1391 if (block_num >= i) { 1398 if (block_num >= i) {
1392 // Current and following block's offset are not 1399 // Current and following block's offset are not
1393 // finilized yet, adjust distance by the difference 1400 // finalized yet, adjust distance by the difference
1394 // between calculated and final offsets of current block. 1401 // between calculated and final offsets of current block.
1395 offset -= (blk_starts[i] - blk_offset); 1402 offset -= (blk_starts[i] - blk_offset);
1396 } 1403 }
1397 // In the following code a nop could be inserted before 1404 // In the following code a nop could be inserted before
1398 // the branch which will increase the backward distance. 1405 // the branch which will increase the backward distance.
1406 1413
1407 // Update the jmp_size. 1414 // Update the jmp_size.
1408 int new_size = replacement->size(_regalloc); 1415 int new_size = replacement->size(_regalloc);
1409 assert((br_size - new_size) >= (int)nop_size, "short_branch size should be smaller"); 1416 assert((br_size - new_size) >= (int)nop_size, "short_branch size should be smaller");
1410 // Insert padding between avoid_back_to_back branches. 1417 // Insert padding between avoid_back_to_back branches.
1411 if (needs_padding && replacement->avoid_back_to_back()) { 1418 if (needs_padding && replacement->avoid_back_to_back(MachNode::AVOID_BEFORE)) {
1412 MachNode *nop = new (this) MachNopNode(); 1419 MachNode *nop = new (this) MachNopNode();
1413 block->insert_node(nop, j++); 1420 block->insert_node(nop, j++);
1414 _cfg->map_node_to_block(nop, block); 1421 _cfg->map_node_to_block(nop, block);
1415 last_inst++; 1422 last_inst++;
1416 nop->emit(*cb, _regalloc); 1423 nop->emit(*cb, _regalloc);
1469 // Remember the beginning of the previous instruction, in case 1476 // Remember the beginning of the previous instruction, in case
1470 // it's followed by a flag-kill and a null-check. Happens on 1477 // it's followed by a flag-kill and a null-check. Happens on
1471 // Intel all the time, with add-to-memory kind of opcodes. 1478 // Intel all the time, with add-to-memory kind of opcodes.
1472 previous_offset = current_offset; 1479 previous_offset = current_offset;
1473 } 1480 }
1481
1482 // Not an else-if!
1483 // If this is a trap based cmp then add its offset to the list.
1484 if (mach->is_TrapBasedCheckNode()) {
1485 inct_starts[inct_cnt++] = current_offset;
1486 }
1474 } 1487 }
1475 1488
1476 // Verify that there is sufficient space remaining 1489 // Verify that there is sufficient space remaining
1477 cb->insts()->maybe_expand_to_ensure_remaining(MAX_inst_size); 1490 cb->insts()->maybe_expand_to_ensure_remaining(MAX_inst_size);
1478 if ((cb->blob() == NULL) || (!CompileBroker::should_compile_new_jobs())) { 1491 if ((cb->blob() == NULL) || (!CompileBroker::should_compile_new_jobs())) {
1508 1521
1509 if (is_mcall) { 1522 if (is_mcall) {
1510 last_call_offset = current_offset; 1523 last_call_offset = current_offset;
1511 } 1524 }
1512 1525
1513 if (n->is_Mach() && n->as_Mach()->avoid_back_to_back()) { 1526 if (n->is_Mach() && n->as_Mach()->avoid_back_to_back(MachNode::AVOID_AFTER)) {
1514 // Avoid back to back some instructions. 1527 // Avoid back to back some instructions.
1515 last_avoid_back_to_back_offset = current_offset; 1528 last_avoid_back_to_back_offset = current_offset;
1516 } 1529 }
1517 1530
1518 // See if this instruction has a delay slot 1531 // See if this instruction has a delay slot
1613 // ------------------ 1626 // ------------------
1614 // Fill in exception table entries. 1627 // Fill in exception table entries.
1615 FillExceptionTables(inct_cnt, call_returns, inct_starts, blk_labels); 1628 FillExceptionTables(inct_cnt, call_returns, inct_starts, blk_labels);
1616 1629
1617 // Only java methods have exception handlers and deopt handlers 1630 // Only java methods have exception handlers and deopt handlers
1631 // class HandlerImpl is platform-specific and defined in the *.ad files.
1618 if (_method) { 1632 if (_method) {
1619 // Emit the exception handler code. 1633 // Emit the exception handler code.
1620 _code_offsets.set_value(CodeOffsets::Exceptions, emit_exception_handler(*cb)); 1634 _code_offsets.set_value(CodeOffsets::Exceptions, HandlerImpl::emit_exception_handler(*cb));
1621 // Emit the deopt handler code. 1635 // Emit the deopt handler code.
1622 _code_offsets.set_value(CodeOffsets::Deopt, emit_deopt_handler(*cb)); 1636 _code_offsets.set_value(CodeOffsets::Deopt, HandlerImpl::emit_deopt_handler(*cb));
1623 1637
1624 // Emit the MethodHandle deopt handler code (if required). 1638 // Emit the MethodHandle deopt handler code (if required).
1625 if (has_method_handle_invokes()) { 1639 if (has_method_handle_invokes()) {
1626 // We can use the same code as for the normal deopt handler, we 1640 // We can use the same code as for the normal deopt handler, we
1627 // just need a different entry point address. 1641 // just need a different entry point address.
1628 _code_offsets.set_value(CodeOffsets::DeoptMH, emit_deopt_handler(*cb)); 1642 _code_offsets.set_value(CodeOffsets::DeoptMH, HandlerImpl::emit_deopt_handler(*cb));
1629 } 1643 }
1630 } 1644 }
1631 1645
1632 // One last check for failed CodeBuffer::expand: 1646 // One last check for failed CodeBuffer::expand:
1633 if ((cb->blob() == NULL) || (!CompileBroker::should_compile_new_jobs())) { 1647 if ((cb->blob() == NULL) || (!CompileBroker::should_compile_new_jobs())) {
1735 if (n->is_MachNullCheck()) { 1749 if (n->is_MachNullCheck()) {
1736 uint block_num = block->non_connector_successor(0)->_pre_order; 1750 uint block_num = block->non_connector_successor(0)->_pre_order;
1737 _inc_table.append(inct_starts[inct_cnt++], blk_labels[block_num].loc_pos()); 1751 _inc_table.append(inct_starts[inct_cnt++], blk_labels[block_num].loc_pos());
1738 continue; 1752 continue;
1739 } 1753 }
1754 // Handle implicit exception table updates: trap instructions.
1755 if (n->is_Mach() && n->as_Mach()->is_TrapBasedCheckNode()) {
1756 uint block_num = block->non_connector_successor(0)->_pre_order;
1757 _inc_table.append(inct_starts[inct_cnt++], blk_labels[block_num].loc_pos());
1758 continue;
1759 }
1740 } // End of for all blocks fill in exception table entries 1760 } // End of for all blocks fill in exception table entries
1741 } 1761 }
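Trap-based checks (e.g. a conditional trap instruction used for implicit null checks on PPC64) need the same implicit-exception bookkeeping as MachNullCheck: the emit loop records each trap's pc in inct_starts, and FillExceptionTables pairs that pc with the continuation label of the block's non-connector successor. A miniature of the pc-to-handler pairing; the table layout here is invented:

    #include <vector>
    #include <utility>
    #include <cstdio>

    // Invented miniature of the implicit-exception table: pairs of
    // (faulting pc, handler pc), appended in emission order.
    struct InctTableSketch {
      std::vector<std::pair<int,int> > entries;
      void append(int fault_pc, int handler_pc) {
        entries.push_back(std::make_pair(fault_pc, handler_pc));
      }
    };

    int main() {
      int inct_starts[] = { 12, 40 };  // trap pcs recorded during emission
      int handler_pc[]  = { 96, 96 };  // label of each successor block
      InctTableSketch inc_table;
      for (int i = 0; i < 2; i++) {
        inc_table.append(inct_starts[i], handler_pc[i]);
      }
      for (size_t i = 0; i < inc_table.entries.size(); i++) {
        printf("pc %d -> handler %d\n",
               inc_table.entries[i].first, inc_table.entries[i].second);
      }
      return 0;
    }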
1742 1762
1743 // Static Variables 1763 // Static Variables
1744 #ifndef PRODUCT 1764 #ifndef PRODUCT