comparison src/cpu/x86/vm/templateTable_x86_64.cpp @ 4137:04b9a2566eec

Merge with hsx23/hotspot.
author Thomas Wuerthinger <thomas.wuerthinger@oracle.com>
date Sat, 17 Dec 2011 21:40:27 +0100
parents fdb992d83a87
children 52b5d32fbfaf 069ab3f976d3
comparing 3737:9dc19b7d89a3 with 4137:04b9a2566eec
@@ -201,50 +201,78 @@
 Address TemplateTable::at_bcp(int offset) {
   assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
   return Address(r13, offset);
 }
 
-void TemplateTable::patch_bytecode(Bytecodes::Code bytecode, Register bc,
-                                   Register scratch,
-                                   bool load_bc_into_scratch/*=true*/) {
-  if (!RewriteBytecodes) {
-    return;
-  }
-  // the pair bytecodes have already done the load.
-  if (load_bc_into_scratch) {
-    __ movl(bc, bytecode);
-  }
-  Label patch_done;
+void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
+                                   Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
+                                   int byte_no) {
+  if (!RewriteBytecodes) return;
+  Label L_patch_done;
+
+  switch (bc) {
+  case Bytecodes::_fast_aputfield:
+  case Bytecodes::_fast_bputfield:
+  case Bytecodes::_fast_cputfield:
+  case Bytecodes::_fast_dputfield:
+  case Bytecodes::_fast_fputfield:
+  case Bytecodes::_fast_iputfield:
+  case Bytecodes::_fast_lputfield:
+  case Bytecodes::_fast_sputfield:
+    {
+      // We skip bytecode quickening for putfield instructions when
+      // the put_code written to the constant pool cache is zero.
+      // This is required so that every execution of this instruction
+      // calls out to InterpreterRuntime::resolve_get_put to do
+      // additional, required work.
+      assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
+      assert(load_bc_into_bc_reg, "we use bc_reg as temp");
+      __ get_cache_and_index_and_bytecode_at_bcp(temp_reg, bc_reg, temp_reg, byte_no, 1);
+      __ movl(bc_reg, bc);
+      __ cmpl(temp_reg, (int) 0);
+      __ jcc(Assembler::zero, L_patch_done);  // don't patch
+    }
+    break;
+  default:
+    assert(byte_no == -1, "sanity");
+    // the pair bytecodes have already done the load.
+    if (load_bc_into_bc_reg) {
+      __ movl(bc_reg, bc);
+    }
+  }
+
   if (JvmtiExport::can_post_breakpoint()) {
-    Label fast_patch;
+    Label L_fast_patch;
     // if a breakpoint is present we can't rewrite the stream directly
-    __ movzbl(scratch, at_bcp(0));
-    __ cmpl(scratch, Bytecodes::_breakpoint);
-    __ jcc(Assembler::notEqual, fast_patch);
-    __ get_method(scratch);
+    __ movzbl(temp_reg, at_bcp(0));
+    __ cmpl(temp_reg, Bytecodes::_breakpoint);
+    __ jcc(Assembler::notEqual, L_fast_patch);
+    __ get_method(temp_reg);
     // Let breakpoint table handling rewrite to quicker bytecode
-    __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::set_original_bytecode_at), scratch, r13, bc);
+    __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::set_original_bytecode_at), temp_reg, r13, bc_reg);
 #ifndef ASSERT
-    __ jmpb(patch_done);
+    __ jmpb(L_patch_done);
 #else
-    __ jmp(patch_done);
+    __ jmp(L_patch_done);
 #endif
-    __ bind(fast_patch);
+    __ bind(L_fast_patch);
   }
+
 #ifdef ASSERT
-  Label okay;
-  __ load_unsigned_byte(scratch, at_bcp(0));
-  __ cmpl(scratch, (int) Bytecodes::java_code(bytecode));
-  __ jcc(Assembler::equal, okay);
-  __ cmpl(scratch, bc);
-  __ jcc(Assembler::equal, okay);
+  Label L_okay;
+  __ load_unsigned_byte(temp_reg, at_bcp(0));
+  __ cmpl(temp_reg, (int) Bytecodes::java_code(bc));
+  __ jcc(Assembler::equal, L_okay);
+  __ cmpl(temp_reg, bc_reg);
+  __ jcc(Assembler::equal, L_okay);
   __ stop("patching the wrong bytecode");
-  __ bind(okay);
+  __ bind(L_okay);
 #endif
+
   // patch bytecode
-  __ movb(at_bcp(0), bc);
-  __ bind(patch_done);
+  __ movb(at_bcp(0), bc_reg);
+  __ bind(L_patch_done);
 }
 
 
 // Individual instructions
 
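Note: the new switch makes the fast-putfield templates skip quickening until the put_code byte cached in the ConstantPoolCacheEntry is non-zero, so an unresolved field keeps trapping into InterpreterRuntime::resolve_get_put on every execution. Below is a minimal plain-C++ sketch of that guard, not the emitted assembly; the constant and helper names are illustrative only:

    #include <cstdint>

    static const uint8_t BREAKPOINT_BC = 0xca;   // stand-in for Bytecodes::_breakpoint (opcode 202)

    // Rewrite the bytecode at bcp to its quickened form fast_bc, but only once
    // the constant-pool-cache entry has been resolved (cached_put_code != 0).
    inline void patch_fast_putfield(uint8_t* bcp, uint8_t fast_bc, uint8_t cached_put_code) {
      if (cached_put_code == 0) return;      // unresolved: don't patch, keep re-resolving
      if (*bcp == BREAKPOINT_BC) return;     // breakpoint owns this slot; the breakpoint table is patched instead
      *bcp = fast_bc;                        // quicken the instruction in the bytecode stream
    }
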
@@ -2096,28 +2124,24 @@
                                             size_t index_size) {
   const Register temp = rbx;
   assert_different_registers(result, Rcache, index, temp);
 
   Label resolved;
-  __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size);
   if (byte_no == f1_oop) {
     // We are resolved if the f1 field contains a non-null object (CallSite, etc.)
     // This kind of CP cache entry does not need to match the flags byte, because
     // there is a 1-1 relation between bytecode type and CP entry type.
     assert(result != noreg, ""); //else do cmpptr(Address(...), (int32_t) NULL_WORD)
+    __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size);
     __ movptr(result, Address(Rcache, index, Address::times_ptr, constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::f1_offset()));
     __ testptr(result, result);
     __ jcc(Assembler::notEqual, resolved);
   } else {
     assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
     assert(result == noreg, ""); //else change code for setting result
-    const int shift_count = (1 + byte_no) * BitsPerByte;
-    __ movl(temp, Address(Rcache, index, Address::times_ptr, constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::indices_offset()));
-    __ shrl(temp, shift_count);
-    // have we resolved this bytecode?
-    __ andl(temp, 0xFF);
-    __ cmpl(temp, (int) bytecode());
+    __ get_cache_and_index_and_bytecode_at_bcp(Rcache, index, temp, byte_no, 1, index_size);
+    __ cmpl(temp, (int) bytecode()); // have we resolved this bytecode?
     __ jcc(Assembler::equal, resolved);
   }
 
   // resolve first time through
   address entry;
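For reference, the byte that the removed shift/mask sequence used to extract corresponds to the computation below; get_cache_and_index_and_bytecode_at_bcp now does the equivalent work internally. A standalone sketch (the layout comment is inferred from the shift arithmetic above, not quoted from the ConstantPoolCacheEntry header):

    #include <cstdint>

    // 'indices' packs the constant pool index in the low 16 bits and the
    // resolved bytecodes for byte_no 1 (f1) and 2 (f2) in the next two bytes.
    inline int resolved_bytecode_from_indices(uint32_t indices, int byte_no) {
      const int BitsPerByte = 8;
      return (indices >> ((1 + byte_no) * BitsPerByte)) & 0xFF;
    }

The cache entry counts as resolved once this byte equals the bytecode currently being executed.
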
@@ -2505,121 +2529,143 @@
   __ shrl(flags, ConstantPoolCacheEntry::tosBits);
 
   assert(btos == 0, "change code, btos != 0");
   __ andl(flags, 0x0f);
   __ jcc(Assembler::notZero, notByte);
+
   // btos
-  __ pop(btos);
-  if (!is_static) pop_and_check_object(obj);
-  __ movb(field, rax);
-  if (!is_static) {
-    patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx);
-  }
-  __ jmp(Done);
+  {
+    __ pop(btos);
+    if (!is_static) pop_and_check_object(obj);
+    __ movb(field, rax);
+    if (!is_static) {
+      patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
+    }
+    __ jmp(Done);
+  }
 
   __ bind(notByte);
   __ cmpl(flags, atos);
   __ jcc(Assembler::notEqual, notObj);
+
   // atos
-  __ pop(atos);
-  if (!is_static) pop_and_check_object(obj);
-
-  // Store into the field
-  do_oop_store(_masm, field, rax, _bs->kind(), false);
-
-  if (!is_static) {
-    patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx);
-  }
-  __ jmp(Done);
+  {
+    __ pop(atos);
+    if (!is_static) pop_and_check_object(obj);
+    // Store into the field
+    do_oop_store(_masm, field, rax, _bs->kind(), false);
+    if (!is_static) {
+      patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
+    }
+    __ jmp(Done);
+  }
 
   __ bind(notObj);
   __ cmpl(flags, itos);
   __ jcc(Assembler::notEqual, notInt);
+
   // itos
-  __ pop(itos);
-  if (!is_static) pop_and_check_object(obj);
-  __ movl(field, rax);
-  if (!is_static) {
-    patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx);
-  }
-  __ jmp(Done);
+  {
+    __ pop(itos);
+    if (!is_static) pop_and_check_object(obj);
+    __ movl(field, rax);
+    if (!is_static) {
+      patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
+    }
+    __ jmp(Done);
+  }
 
   __ bind(notInt);
   __ cmpl(flags, ctos);
   __ jcc(Assembler::notEqual, notChar);
+
   // ctos
-  __ pop(ctos);
-  if (!is_static) pop_and_check_object(obj);
-  __ movw(field, rax);
-  if (!is_static) {
-    patch_bytecode(Bytecodes::_fast_cputfield, bc, rbx);
-  }
-  __ jmp(Done);
+  {
+    __ pop(ctos);
+    if (!is_static) pop_and_check_object(obj);
+    __ movw(field, rax);
+    if (!is_static) {
+      patch_bytecode(Bytecodes::_fast_cputfield, bc, rbx, true, byte_no);
+    }
+    __ jmp(Done);
+  }
 
   __ bind(notChar);
   __ cmpl(flags, stos);
   __ jcc(Assembler::notEqual, notShort);
+
   // stos
-  __ pop(stos);
-  if (!is_static) pop_and_check_object(obj);
-  __ movw(field, rax);
-  if (!is_static) {
-    patch_bytecode(Bytecodes::_fast_sputfield, bc, rbx);
-  }
-  __ jmp(Done);
+  {
+    __ pop(stos);
+    if (!is_static) pop_and_check_object(obj);
+    __ movw(field, rax);
+    if (!is_static) {
+      patch_bytecode(Bytecodes::_fast_sputfield, bc, rbx, true, byte_no);
+    }
+    __ jmp(Done);
+  }
 
   __ bind(notShort);
   __ cmpl(flags, ltos);
   __ jcc(Assembler::notEqual, notLong);
+
   // ltos
-  __ pop(ltos);
-  if (!is_static) pop_and_check_object(obj);
-  __ movq(field, rax);
-  if (!is_static) {
-    patch_bytecode(Bytecodes::_fast_lputfield, bc, rbx);
-  }
-  __ jmp(Done);
+  {
+    __ pop(ltos);
+    if (!is_static) pop_and_check_object(obj);
+    __ movq(field, rax);
+    if (!is_static) {
+      patch_bytecode(Bytecodes::_fast_lputfield, bc, rbx, true, byte_no);
+    }
+    __ jmp(Done);
+  }
 
   __ bind(notLong);
   __ cmpl(flags, ftos);
   __ jcc(Assembler::notEqual, notFloat);
+
   // ftos
-  __ pop(ftos);
-  if (!is_static) pop_and_check_object(obj);
-  __ movflt(field, xmm0);
-  if (!is_static) {
-    patch_bytecode(Bytecodes::_fast_fputfield, bc, rbx);
-  }
-  __ jmp(Done);
+  {
+    __ pop(ftos);
+    if (!is_static) pop_and_check_object(obj);
+    __ movflt(field, xmm0);
+    if (!is_static) {
+      patch_bytecode(Bytecodes::_fast_fputfield, bc, rbx, true, byte_no);
+    }
+    __ jmp(Done);
+  }
 
   __ bind(notFloat);
 #ifdef ASSERT
   __ cmpl(flags, dtos);
   __ jcc(Assembler::notEqual, notDouble);
 #endif
+
   // dtos
-  __ pop(dtos);
-  if (!is_static) pop_and_check_object(obj);
-  __ movdbl(field, xmm0);
-  if (!is_static) {
-    patch_bytecode(Bytecodes::_fast_dputfield, bc, rbx);
-  }
+  {
+    __ pop(dtos);
+    if (!is_static) pop_and_check_object(obj);
+    __ movdbl(field, xmm0);
+    if (!is_static) {
+      patch_bytecode(Bytecodes::_fast_dputfield, bc, rbx, true, byte_no);
+    }
+  }
 
 #ifdef ASSERT
   __ jmp(Done);
 
   __ bind(notDouble);
   __ stop("Bad state");
 #endif
 
   __ bind(Done);
+
   // Check for volatile store
   __ testl(rdx, rdx);
   __ jcc(Assembler::zero, notVolatile);
   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
                                                Assembler::StoreStore));
-
   __ bind(notVolatile);
 }
 
 void TemplateTable::putfield(int byte_no) {
   putfield_or_static(byte_no, false);
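The type dispatch in putfield_or_static keys off the tos (top-of-stack) state stored in the cache-entry flags starting at tosBits; btos is asserted to be 0 so the first case can use a plain notZero branch. A rough standalone sketch of that decode (only btos = 0 is guaranteed by the assert; the other enumerator values and the tosBits position shown here are illustrative):

    // Tos state tags; btos must be 0 for the fast "notZero" dispatch to work.
    enum TosState { btos = 0, ctos, stos, itos, ltos, ftos, dtos, atos };

    inline TosState field_type_from_flags(unsigned flags, int tosBits = 28) {
      return static_cast<TosState>((flags >> tosBits) & 0x0f);
    }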