comparison src/cpu/x86/vm/templateTable_x86_32.cpp @ 304:dc7f315e41f7

5108146: Merge i486 and amd64 cpu directories
6459804: Want client (c1) compiler for x86_64 (amd64) for faster start-up
Reviewed-by: kvn
author never
date Wed, 27 Aug 2008 00:21:55 -0700
parents d1605aabd0a1
children f8199438385b
comparing 303:fa4d1d240383 with 304:dc7f315e41f7
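
This changeset folds the i486 and amd64 interpreter sources into one x86 directory, so the recurring edit below swaps width-specific instructions (movl, pushl, addl, ...) for pointer-width wrappers (movptr, push, addptr) and fences 32-bit-only register halves with NOT_LP64. A minimal sketch of the selection macros, modeled on HotSpot's globalDefinitions.hpp (shown as an assumption, not quoted from this changeset):

    #ifdef _LP64
      #define LP64_ONLY(code) code   // emitted only in 64-bit builds
      #define NOT_LP64(code)         // compiled out in 64-bit builds
    #else
      #define LP64_ONLY(code)        // compiled out in 32-bit builds
      #define NOT_LP64(code)  code   // emitted only in 32-bit builds
    #endif

With these, one source file can carry both ports: the 32-bit file keeps its rdx:rax pairs, and the same lines compile away on LP64.
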
117 Register scratch, 117 Register scratch,
118 bool load_bc_into_scratch/*=true*/) { 118 bool load_bc_into_scratch/*=true*/) {
119 119
120 if (!RewriteBytecodes) return; 120 if (!RewriteBytecodes) return;
121 // the pair bytecodes have already done the load. 121 // the pair bytecodes have already done the load.
122 if (load_bc_into_scratch) __ movl(bc, bytecode); 122 if (load_bc_into_scratch) {
123 __ movl(bc, bytecode);
124 }
123 Label patch_done; 125 Label patch_done;
124 if (JvmtiExport::can_post_breakpoint()) { 126 if (JvmtiExport::can_post_breakpoint()) {
125 Label fast_patch; 127 Label fast_patch;
126 // if a breakpoint is present we can't rewrite the stream directly 128 // if a breakpoint is present we can't rewrite the stream directly
127 __ movzxb(scratch, at_bcp(0)); 129 __ movzbl(scratch, at_bcp(0));
128 __ cmpl(scratch, Bytecodes::_breakpoint); 130 __ cmpl(scratch, Bytecodes::_breakpoint);
129 __ jcc(Assembler::notEqual, fast_patch); 131 __ jcc(Assembler::notEqual, fast_patch);
130 __ get_method(scratch); 132 __ get_method(scratch);
131 // Let breakpoint table handling rewrite to quicker bytecode 133 // Let breakpoint table handling rewrite to quicker bytecode
132 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::set_original_bytecode_at), scratch, rsi, bc); 134 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::set_original_bytecode_at), scratch, rsi, bc);
167 169
168 170
169 171
170 void TemplateTable::aconst_null() { 172 void TemplateTable::aconst_null() {
171 transition(vtos, atos); 173 transition(vtos, atos);
172 __ xorl(rax, rax); 174 __ xorptr(rax, rax);
173 } 175 }
174 176
175 177
176 void TemplateTable::iconst(int value) { 178 void TemplateTable::iconst(int value) {
177 transition(vtos, itos); 179 transition(vtos, itos);
178 if (value == 0) { 180 if (value == 0) {
179 __ xorl(rax, rax); 181 __ xorptr(rax, rax);
180 } else { 182 } else {
181 __ movl(rax, value); 183 __ movptr(rax, value);
182 } 184 }
183 } 185 }
184 186
185 187
186 void TemplateTable::lconst(int value) { 188 void TemplateTable::lconst(int value) {
187 transition(vtos, ltos); 189 transition(vtos, ltos);
188 if (value == 0) { 190 if (value == 0) {
189 __ xorl(rax, rax); 191 __ xorptr(rax, rax);
190 } else { 192 } else {
191 __ movl(rax, value); 193 __ movptr(rax, value);
192 } 194 }
193 assert(value >= 0, "check this code"); 195 assert(value >= 0, "check this code");
194 __ xorl(rdx, rdx); 196 __ xorptr(rdx, rdx);
195 } 197 }
196 198
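
In lconst the value lands in the rdx:rax pair on 32-bit; the xorptr(rdx, rdx) clears the high word, which is only correct for non-negative constants, hence the assert. The semantics, sketched in C++ under that 32-bit view (hypothetical helper, not HotSpot API):

    #include <stdint.h>
    static void lconst(int32_t value, int32_t* rax, int32_t* rdx) {
      *rax = value;   // low word of the Java long
      *rdx = 0;       // high word; valid only for value >= 0,
                      // which is what the template's assert guards
    }
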
197 199
198 void TemplateTable::fconst(int value) { 200 void TemplateTable::fconst(int value) {
199 transition(vtos, ftos); 201 transition(vtos, ftos);
221 223
222 224
223 void TemplateTable::sipush() { 225 void TemplateTable::sipush() {
224 transition(vtos, itos); 226 transition(vtos, itos);
225 __ load_unsigned_word(rax, at_bcp(1)); 227 __ load_unsigned_word(rax, at_bcp(1));
226 __ bswap(rax); 228 __ bswapl(rax);
227 __ sarl(rax, 16); 229 __ sarl(rax, 16);
228 } 230 }
229 231
230 void TemplateTable::ldc(bool wide) { 232 void TemplateTable::ldc(bool wide) {
231 transition(vtos, vtos); 233 transition(vtos, vtos);
239 __ get_cpool_and_tags(rcx, rax); 241 __ get_cpool_and_tags(rcx, rax);
240 const int base_offset = constantPoolOopDesc::header_size() * wordSize; 242 const int base_offset = constantPoolOopDesc::header_size() * wordSize;
241 const int tags_offset = typeArrayOopDesc::header_size(T_BYTE) * wordSize; 243 const int tags_offset = typeArrayOopDesc::header_size(T_BYTE) * wordSize;
242 244
243 // get type 245 // get type
244 __ xorl(rdx, rdx); 246 __ xorptr(rdx, rdx);
245 __ movb(rdx, Address(rax, rbx, Address::times_1, tags_offset)); 247 __ movb(rdx, Address(rax, rbx, Address::times_1, tags_offset));
246 248
247 // unresolved string - get the resolved string 249 // unresolved string - get the resolved string
248 __ cmpl(rdx, JVM_CONSTANT_UnresolvedString); 250 __ cmpl(rdx, JVM_CONSTANT_UnresolvedString);
249 __ jccb(Assembler::equal, call_ldc); 251 __ jccb(Assembler::equal, call_ldc);
269 271
270 __ bind(notClass); 272 __ bind(notClass);
271 __ cmpl(rdx, JVM_CONSTANT_Float); 273 __ cmpl(rdx, JVM_CONSTANT_Float);
272 __ jccb(Assembler::notEqual, notFloat); 274 __ jccb(Assembler::notEqual, notFloat);
273 // ftos 275 // ftos
274 __ fld_s( Address(rcx, rbx, Address::times_4, base_offset)); 276 __ fld_s( Address(rcx, rbx, Address::times_ptr, base_offset));
275 __ push(ftos); 277 __ push(ftos);
276 __ jmp(Done); 278 __ jmp(Done);
277 279
278 __ bind(notFloat); 280 __ bind(notFloat);
279 #ifdef ASSERT 281 #ifdef ASSERT
286 __ bind(L); 288 __ bind(L);
287 } 289 }
288 #endif 290 #endif
289 Label isOop; 291 Label isOop;
290 // atos and itos 292 // atos and itos
291 __ movl(rax, Address(rcx, rbx, Address::times_4, base_offset));
292 // String is only oop type we will see here 293 // String is only oop type we will see here
293 __ cmpl(rdx, JVM_CONSTANT_String); 294 __ cmpl(rdx, JVM_CONSTANT_String);
294 __ jccb(Assembler::equal, isOop); 295 __ jccb(Assembler::equal, isOop);
296 __ movl(rax, Address(rcx, rbx, Address::times_ptr, base_offset));
295 __ push(itos); 297 __ push(itos);
296 __ jmp(Done); 298 __ jmp(Done);
297 __ bind(isOop); 299 __ bind(isOop);
300 __ movptr(rax, Address(rcx, rbx, Address::times_ptr, base_offset));
298 __ push(atos); 301 __ push(atos);
299 302
300 if (VerifyOops) { 303 if (VerifyOops) {
301 __ verify_oop(rax); 304 __ verify_oop(rax);
302 } 305 }
314 317
315 // get type 318 // get type
316 __ cmpb(Address(rax, rbx, Address::times_1, tags_offset), JVM_CONSTANT_Double); 319 __ cmpb(Address(rax, rbx, Address::times_1, tags_offset), JVM_CONSTANT_Double);
317 __ jccb(Assembler::notEqual, Long); 320 __ jccb(Assembler::notEqual, Long);
318 // dtos 321 // dtos
319 __ fld_d( Address(rcx, rbx, Address::times_4, base_offset)); 322 __ fld_d( Address(rcx, rbx, Address::times_ptr, base_offset));
320 __ push(dtos); 323 __ push(dtos);
321 __ jmpb(Done); 324 __ jmpb(Done);
322 325
323 __ bind(Long); 326 __ bind(Long);
324 // ltos 327 // ltos
325 __ movl(rax, Address(rcx, rbx, Address::times_4, base_offset + 0 * wordSize)); 328 __ movptr(rax, Address(rcx, rbx, Address::times_ptr, base_offset + 0 * wordSize));
326 __ movl(rdx, Address(rcx, rbx, Address::times_4, base_offset + 1 * wordSize)); 329 NOT_LP64(__ movptr(rdx, Address(rcx, rbx, Address::times_ptr, base_offset + 1 * wordSize)));
327 330
328 __ push(ltos); 331 __ push(ltos);
329 332
330 __ bind(Done); 333 __ bind(Done);
331 } 334 }
332 335
333 336
334 void TemplateTable::locals_index(Register reg, int offset) { 337 void TemplateTable::locals_index(Register reg, int offset) {
335 __ load_unsigned_byte(reg, at_bcp(offset)); 338 __ load_unsigned_byte(reg, at_bcp(offset));
336 __ negl(reg); 339 __ negptr(reg);
337 } 340 }
338 341
339 342
340 void TemplateTable::iload() { 343 void TemplateTable::iload() {
341 transition(vtos, itos); 344 transition(vtos, itos);
397 400
398 401
399 void TemplateTable::lload() { 402 void TemplateTable::lload() {
400 transition(vtos, ltos); 403 transition(vtos, ltos);
401 locals_index(rbx); 404 locals_index(rbx);
402 __ movl(rax, laddress(rbx)); 405 __ movptr(rax, laddress(rbx));
403 __ movl(rdx, haddress(rbx)); 406 NOT_LP64(__ movl(rdx, haddress(rbx)));
404 debug_only(__ verify_local_tag(frame::TagCategory2, rbx)); 407 debug_only(__ verify_local_tag(frame::TagCategory2, rbx));
405 } 408 }
406 409
407 410
408 void TemplateTable::fload() { 411 void TemplateTable::fload() {
419 if (TaggedStackInterpreter) { 422 if (TaggedStackInterpreter) {
420 // Get double out of locals array, onto temp stack and load with 423 // Get double out of locals array, onto temp stack and load with
421 // float instruction into ST0 424 // float instruction into ST0
422 __ movl(rax, laddress(rbx)); 425 __ movl(rax, laddress(rbx));
423 __ movl(rdx, haddress(rbx)); 426 __ movl(rdx, haddress(rbx));
424 __ pushl(rdx); // push hi first 427 __ push(rdx); // push hi first
425 __ pushl(rax); 428 __ push(rax);
426 __ fld_d(Address(rsp, 0)); 429 __ fld_d(Address(rsp, 0));
427 __ addl(rsp, 2*wordSize); 430 __ addptr(rsp, 2*wordSize);
428 debug_only(__ verify_local_tag(frame::TagCategory2, rbx)); 431 debug_only(__ verify_local_tag(frame::TagCategory2, rbx));
429 } else { 432 } else {
430 __ fld_d(daddress(rbx)); 433 __ fld_d(daddress(rbx));
431 } 434 }
432 } 435 }
433 436
434 437
435 void TemplateTable::aload() { 438 void TemplateTable::aload() {
436 transition(vtos, atos); 439 transition(vtos, atos);
437 locals_index(rbx); 440 locals_index(rbx);
438 __ movl(rax, iaddress(rbx)); 441 __ movptr(rax, aaddress(rbx));
439 debug_only(__ verify_local_tag(frame::TagReference, rbx)); 442 debug_only(__ verify_local_tag(frame::TagReference, rbx));
440 } 443 }
441 444
442 445
443 void TemplateTable::locals_index_wide(Register reg) { 446 void TemplateTable::locals_index_wide(Register reg) {
444 __ movl(reg, at_bcp(2)); 447 __ movl(reg, at_bcp(2));
445 __ bswap(reg); 448 __ bswapl(reg);
446 __ shrl(reg, 16); 449 __ shrl(reg, 16);
447 __ negl(reg); 450 __ negptr(reg);
448 } 451 }
449 452
450 453
451 void TemplateTable::wide_iload() { 454 void TemplateTable::wide_iload() {
452 transition(vtos, itos); 455 transition(vtos, itos);
457 460
458 461
459 void TemplateTable::wide_lload() { 462 void TemplateTable::wide_lload() {
460 transition(vtos, ltos); 463 transition(vtos, ltos);
461 locals_index_wide(rbx); 464 locals_index_wide(rbx);
462 __ movl(rax, laddress(rbx)); 465 __ movptr(rax, laddress(rbx));
463 __ movl(rdx, haddress(rbx)); 466 NOT_LP64(__ movl(rdx, haddress(rbx)));
464 debug_only(__ verify_local_tag(frame::TagCategory2, rbx)); 467 debug_only(__ verify_local_tag(frame::TagCategory2, rbx));
465 } 468 }
466 469
467 470
468 void TemplateTable::wide_fload() { 471 void TemplateTable::wide_fload() {
479 if (TaggedStackInterpreter) { 482 if (TaggedStackInterpreter) {
480 // Get double out of locals array, onto temp stack and load with 483 // Get double out of locals array, onto temp stack and load with
481 // float instruction into ST0 484 // float instruction into ST0
482 __ movl(rax, laddress(rbx)); 485 __ movl(rax, laddress(rbx));
483 __ movl(rdx, haddress(rbx)); 486 __ movl(rdx, haddress(rbx));
484 __ pushl(rdx); // push hi first 487 __ push(rdx); // push hi first
485 __ pushl(rax); 488 __ push(rax);
486 __ fld_d(Address(rsp, 0)); 489 __ fld_d(Address(rsp, 0));
487 __ addl(rsp, 2*wordSize); 490 __ addl(rsp, 2*wordSize);
488 debug_only(__ verify_local_tag(frame::TagCategory2, rbx)); 491 debug_only(__ verify_local_tag(frame::TagCategory2, rbx));
489 } else { 492 } else {
490 __ fld_d(daddress(rbx)); 493 __ fld_d(daddress(rbx));
493 496
494 497
495 void TemplateTable::wide_aload() { 498 void TemplateTable::wide_aload() {
496 transition(vtos, atos); 499 transition(vtos, atos);
497 locals_index_wide(rbx); 500 locals_index_wide(rbx);
498 __ movl(rax, iaddress(rbx)); 501 __ movptr(rax, aaddress(rbx));
499 debug_only(__ verify_local_tag(frame::TagReference, rbx)); 502 debug_only(__ verify_local_tag(frame::TagReference, rbx));
500 } 503 }
501 504
502 void TemplateTable::index_check(Register array, Register index) { 505 void TemplateTable::index_check(Register array, Register index) {
503 // Pop ptr into array 506 // Pop ptr into array
507 510
508 void TemplateTable::index_check_without_pop(Register array, Register index) { 511 void TemplateTable::index_check_without_pop(Register array, Register index) {
509 // destroys rbx, 512 // destroys rbx,
510 // check array 513 // check array
511 __ null_check(array, arrayOopDesc::length_offset_in_bytes()); 514 __ null_check(array, arrayOopDesc::length_offset_in_bytes());
515 LP64_ONLY(__ movslq(index, index));
512 // check index 516 // check index
513 __ cmpl(index, Address(array, arrayOopDesc::length_offset_in_bytes())); 517 __ cmpl(index, Address(array, arrayOopDesc::length_offset_in_bytes()));
514 if (index != rbx) { 518 if (index != rbx) {
515 // ??? convention: move aberrant index into rbx, for exception message 519 // ??? convention: move aberrant index into rbx, for exception message
516 assert(rbx != array, "different registers"); 520 assert(rbx != array, "different registers");
517 __ movl(rbx, index); 521 __ mov(rbx, index);
518 } 522 }
519 __ jump_cc(Assembler::aboveEqual, 523 __ jump_cc(Assembler::aboveEqual,
520 ExternalAddress(Interpreter::_throw_ArrayIndexOutOfBoundsException_entry)); 524 ExternalAddress(Interpreter::_throw_ArrayIndexOutOfBoundsException_entry));
521 } 525 }
522 526
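
index_check_without_pop now sign-extends the index on 64-bit (the added movslq at new line 515) before it feeds address arithmetic, and the single jump_cc(aboveEqual, ...) after the compare rejects both negative and too-large indices. The idiom, sketched in C++ assuming index and length are jints:

    #include <stdint.h>
    // One unsigned compare covers index < 0 and index >= length,
    // because a negative index wraps to a huge unsigned value.
    static bool in_bounds(int32_t index, int32_t length) {
      return (uint32_t)index < (uint32_t)length;
    }
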
533 void TemplateTable::laload() { 537 void TemplateTable::laload() {
534 transition(itos, ltos); 538 transition(itos, ltos);
535 // rax,: index 539 // rax,: index
536 // rdx: array 540 // rdx: array
537 index_check(rdx, rax); 541 index_check(rdx, rax);
538 __ movl(rbx, rax); 542 __ mov(rbx, rax);
539 // rbx,: index 543 // rbx,: index
540 __ movl(rax, Address(rdx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 0 * wordSize)); 544 __ movptr(rax, Address(rdx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 0 * wordSize));
541 __ movl(rdx, Address(rdx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 1 * wordSize)); 545 NOT_LP64(__ movl(rdx, Address(rdx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 1 * wordSize)));
542 } 546 }
543 547
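
laload reads a 64-bit element as two machine words on 32-bit, and the NOT_LP64 wrapper drops the high-half load on 64-bit, where the single movptr already fetched all 64 bits. The rax/rdx split, sketched in C++ (little-endian; hypothetical helper, not HotSpot API):

    #include <stdint.h>
    static void split_jlong(int64_t v, int32_t* lo, int32_t* hi) {
      *lo = (int32_t)(uint32_t)v;   // low word  -> rax
      *hi = (int32_t)(v >> 32);     // high word -> rdx (32-bit build only)
    }
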
544 548
545 void TemplateTable::faload() { 549 void TemplateTable::faload() {
546 transition(itos, ftos); 550 transition(itos, ftos);
563 void TemplateTable::aaload() { 567 void TemplateTable::aaload() {
564 transition(itos, atos); 568 transition(itos, atos);
565 // rdx: array 569 // rdx: array
566 index_check(rdx, rax); // kills rbx, 570 index_check(rdx, rax); // kills rbx,
567 // rax,: index 571 // rax,: index
568 __ movl(rax, Address(rdx, rax, Address::times_4, arrayOopDesc::base_offset_in_bytes(T_OBJECT))); 572 __ movptr(rax, Address(rdx, rax, Address::times_ptr, arrayOopDesc::base_offset_in_bytes(T_OBJECT)));
569 } 573 }
570 574
571 575
572 void TemplateTable::baload() { 576 void TemplateTable::baload() {
573 transition(itos, itos); 577 transition(itos, itos);
574 // rdx: array 578 // rdx: array
575 index_check(rdx, rax); // kills rbx, 579 index_check(rdx, rax); // kills rbx,
576 // rax,: index 580 // rax,: index
577 // can do better code for P5 - fix this at some point 581 // can do better code for P5 - fix this at some point
578 __ load_signed_byte(rbx, Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE))); 582 __ load_signed_byte(rbx, Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)));
579 __ movl(rax, rbx); 583 __ mov(rax, rbx);
580 } 584 }
581 585
582 586
583 void TemplateTable::caload() { 587 void TemplateTable::caload() {
584 transition(itos, itos); 588 transition(itos, itos);
585 // rdx: array 589 // rdx: array
586 index_check(rdx, rax); // kills rbx, 590 index_check(rdx, rax); // kills rbx,
587 // rax,: index 591 // rax,: index
588 // can do better code for P5 - may want to improve this at some point 592 // can do better code for P5 - may want to improve this at some point
589 __ load_unsigned_word(rbx, Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR))); 593 __ load_unsigned_word(rbx, Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)));
590 __ movl(rax, rbx); 594 __ mov(rax, rbx);
591 } 595 }
592 596
593 // iload followed by caload frequent pair 597 // iload followed by caload frequent pair
594 void TemplateTable::fast_icaload() { 598 void TemplateTable::fast_icaload() {
595 transition(vtos, itos); 599 transition(vtos, itos);
600 604
601 // rdx: array 605 // rdx: array
602 index_check(rdx, rax); 606 index_check(rdx, rax);
603 // rax,: index 607 // rax,: index
604 __ load_unsigned_word(rbx, Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR))); 608 __ load_unsigned_word(rbx, Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)));
605 __ movl(rax, rbx); 609 __ mov(rax, rbx);
606 } 610 }
607 611
608 void TemplateTable::saload() { 612 void TemplateTable::saload() {
609 transition(itos, itos); 613 transition(itos, itos);
610 // rdx: array 614 // rdx: array
611 index_check(rdx, rax); // kills rbx, 615 index_check(rdx, rax); // kills rbx,
612 // rax,: index 616 // rax,: index
613 // can do better code for P5 - may want to improve this at some point 617 // can do better code for P5 - may want to improve this at some point
614 __ load_signed_word(rbx, Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_SHORT))); 618 __ load_signed_word(rbx, Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_SHORT)));
615 __ movl(rax, rbx); 619 __ mov(rax, rbx);
616 } 620 }
617 621
618 622
619 void TemplateTable::iload(int n) { 623 void TemplateTable::iload(int n) {
620 transition(vtos, itos); 624 transition(vtos, itos);
623 } 627 }
624 628
625 629
626 void TemplateTable::lload(int n) { 630 void TemplateTable::lload(int n) {
627 transition(vtos, ltos); 631 transition(vtos, ltos);
628 __ movl(rax, laddress(n)); 632 __ movptr(rax, laddress(n));
629 __ movl(rdx, haddress(n)); 633 NOT_LP64(__ movptr(rdx, haddress(n)));
630 debug_only(__ verify_local_tag(frame::TagCategory2, n)); 634 debug_only(__ verify_local_tag(frame::TagCategory2, n));
631 } 635 }
632 636
633 637
634 void TemplateTable::fload(int n) { 638 void TemplateTable::fload(int n) {
643 if (TaggedStackInterpreter) { 647 if (TaggedStackInterpreter) {
644 // Get double out of locals array, onto temp stack and load with 648 // Get double out of locals array, onto temp stack and load with
645 // float instruction into ST0 649 // float instruction into ST0
646 __ movl(rax, laddress(n)); 650 __ movl(rax, laddress(n));
647 __ movl(rdx, haddress(n)); 651 __ movl(rdx, haddress(n));
648 __ pushl(rdx); // push hi first 652 __ push(rdx); // push hi first
649 __ pushl(rax); 653 __ push(rax);
650 __ fld_d(Address(rsp, 0)); 654 __ fld_d(Address(rsp, 0));
651 __ addl(rsp, 2*wordSize); // reset rsp 655 __ addptr(rsp, 2*wordSize); // reset rsp
652 debug_only(__ verify_local_tag(frame::TagCategory2, n)); 656 debug_only(__ verify_local_tag(frame::TagCategory2, n));
653 } else { 657 } else {
654 __ fld_d(daddress(n)); 658 __ fld_d(daddress(n));
655 } 659 }
656 } 660 }
657 661
658 662
659 void TemplateTable::aload(int n) { 663 void TemplateTable::aload(int n) {
660 transition(vtos, atos); 664 transition(vtos, atos);
661 __ movl(rax, aaddress(n)); 665 __ movptr(rax, aaddress(n));
662 debug_only(__ verify_local_tag(frame::TagReference, n)); 666 debug_only(__ verify_local_tag(frame::TagReference, n));
663 } 667 }
664 668
665 669
666 void TemplateTable::aload_0() { 670 void TemplateTable::aload_0() {
738 742
739 743
740 void TemplateTable::lstore() { 744 void TemplateTable::lstore() {
741 transition(ltos, vtos); 745 transition(ltos, vtos);
742 locals_index(rbx); 746 locals_index(rbx);
743 __ movl(laddress(rbx), rax); 747 __ movptr(laddress(rbx), rax);
744 __ movl(haddress(rbx), rdx); 748 NOT_LP64(__ movptr(haddress(rbx), rdx));
745 __ tag_local(frame::TagCategory2, rbx); 749 __ tag_local(frame::TagCategory2, rbx);
746 } 750 }
747 751
748 752
749 void TemplateTable::fstore() { 753 void TemplateTable::fstore() {
757 void TemplateTable::dstore() { 761 void TemplateTable::dstore() {
758 transition(dtos, vtos); 762 transition(dtos, vtos);
759 locals_index(rbx); 763 locals_index(rbx);
760 if (TaggedStackInterpreter) { 764 if (TaggedStackInterpreter) {
761 // Store double on stack and reload into locals nonadjacently 765 // Store double on stack and reload into locals nonadjacently
762 __ subl(rsp, 2 * wordSize); 766 __ subptr(rsp, 2 * wordSize);
763 __ fstp_d(Address(rsp, 0)); 767 __ fstp_d(Address(rsp, 0));
764 __ popl(rax); 768 __ pop(rax);
765 __ popl(rdx); 769 __ pop(rdx);
766 __ movl(laddress(rbx), rax); 770 __ movptr(laddress(rbx), rax);
767 __ movl(haddress(rbx), rdx); 771 __ movptr(haddress(rbx), rdx);
768 __ tag_local(frame::TagCategory2, rbx); 772 __ tag_local(frame::TagCategory2, rbx);
769 } else { 773 } else {
770 __ fstp_d(daddress(rbx)); 774 __ fstp_d(daddress(rbx));
771 } 775 }
772 } 776 }
774 778
775 void TemplateTable::astore() { 779 void TemplateTable::astore() {
776 transition(vtos, vtos); 780 transition(vtos, vtos);
777 __ pop_ptr(rax, rdx); // will need to pop tag too 781 __ pop_ptr(rax, rdx); // will need to pop tag too
778 locals_index(rbx); 782 locals_index(rbx);
779 __ movl(aaddress(rbx), rax); 783 __ movptr(aaddress(rbx), rax);
780 __ tag_local(rdx, rbx); // need to store same tag in local may be returnAddr 784 __ tag_local(rdx, rbx); // need to store same tag in local may be returnAddr
781 } 785 }
782 786
783 787
784 void TemplateTable::wide_istore() { 788 void TemplateTable::wide_istore() {
792 796
793 void TemplateTable::wide_lstore() { 797 void TemplateTable::wide_lstore() {
794 transition(vtos, vtos); 798 transition(vtos, vtos);
795 __ pop_l(rax, rdx); 799 __ pop_l(rax, rdx);
796 locals_index_wide(rbx); 800 locals_index_wide(rbx);
797 __ movl(laddress(rbx), rax); 801 __ movptr(laddress(rbx), rax);
798 __ movl(haddress(rbx), rdx); 802 NOT_LP64(__ movl(haddress(rbx), rdx));
799 __ tag_local(frame::TagCategory2, rbx); 803 __ tag_local(frame::TagCategory2, rbx);
800 } 804 }
801 805
802 806
803 void TemplateTable::wide_fstore() { 807 void TemplateTable::wide_fstore() {
812 816
813 void TemplateTable::wide_astore() { 817 void TemplateTable::wide_astore() {
814 transition(vtos, vtos); 818 transition(vtos, vtos);
815 __ pop_ptr(rax, rdx); 819 __ pop_ptr(rax, rdx);
816 locals_index_wide(rbx); 820 locals_index_wide(rbx);
817 __ movl(aaddress(rbx), rax); 821 __ movptr(aaddress(rbx), rax);
818 __ tag_local(rdx, rbx); 822 __ tag_local(rdx, rbx);
819 } 823 }
820 824
821 825
822 void TemplateTable::iastore() { 826 void TemplateTable::iastore() {
836 // rax,: low(value) 840 // rax,: low(value)
837 // rcx: array 841 // rcx: array
838 // rdx: high(value) 842 // rdx: high(value)
839 index_check(rcx, rbx); // prefer index in rbx, 843 index_check(rcx, rbx); // prefer index in rbx,
840 // rbx,: index 844 // rbx,: index
841 __ movl(Address(rcx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 0 * wordSize), rax); 845 __ movptr(Address(rcx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 0 * wordSize), rax);
842 __ movl(Address(rcx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 1 * wordSize), rdx); 846 NOT_LP64(__ movl(Address(rcx, rbx, Address::times_8, arrayOopDesc::base_offset_in_bytes(T_LONG) + 1 * wordSize), rdx));
843 } 847 }
844 848
845 849
846 void TemplateTable::fastore() { 850 void TemplateTable::fastore() {
847 transition(ftos, vtos); 851 transition(ftos, vtos);
867 871
868 void TemplateTable::aastore() { 872 void TemplateTable::aastore() {
869 Label is_null, ok_is_subtype, done; 873 Label is_null, ok_is_subtype, done;
870 transition(vtos, vtos); 874 transition(vtos, vtos);
871 // stack: ..., array, index, value 875 // stack: ..., array, index, value
872 __ movl(rax, at_tos()); // Value 876 __ movptr(rax, at_tos()); // Value
873 __ movl(rcx, at_tos_p1()); // Index 877 __ movl(rcx, at_tos_p1()); // Index
874 __ movl(rdx, at_tos_p2()); // Array 878 __ movptr(rdx, at_tos_p2()); // Array
875 index_check_without_pop(rdx, rcx); // kills rbx, 879 index_check_without_pop(rdx, rcx); // kills rbx,
876 // do array store check - check for NULL value first 880 // do array store check - check for NULL value first
877 __ testl(rax, rax); 881 __ testptr(rax, rax);
878 __ jcc(Assembler::zero, is_null); 882 __ jcc(Assembler::zero, is_null);
879 883
880 // Move subklass into EBX 884 // Move subklass into EBX
881 __ movl(rbx, Address(rax, oopDesc::klass_offset_in_bytes())); 885 __ movptr(rbx, Address(rax, oopDesc::klass_offset_in_bytes()));
882 // Move superklass into EAX 886 // Move superklass into EAX
883 __ movl(rax, Address(rdx, oopDesc::klass_offset_in_bytes())); 887 __ movptr(rax, Address(rdx, oopDesc::klass_offset_in_bytes()));
884 __ movl(rax, Address(rax, sizeof(oopDesc) + objArrayKlass::element_klass_offset_in_bytes())); 888 __ movptr(rax, Address(rax, sizeof(oopDesc) + objArrayKlass::element_klass_offset_in_bytes()));
885 // Compress array+index*4+12 into a single register. Frees ECX. 889 // Compress array+index*wordSize+12 into a single register. Frees ECX.
886 __ leal(rdx, Address(rdx, rcx, Address::times_4, arrayOopDesc::base_offset_in_bytes(T_OBJECT))); 890 __ lea(rdx, Address(rdx, rcx, Address::times_ptr, arrayOopDesc::base_offset_in_bytes(T_OBJECT)));
887 891
888 // Generate subtype check. Blows ECX. Resets EDI to locals. 892 // Generate subtype check. Blows ECX. Resets EDI to locals.
889 // Superklass in EAX. Subklass in EBX. 893 // Superklass in EAX. Subklass in EBX.
890 __ gen_subtype_check( rbx, ok_is_subtype ); 894 __ gen_subtype_check( rbx, ok_is_subtype );
891 895
893 // object is at TOS 897 // object is at TOS
894 __ jump(ExternalAddress(Interpreter::_throw_ArrayStoreException_entry)); 898 __ jump(ExternalAddress(Interpreter::_throw_ArrayStoreException_entry));
895 899
896 // Come here on success 900 // Come here on success
897 __ bind(ok_is_subtype); 901 __ bind(ok_is_subtype);
898 __ movl(rax, at_rsp()); // Value 902 __ movptr(rax, at_rsp()); // Value
899 __ movl(Address(rdx, 0), rax); 903 __ movptr(Address(rdx, 0), rax);
900 __ store_check(rdx); 904 __ store_check(rdx);
901 __ jmpb(done); 905 __ jmpb(done);
902 906
903 // Have a NULL in EAX, EDX=array, ECX=index. Store NULL at ary[idx] 907 // Have a NULL in EAX, EDX=array, ECX=index. Store NULL at ary[idx]
904 __ bind(is_null); 908 __ bind(is_null);
905 __ profile_null_seen(rbx); 909 __ profile_null_seen(rbx);
906 __ movl(Address(rdx, rcx, Address::times_4, arrayOopDesc::base_offset_in_bytes(T_OBJECT)), rax); 910 __ movptr(Address(rdx, rcx, Address::times_ptr, arrayOopDesc::base_offset_in_bytes(T_OBJECT)), rax);
907 911
908 // Pop stack arguments 912 // Pop stack arguments
909 __ bind(done); 913 __ bind(done);
910 __ addl(rsp, 3 * Interpreter::stackElementSize()); 914 __ addptr(rsp, 3 * Interpreter::stackElementSize());
911 } 915 }
912 916
913 917
914 void TemplateTable::bastore() { 918 void TemplateTable::bastore() {
915 transition(itos, vtos); 919 transition(itos, vtos);
945 } 949 }
946 950
947 951
948 void TemplateTable::lstore(int n) { 952 void TemplateTable::lstore(int n) {
949 transition(ltos, vtos); 953 transition(ltos, vtos);
950 __ movl(laddress(n), rax); 954 __ movptr(laddress(n), rax);
951 __ movl(haddress(n), rdx); 955 NOT_LP64(__ movptr(haddress(n), rdx));
952 __ tag_local(frame::TagCategory2, n); 956 __ tag_local(frame::TagCategory2, n);
953 } 957 }
954 958
955 959
956 void TemplateTable::fstore(int n) { 960 void TemplateTable::fstore(int n) {
961 965
962 966
963 void TemplateTable::dstore(int n) { 967 void TemplateTable::dstore(int n) {
964 transition(dtos, vtos); 968 transition(dtos, vtos);
965 if (TaggedStackInterpreter) { 969 if (TaggedStackInterpreter) {
966 __ subl(rsp, 2 * wordSize); 970 __ subptr(rsp, 2 * wordSize);
967 __ fstp_d(Address(rsp, 0)); 971 __ fstp_d(Address(rsp, 0));
968 __ popl(rax); 972 __ pop(rax);
969 __ popl(rdx); 973 __ pop(rdx);
970 __ movl(laddress(n), rax); 974 __ movl(laddress(n), rax);
971 __ movl(haddress(n), rdx); 975 __ movl(haddress(n), rdx);
972 __ tag_local(frame::TagCategory2, n); 976 __ tag_local(frame::TagCategory2, n);
973 } else { 977 } else {
974 __ fstp_d(daddress(n)); 978 __ fstp_d(daddress(n));
977 981
978 982
979 void TemplateTable::astore(int n) { 983 void TemplateTable::astore(int n) {
980 transition(vtos, vtos); 984 transition(vtos, vtos);
981 __ pop_ptr(rax, rdx); 985 __ pop_ptr(rax, rdx);
982 __ movl(aaddress(n), rax); 986 __ movptr(aaddress(n), rax);
983 __ tag_local(rdx, n); 987 __ tag_local(rdx, n);
984 } 988 }
985 989
986 990
987 void TemplateTable::pop() { 991 void TemplateTable::pop() {
988 transition(vtos, vtos); 992 transition(vtos, vtos);
989 __ addl(rsp, Interpreter::stackElementSize()); 993 __ addptr(rsp, Interpreter::stackElementSize());
990 } 994 }
991 995
992 996
993 void TemplateTable::pop2() { 997 void TemplateTable::pop2() {
994 transition(vtos, vtos); 998 transition(vtos, vtos);
995 __ addl(rsp, 2*Interpreter::stackElementSize()); 999 __ addptr(rsp, 2*Interpreter::stackElementSize());
996 } 1000 }
997 1001
998 1002
999 void TemplateTable::dup() { 1003 void TemplateTable::dup() {
1000 transition(vtos, vtos); 1004 transition(vtos, vtos);
1097 1101
1098 void TemplateTable::iop2(Operation op) { 1102 void TemplateTable::iop2(Operation op) {
1099 transition(itos, itos); 1103 transition(itos, itos);
1100 switch (op) { 1104 switch (op) {
1101 case add : __ pop_i(rdx); __ addl (rax, rdx); break; 1105 case add : __ pop_i(rdx); __ addl (rax, rdx); break;
1102 case sub : __ movl(rdx, rax); __ pop_i(rax); __ subl (rax, rdx); break; 1106 case sub : __ mov(rdx, rax); __ pop_i(rax); __ subl (rax, rdx); break;
1103 case mul : __ pop_i(rdx); __ imull(rax, rdx); break; 1107 case mul : __ pop_i(rdx); __ imull(rax, rdx); break;
1104 case _and : __ pop_i(rdx); __ andl (rax, rdx); break; 1108 case _and : __ pop_i(rdx); __ andl (rax, rdx); break;
1105 case _or : __ pop_i(rdx); __ orl (rax, rdx); break; 1109 case _or : __ pop_i(rdx); __ orl (rax, rdx); break;
1106 case _xor : __ pop_i(rdx); __ xorl (rax, rdx); break; 1110 case _xor : __ pop_i(rdx); __ xorl (rax, rdx); break;
1107 case shl : __ movl(rcx, rax); __ pop_i(rax); __ shll (rax); break; // implicit masking of lower 5 bits by Intel shift instr. 1111 case shl : __ mov(rcx, rax); __ pop_i(rax); __ shll (rax); break; // implicit masking of lower 5 bits by Intel shift instr.
1108 case shr : __ movl(rcx, rax); __ pop_i(rax); __ sarl (rax); break; // implicit masking of lower 5 bits by Intel shift instr. 1112 case shr : __ mov(rcx, rax); __ pop_i(rax); __ sarl (rax); break; // implicit masking of lower 5 bits by Intel shift instr.
1109 case ushr : __ movl(rcx, rax); __ pop_i(rax); __ shrl (rax); break; // implicit masking of lower 5 bits by Intel shift instr. 1113 case ushr : __ mov(rcx, rax); __ pop_i(rax); __ shrl (rax); break; // implicit masking of lower 5 bits by Intel shift instr.
1110 default : ShouldNotReachHere(); 1114 default : ShouldNotReachHere();
1111 } 1115 }
1112 } 1116 }
1113 1117
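
iop2 moves the shift count into rcx and, as the comments on the shl/shr/ushr cases note, relies on the hardware masking the count to its low five bits, which matches the JVM's int-shift semantics exactly. Sketched in C++:

    #include <stdint.h>
    static int32_t ishl(int32_t x, int32_t count) {
      return x << (count & 0x1f);                     // JVM masks to 5 bits
    }
    static int32_t iushr(int32_t x, int32_t count) {
      return (int32_t)((uint32_t)x >> (count & 0x1f));
    }
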
1114 1118
1116 transition(ltos, ltos); 1120 transition(ltos, ltos);
1117 __ pop_l(rbx, rcx); 1121 __ pop_l(rbx, rcx);
1118 switch (op) { 1122 switch (op) {
1119 case add : __ addl(rax, rbx); __ adcl(rdx, rcx); break; 1123 case add : __ addl(rax, rbx); __ adcl(rdx, rcx); break;
1120 case sub : __ subl(rbx, rax); __ sbbl(rcx, rdx); 1124 case sub : __ subl(rbx, rax); __ sbbl(rcx, rdx);
1121 __ movl(rax, rbx); __ movl(rdx, rcx); break; 1125 __ mov(rax, rbx); __ mov(rdx, rcx); break;
1122 case _and: __ andl(rax, rbx); __ andl(rdx, rcx); break; 1126 case _and: __ andl(rax, rbx); __ andl(rdx, rcx); break;
1123 case _or : __ orl (rax, rbx); __ orl (rdx, rcx); break; 1127 case _or : __ orl (rax, rbx); __ orl (rdx, rcx); break;
1124 case _xor: __ xorl(rax, rbx); __ xorl(rdx, rcx); break; 1128 case _xor: __ xorl(rax, rbx); __ xorl(rdx, rcx); break;
1125 default : ShouldNotReachHere(); 1129 default : ShouldNotReachHere();
1126 } 1130 }
1127 } 1131 }
1128 1132
1129 1133
1130 void TemplateTable::idiv() { 1134 void TemplateTable::idiv() {
1131 transition(itos, itos); 1135 transition(itos, itos);
1132 __ movl(rcx, rax); 1136 __ mov(rcx, rax);
1133 __ pop_i(rax); 1137 __ pop_i(rax);
1134 // Note: could xor rax, and rcx and compare with (-1 ^ min_int). If 1138 // Note: could xor rax, and rcx and compare with (-1 ^ min_int). If
1135 // they are not equal, one could do a normal division (no correction 1139 // they are not equal, one could do a normal division (no correction
1136 // needed), which may speed up this implementation for the common case. 1140 // needed), which may speed up this implementation for the common case.
1137 // (see also JVM spec., p.243 & p.271) 1141 // (see also JVM spec., p.243 & p.271)
1139 } 1143 }
1140 1144
1141 1145
1142 void TemplateTable::irem() { 1146 void TemplateTable::irem() {
1143 transition(itos, itos); 1147 transition(itos, itos);
1144 __ movl(rcx, rax); 1148 __ mov(rcx, rax);
1145 __ pop_i(rax); 1149 __ pop_i(rax);
1146 // Note: could xor rax, and rcx and compare with (-1 ^ min_int). If 1150 // Note: could xor rax, and rcx and compare with (-1 ^ min_int). If
1147 // they are not equal, one could do a normal division (no correction 1151 // they are not equal, one could do a normal division (no correction
1148 // needed), which may speed up this implementation for the common case. 1152 // needed), which may speed up this implementation for the common case.
1149 // (see also JVM spec., p.243 & p.271) 1153 // (see also JVM spec., p.243 & p.271)
1150 __ corrected_idivl(rcx); 1154 __ corrected_idivl(rcx);
1151 __ movl(rax, rdx); 1155 __ mov(rax, rdx);
1152 } 1156 }
1153 1157
1154 1158
1155 void TemplateTable::lmul() { 1159 void TemplateTable::lmul() {
1156 transition(ltos, ltos); 1160 transition(ltos, ltos);
1157 __ pop_l(rbx, rcx); 1161 __ pop_l(rbx, rcx);
1158 __ pushl(rcx); __ pushl(rbx); 1162 __ push(rcx); __ push(rbx);
1159 __ pushl(rdx); __ pushl(rax); 1163 __ push(rdx); __ push(rax);
1160 __ lmul(2 * wordSize, 0); 1164 __ lmul(2 * wordSize, 0);
1161 __ addl(rsp, 4 * wordSize); // take off temporaries 1165 __ addptr(rsp, 4 * wordSize); // take off temporaries
1162 } 1166 }
1163 1167
1164 1168
1165 void TemplateTable::ldiv() { 1169 void TemplateTable::ldiv() {
1166 transition(ltos, ltos); 1170 transition(ltos, ltos);
1167 __ pop_l(rbx, rcx); 1171 __ pop_l(rbx, rcx);
1168 __ pushl(rcx); __ pushl(rbx); 1172 __ push(rcx); __ push(rbx);
1169 __ pushl(rdx); __ pushl(rax); 1173 __ push(rdx); __ push(rax);
1170 // check if y = 0 1174 // check if y = 0
1171 __ orl(rax, rdx); 1175 __ orl(rax, rdx);
1172 __ jump_cc(Assembler::zero, 1176 __ jump_cc(Assembler::zero,
1173 ExternalAddress(Interpreter::_throw_ArithmeticException_entry)); 1177 ExternalAddress(Interpreter::_throw_ArithmeticException_entry));
1174 __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::ldiv)); 1178 __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::ldiv));
1175 __ addl(rsp, 4 * wordSize); // take off temporaries 1179 __ addptr(rsp, 4 * wordSize); // take off temporaries
1176 } 1180 }
1177 1181
1178 1182
1179 void TemplateTable::lrem() { 1183 void TemplateTable::lrem() {
1180 transition(ltos, ltos); 1184 transition(ltos, ltos);
1181 __ pop_l(rbx, rcx); 1185 __ pop_l(rbx, rcx);
1182 __ pushl(rcx); __ pushl(rbx); 1186 __ push(rcx); __ push(rbx);
1183 __ pushl(rdx); __ pushl(rax); 1187 __ push(rdx); __ push(rax);
1184 // check if y = 0 1188 // check if y = 0
1185 __ orl(rax, rdx); 1189 __ orl(rax, rdx);
1186 __ jump_cc(Assembler::zero, 1190 __ jump_cc(Assembler::zero,
1187 ExternalAddress(Interpreter::_throw_ArithmeticException_entry)); 1191 ExternalAddress(Interpreter::_throw_ArithmeticException_entry));
1188 __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::lrem)); 1192 __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::lrem));
1189 __ addl(rsp, 4 * wordSize); 1193 __ addptr(rsp, 4 * wordSize);
1190 } 1194 }
1191 1195
1192 1196
1193 void TemplateTable::lshl() { 1197 void TemplateTable::lshl() {
1194 transition(itos, ltos); 1198 transition(itos, ltos);
1198 } 1202 }
1199 1203
1200 1204
1201 void TemplateTable::lshr() { 1205 void TemplateTable::lshr() {
1202 transition(itos, ltos); 1206 transition(itos, ltos);
1203 __ movl(rcx, rax); // get shift count 1207 __ mov(rcx, rax); // get shift count
1204 __ pop_l(rax, rdx); // get shift value 1208 __ pop_l(rax, rdx); // get shift value
1205 __ lshr(rdx, rax, true); 1209 __ lshr(rdx, rax, true);
1206 } 1210 }
1207 1211
1208 1212
1209 void TemplateTable::lushr() { 1213 void TemplateTable::lushr() {
1210 transition(itos, ltos); 1214 transition(itos, ltos);
1211 __ movl(rcx, rax); // get shift count 1215 __ mov(rcx, rax); // get shift count
1212 __ pop_l(rax, rdx); // get shift value 1216 __ pop_l(rax, rdx); // get shift value
1213 __ lshr(rdx, rax); 1217 __ lshr(rdx, rax);
1214 } 1218 }
1215 1219
1216 1220
1224 case div: __ fdivr_s(at_rsp()); break; 1228 case div: __ fdivr_s(at_rsp()); break;
1225 case rem: __ fld_s (at_rsp()); __ fremr(rax); break; 1229 case rem: __ fld_s (at_rsp()); __ fremr(rax); break;
1226 default : ShouldNotReachHere(); 1230 default : ShouldNotReachHere();
1227 } 1231 }
1228 __ f2ieee(); 1232 __ f2ieee();
1229 __ popl(rax); // pop float thing off 1233 __ pop(rax); // pop float thing off
1230 } 1234 }
1231 1235
1232 1236
1233 void TemplateTable::dop2(Operation op) { 1237 void TemplateTable::dop2(Operation op) {
1234 transition(dtos, dtos); 1238 transition(dtos, dtos);
1278 case rem: __ fld_d (at_rsp()); __ fremr(rax); break; 1282 case rem: __ fld_d (at_rsp()); __ fremr(rax); break;
1279 default : ShouldNotReachHere(); 1283 default : ShouldNotReachHere();
1280 } 1284 }
1281 __ d2ieee(); 1285 __ d2ieee();
1282 // Pop double precision number from rsp. 1286 // Pop double precision number from rsp.
1283 __ popl(rax); 1287 __ pop(rax);
1284 __ popl(rdx); 1288 __ pop(rdx);
1285 } 1289 }
1286 1290
1287 1291
1288 void TemplateTable::ineg() { 1292 void TemplateTable::ineg() {
1289 transition(itos, itos); 1293 transition(itos, itos);
1319 1323
1320 void TemplateTable::wide_iinc() { 1324 void TemplateTable::wide_iinc() {
1321 transition(vtos, vtos); 1325 transition(vtos, vtos);
1322 __ movl(rdx, at_bcp(4)); // get constant 1326 __ movl(rdx, at_bcp(4)); // get constant
1323 locals_index_wide(rbx); 1327 locals_index_wide(rbx);
1324 __ bswap(rdx); // swap bytes & sign-extend constant 1328 __ bswapl(rdx); // swap bytes & sign-extend constant
1325 __ sarl(rdx, 16); 1329 __ sarl(rdx, 16);
1326 __ addl(iaddress(rbx), rdx); 1330 __ addl(iaddress(rbx), rdx);
1327 // Note: should probably use only one movl to get both 1331 // Note: should probably use only one movl to get both
1328 // the index and the constant -> fix this 1332 // the index and the constant -> fix this
1329 } 1333 }
1373 transition(tos_in, tos_out); 1377 transition(tos_in, tos_out);
1374 } 1378 }
1375 #endif // ASSERT 1379 #endif // ASSERT
1376 1380
1377 // Conversion 1381 // Conversion
1378 // (Note: use pushl(rcx)/popl(rcx) for 1/2-word stack-ptr manipulation) 1382 // (Note: use push(rcx)/pop(rcx) for 1/2-word stack-ptr manipulation)
1379 switch (bytecode()) { 1383 switch (bytecode()) {
1380 case Bytecodes::_i2l: 1384 case Bytecodes::_i2l:
1381 __ extend_sign(rdx, rax); 1385 __ extend_sign(rdx, rax);
1382 break; 1386 break;
1383 case Bytecodes::_i2f: 1387 case Bytecodes::_i2f:
1384 __ pushl(rax); // store int on tos 1388 __ push(rax); // store int on tos
1385 __ fild_s(at_rsp()); // load int to ST0 1389 __ fild_s(at_rsp()); // load int to ST0
1386 __ f2ieee(); // truncate to float size 1390 __ f2ieee(); // truncate to float size
1387 __ popl(rcx); // adjust rsp 1391 __ pop(rcx); // adjust rsp
1388 break; 1392 break;
1389 case Bytecodes::_i2d: 1393 case Bytecodes::_i2d:
1390 __ pushl(rax); // add one slot for d2ieee() 1394 __ push(rax); // add one slot for d2ieee()
1391 __ pushl(rax); // store int on tos 1395 __ push(rax); // store int on tos
1392 __ fild_s(at_rsp()); // load int to ST0 1396 __ fild_s(at_rsp()); // load int to ST0
1393 __ d2ieee(); // truncate to double size 1397 __ d2ieee(); // truncate to double size
1394 __ popl(rcx); // adjust rsp 1398 __ pop(rcx); // adjust rsp
1395 __ popl(rcx); 1399 __ pop(rcx);
1396 break; 1400 break;
1397 case Bytecodes::_i2b: 1401 case Bytecodes::_i2b:
1398 __ shll(rax, 24); // truncate upper 24 bits 1402 __ shll(rax, 24); // truncate upper 24 bits
1399 __ sarl(rax, 24); // and sign-extend byte 1403 __ sarl(rax, 24); // and sign-extend byte
1404 LP64_ONLY(__ movsbl(rax, rax));
1400 break; 1405 break;
1401 case Bytecodes::_i2c: 1406 case Bytecodes::_i2c:
1402 __ andl(rax, 0xFFFF); // truncate upper 16 bits 1407 __ andl(rax, 0xFFFF); // truncate upper 16 bits
1408 LP64_ONLY(__ movzwl(rax, rax));
1403 break; 1409 break;
1404 case Bytecodes::_i2s: 1410 case Bytecodes::_i2s:
1405 __ shll(rax, 16); // truncate upper 16 bits 1411 __ shll(rax, 16); // truncate upper 16 bits
1406 __ sarl(rax, 16); // and sign-extend short 1412 __ sarl(rax, 16); // and sign-extend short
1413 LP64_ONLY(__ movswl(rax, rax));
1407 break; 1414 break;
1408 case Bytecodes::_l2i: 1415 case Bytecodes::_l2i:
1409 /* nothing to do */ 1416 /* nothing to do */
1410 break; 1417 break;
1411 case Bytecodes::_l2f: 1418 case Bytecodes::_l2f:
1412 __ pushl(rdx); // store long on tos 1419 __ push(rdx); // store long on tos
1413 __ pushl(rax); 1420 __ push(rax);
1414 __ fild_d(at_rsp()); // load long to ST0 1421 __ fild_d(at_rsp()); // load long to ST0
1415 __ f2ieee(); // truncate to float size 1422 __ f2ieee(); // truncate to float size
1416 __ popl(rcx); // adjust rsp 1423 __ pop(rcx); // adjust rsp
1417 __ popl(rcx); 1424 __ pop(rcx);
1418 break; 1425 break;
1419 case Bytecodes::_l2d: 1426 case Bytecodes::_l2d:
1420 __ pushl(rdx); // store long on tos 1427 __ push(rdx); // store long on tos
1421 __ pushl(rax); 1428 __ push(rax);
1422 __ fild_d(at_rsp()); // load long to ST0 1429 __ fild_d(at_rsp()); // load long to ST0
1423 __ d2ieee(); // truncate to double size 1430 __ d2ieee(); // truncate to double size
1424 __ popl(rcx); // adjust rsp 1431 __ pop(rcx); // adjust rsp
1425 __ popl(rcx); 1432 __ pop(rcx);
1426 break; 1433 break;
1427 case Bytecodes::_f2i: 1434 case Bytecodes::_f2i:
1428 __ pushl(rcx); // reserve space for argument 1435 __ push(rcx); // reserve space for argument
1429 __ fstp_s(at_rsp()); // pass float argument on stack 1436 __ fstp_s(at_rsp()); // pass float argument on stack
1430 __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::f2i), 1); 1437 __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::f2i), 1);
1431 break; 1438 break;
1432 case Bytecodes::_f2l: 1439 case Bytecodes::_f2l:
1433 __ pushl(rcx); // reserve space for argument 1440 __ push(rcx); // reserve space for argument
1434 __ fstp_s(at_rsp()); // pass float argument on stack 1441 __ fstp_s(at_rsp()); // pass float argument on stack
1435 __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::f2l), 1); 1442 __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::f2l), 1);
1436 break; 1443 break;
1437 case Bytecodes::_f2d: 1444 case Bytecodes::_f2d:
1438 /* nothing to do */ 1445 /* nothing to do */
1439 break; 1446 break;
1440 case Bytecodes::_d2i: 1447 case Bytecodes::_d2i:
1441 __ pushl(rcx); // reserve space for argument 1448 __ push(rcx); // reserve space for argument
1442 __ pushl(rcx); 1449 __ push(rcx);
1443 __ fstp_d(at_rsp()); // pass double argument on stack 1450 __ fstp_d(at_rsp()); // pass double argument on stack
1444 __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::d2i), 2); 1451 __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::d2i), 2);
1445 break; 1452 break;
1446 case Bytecodes::_d2l: 1453 case Bytecodes::_d2l:
1447 __ pushl(rcx); // reserve space for argument 1454 __ push(rcx); // reserve space for argument
1448 __ pushl(rcx); 1455 __ push(rcx);
1449 __ fstp_d(at_rsp()); // pass double argument on stack 1456 __ fstp_d(at_rsp()); // pass double argument on stack
1450 __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::d2l), 2); 1457 __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::d2l), 2);
1451 break; 1458 break;
1452 case Bytecodes::_d2f: 1459 case Bytecodes::_d2f:
1453 __ pushl(rcx); // reserve space for f2ieee() 1460 __ push(rcx); // reserve space for f2ieee()
1454 __ f2ieee(); // truncate to float size 1461 __ f2ieee(); // truncate to float size
1455 __ popl(rcx); // adjust rsp 1462 __ pop(rcx); // adjust rsp
1456 break; 1463 break;
1457 default : 1464 default :
1458 ShouldNotReachHere(); 1465 ShouldNotReachHere();
1459 } 1466 }
1460 } 1467 }
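
The LP64_ONLY(movsbl/movzwl/movswl) lines added to i2b/i2c/i2s make the extension into the full register explicit for the 64-bit build after the 32-bit shift-and-mask tricks. The bytecode semantics themselves, sketched in C++:

    #include <stdint.h>
    static int32_t i2b(int32_t x) { return (int8_t)x;   }  // sign-extend byte
    static int32_t i2c(int32_t x) { return (uint16_t)x; }  // zero-extend char
    static int32_t i2s(int32_t x) { return (int16_t)x;  }  // sign-extend short
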
1463 void TemplateTable::lcmp() { 1470 void TemplateTable::lcmp() {
1464 transition(ltos, itos); 1471 transition(ltos, itos);
1465 // y = rdx:rax 1472 // y = rdx:rax
1466 __ pop_l(rbx, rcx); // get x = rcx:rbx 1473 __ pop_l(rbx, rcx); // get x = rcx:rbx
1467 __ lcmp2int(rcx, rbx, rdx, rax);// rcx := cmp(x, y) 1474 __ lcmp2int(rcx, rbx, rdx, rax);// rcx := cmp(x, y)
1468 __ movl(rax, rcx); 1475 __ mov(rax, rcx);
1469 } 1476 }
1470 1477
1471 1478
1472 void TemplateTable::float_cmp(bool is_float, int unordered_result) { 1479 void TemplateTable::float_cmp(bool is_float, int unordered_result) {
1473 if (is_float) { 1480 if (is_float) {
1474 __ pop_ftos_to_rsp(); 1481 __ pop_ftos_to_rsp();
1475 __ fld_s(at_rsp()); 1482 __ fld_s(at_rsp());
1476 } else { 1483 } else {
1477 __ pop_dtos_to_rsp(); 1484 __ pop_dtos_to_rsp();
1478 __ fld_d(at_rsp()); 1485 __ fld_d(at_rsp());
1479 __ popl(rdx); 1486 __ pop(rdx);
1480 } 1487 }
1481 __ popl(rcx); 1488 __ pop(rcx);
1482 __ fcmp2int(rax, unordered_result < 0); 1489 __ fcmp2int(rax, unordered_result < 0);
1483 } 1490 }
1484 1491
1485 1492
1486 void TemplateTable::branch(bool is_jsr, bool is_wide) { 1493 void TemplateTable::branch(bool is_jsr, bool is_wide) {
1491 const ByteSize inv_offset = methodOopDesc::invocation_counter_offset() + InvocationCounter::counter_offset(); 1498 const ByteSize inv_offset = methodOopDesc::invocation_counter_offset() + InvocationCounter::counter_offset();
1492 const int method_offset = frame::interpreter_frame_method_offset * wordSize; 1499 const int method_offset = frame::interpreter_frame_method_offset * wordSize;
1493 1500
1494 // Load up EDX with the branch displacement 1501 // Load up EDX with the branch displacement
1495 __ movl(rdx, at_bcp(1)); 1502 __ movl(rdx, at_bcp(1));
1496 __ bswap(rdx); 1503 __ bswapl(rdx);
1497 if (!is_wide) __ sarl(rdx, 16); 1504 if (!is_wide) __ sarl(rdx, 16);
1505 LP64_ONLY(__ movslq(rdx, rdx));
1506
1498 1507
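
The branch displacement sits big-endian at bcp+1: movl grabs four bytes, bswapl flips them, and sarl 16 keeps the sign-extended 16-bit offset in the non-wide case (wide branches use all four bytes). The added LP64_ONLY(movslq) keeps rdx properly extended for the 64-bit addptr into the bcp. Decoding sketched in C++ (hypothetical helper, not HotSpot API):

    #include <stdint.h>
    static int32_t branch_offset(const uint8_t* bcp, bool is_wide) {
      if (is_wide)
        return (int32_t)(((uint32_t)bcp[1] << 24) | ((uint32_t)bcp[2] << 16) |
                         ((uint32_t)bcp[3] << 8)  |  (uint32_t)bcp[4]);
      return (int16_t)((bcp[1] << 8) | bcp[2]);   // sign-extend 2-byte offset
    }
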
1499 // Handle all the JSR stuff here, then exit. 1508 // Handle all the JSR stuff here, then exit.
1500 // It's much shorter and cleaner than intermingling with the 1509 // It's much shorter and cleaner than intermingling with the
1501 // non-JSR normal-branch stuff occurring below. 1510 // non-JSR normal-branch stuff occurring below.
1502 if (is_jsr) { 1511 if (is_jsr) {
1503 // Pre-load the next target bytecode into EBX 1512 // Pre-load the next target bytecode into EBX
1504 __ load_unsigned_byte(rbx, Address(rsi, rdx, Address::times_1, 0)); 1513 __ load_unsigned_byte(rbx, Address(rsi, rdx, Address::times_1, 0));
1505 1514
1506 // compute return address as bci in rax, 1515 // compute return address as bci in rax,
1507 __ leal(rax, at_bcp((is_wide ? 5 : 3) - in_bytes(constMethodOopDesc::codes_offset()))); 1516 __ lea(rax, at_bcp((is_wide ? 5 : 3) - in_bytes(constMethodOopDesc::codes_offset())));
1508 __ subl(rax, Address(rcx, methodOopDesc::const_offset())); 1517 __ subptr(rax, Address(rcx, methodOopDesc::const_offset()));
1509 // Adjust the bcp in ESI by the displacement in EDX 1518 // Adjust the bcp in ESI by the displacement in EDX
1510 __ addl(rsi, rdx); 1519 __ addptr(rsi, rdx);
1511 // Push return address 1520 // Push return address
1512 __ push_i(rax); 1521 __ push_i(rax);
1513 // jsr returns vtos 1522 // jsr returns vtos
1514 __ dispatch_only_noverify(vtos); 1523 __ dispatch_only_noverify(vtos);
1515 return; 1524 return;
1516 } 1525 }
1517 1526
1518 // Normal (non-jsr) branch handling 1527 // Normal (non-jsr) branch handling
1519 1528
1520 // Adjust the bcp in ESI by the displacement in EDX 1529 // Adjust the bcp in ESI by the displacement in EDX
1521 __ addl(rsi, rdx); 1530 __ addptr(rsi, rdx);
1522 1531
1523 assert(UseLoopCounter || !UseOnStackReplacement, "on-stack-replacement requires loop counters"); 1532 assert(UseLoopCounter || !UseOnStackReplacement, "on-stack-replacement requires loop counters");
1524 Label backedge_counter_overflow; 1533 Label backedge_counter_overflow;
1525 Label profile_method; 1534 Label profile_method;
1526 Label dispatch; 1535 Label dispatch;
1535 __ testl(rdx, rdx); // check if forward or backward branch 1544 __ testl(rdx, rdx); // check if forward or backward branch
1536 __ jcc(Assembler::positive, dispatch); // count only if backward branch 1545 __ jcc(Assembler::positive, dispatch); // count only if backward branch
1537 1546
1538 // increment counter 1547 // increment counter
1539 __ movl(rax, Address(rcx, be_offset)); // load backedge counter 1548 __ movl(rax, Address(rcx, be_offset)); // load backedge counter
1540 __ increment(rax, InvocationCounter::count_increment); // increment counter 1549 __ incrementl(rax, InvocationCounter::count_increment); // increment counter
1541 __ movl(Address(rcx, be_offset), rax); // store counter 1550 __ movl(Address(rcx, be_offset), rax); // store counter
1542 1551
1543 __ movl(rax, Address(rcx, inv_offset)); // load invocation counter 1552 __ movl(rax, Address(rcx, inv_offset)); // load invocation counter
1544 __ andl(rax, InvocationCounter::count_mask_value); // and the status bits 1553 __ andl(rax, InvocationCounter::count_mask_value); // and the status bits
1545 __ addl(rax, Address(rcx, be_offset)); // add both counters 1554 __ addl(rax, Address(rcx, be_offset)); // add both counters
1563 // methodDataOop, which value does not get reset on the call to 1572 // methodDataOop, which value does not get reset on the call to
1564 // frequency_counter_overflow(). To avoid excessive calls to the overflow 1573 // frequency_counter_overflow(). To avoid excessive calls to the overflow
1565 // routine while the method is being compiled, add a second test to make 1574 // routine while the method is being compiled, add a second test to make
1566 // sure the overflow function is called only once every overflow_frequency. 1575 // sure the overflow function is called only once every overflow_frequency.
1567 const int overflow_frequency = 1024; 1576 const int overflow_frequency = 1024;
1568 __ andl(rbx, overflow_frequency-1); 1577 __ andptr(rbx, overflow_frequency-1);
1569 __ jcc(Assembler::zero, backedge_counter_overflow); 1578 __ jcc(Assembler::zero, backedge_counter_overflow);
1570 1579
1571 } 1580 }
1572 } else { 1581 } else {
1573 if (UseOnStackReplacement) { 1582 if (UseOnStackReplacement) {
1594 if (ProfileInterpreter) { 1603 if (ProfileInterpreter) {
1595 // Out-of-line code to allocate method data oop. 1604 // Out-of-line code to allocate method data oop.
1596 __ bind(profile_method); 1605 __ bind(profile_method);
1597 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::profile_method), rsi); 1606 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::profile_method), rsi);
1598 __ load_unsigned_byte(rbx, Address(rsi, 0)); // restore target bytecode 1607 __ load_unsigned_byte(rbx, Address(rsi, 0)); // restore target bytecode
1599 __ movl(rcx, Address(rbp, method_offset)); 1608 __ movptr(rcx, Address(rbp, method_offset));
1600 __ movl(rcx, Address(rcx, in_bytes(methodOopDesc::method_data_offset()))); 1609 __ movptr(rcx, Address(rcx, in_bytes(methodOopDesc::method_data_offset())));
1601 __ movl(Address(rbp, frame::interpreter_frame_mdx_offset * wordSize), rcx); 1610 __ movptr(Address(rbp, frame::interpreter_frame_mdx_offset * wordSize), rcx);
1602 __ test_method_data_pointer(rcx, dispatch); 1611 __ test_method_data_pointer(rcx, dispatch);
1603 // offset non-null mdp by MDO::data_offset() + IR::profile_method() 1612 // offset non-null mdp by MDO::data_offset() + IR::profile_method()
1604 __ addl(rcx, in_bytes(methodDataOopDesc::data_offset())); 1613 __ addptr(rcx, in_bytes(methodDataOopDesc::data_offset()));
1605 __ addl(rcx, rax); 1614 __ addptr(rcx, rax);
1606 __ movl(Address(rbp, frame::interpreter_frame_mdx_offset * wordSize), rcx); 1615 __ movptr(Address(rbp, frame::interpreter_frame_mdx_offset * wordSize), rcx);
1607 __ jmp(dispatch); 1616 __ jmp(dispatch);
1608 } 1617 }
1609 1618
1610 if (UseOnStackReplacement) { 1619 if (UseOnStackReplacement) {
1611 1620
1612 // invocation counter overflow 1621 // invocation counter overflow
1613 __ bind(backedge_counter_overflow); 1622 __ bind(backedge_counter_overflow);
1614 __ negl(rdx); 1623 __ negptr(rdx);
1615 __ addl(rdx, rsi); // branch bcp 1624 __ addptr(rdx, rsi); // branch bcp
1616 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::frequency_counter_overflow), rdx); 1625 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::frequency_counter_overflow), rdx);
1617 __ load_unsigned_byte(rbx, Address(rsi, 0)); // restore target bytecode 1626 __ load_unsigned_byte(rbx, Address(rsi, 0)); // restore target bytecode
1618 1627
1619 // rax,: osr nmethod (osr ok) or NULL (osr not possible) 1628 // rax,: osr nmethod (osr ok) or NULL (osr not possible)
1620 // rbx,: target bytecode 1629 // rbx,: target bytecode
1621 // rdx: scratch 1630 // rdx: scratch
1622 // rdi: locals pointer 1631 // rdi: locals pointer
1623 // rsi: bcp 1632 // rsi: bcp
1624 __ testl(rax, rax); // test result 1633 __ testptr(rax, rax); // test result
1625 __ jcc(Assembler::zero, dispatch); // no osr if null 1634 __ jcc(Assembler::zero, dispatch); // no osr if null
1626 // nmethod may have been invalidated (VM may block upon call_VM return) 1635 // nmethod may have been invalidated (VM may block upon call_VM return)
1627 __ movl(rcx, Address(rax, nmethod::entry_bci_offset())); 1636 __ movl(rcx, Address(rax, nmethod::entry_bci_offset()));
1628 __ cmpl(rcx, InvalidOSREntryBci); 1637 __ cmpl(rcx, InvalidOSREntryBci);
1629 __ jcc(Assembler::equal, dispatch); 1638 __ jcc(Assembler::equal, dispatch);
1630 1639
1631 // We have the address of an on stack replacement routine in rax, 1640 // We have the address of an on stack replacement routine in rax,
1632 // We need to prepare to execute the OSR method. First we must 1641 // We need to prepare to execute the OSR method. First we must
1633 // migrate the locals and monitors off of the stack. 1642 // migrate the locals and monitors off of the stack.
1634 1643
1635 __ movl(rbx, rax); // save the nmethod 1644 __ mov(rbx, rax); // save the nmethod
1636 1645
1637 const Register thread = rcx; 1646 const Register thread = rcx;
1638 __ get_thread(thread); 1647 __ get_thread(thread);
1639 call_VM(noreg, CAST_FROM_FN_PTR(address, SharedRuntime::OSR_migration_begin)); 1648 call_VM(noreg, CAST_FROM_FN_PTR(address, SharedRuntime::OSR_migration_begin));
1640 // rax, is OSR buffer, move it to expected parameter location 1649 // rax, is OSR buffer, move it to expected parameter location
1641 __ movl(rcx, rax); 1650 __ mov(rcx, rax);
1642 1651
1643 // pop the interpreter frame 1652 // pop the interpreter frame
1644 __ movl(rdx, Address(rbp, frame::interpreter_frame_sender_sp_offset * wordSize)); // get sender sp 1653 __ movptr(rdx, Address(rbp, frame::interpreter_frame_sender_sp_offset * wordSize)); // get sender sp
1645 __ leave(); // remove frame anchor 1654 __ leave(); // remove frame anchor
1646 __ popl(rdi); // get return address 1655 __ pop(rdi); // get return address
1647 __ movl(rsp, rdx); // set sp to sender sp 1656 __ mov(rsp, rdx); // set sp to sender sp
1648 1657
1649 1658
1650 Label skip; 1659 Label skip;
1651 Label chkint; 1660 Label chkint;
1652 1661
1661 1670
1662 __ cmp32(rdi, ExternalAddress(StubRoutines::_call_stub_return_address)); 1671 __ cmp32(rdi, ExternalAddress(StubRoutines::_call_stub_return_address));
1663 __ jcc(Assembler::notEqual, chkint); 1672 __ jcc(Assembler::notEqual, chkint);
1664 1673
1665 // yes adjust to the specialized call stub return. 1674 // yes adjust to the specialized call stub return.
1666 assert(StubRoutines::i486::get_call_stub_compiled_return() != NULL, "must be set"); 1675 assert(StubRoutines::x86::get_call_stub_compiled_return() != NULL, "must be set");
1667 __ lea(rdi, ExternalAddress(StubRoutines::i486::get_call_stub_compiled_return())); 1676 __ lea(rdi, ExternalAddress(StubRoutines::x86::get_call_stub_compiled_return()));
1668 __ jmp(skip); 1677 __ jmp(skip);
1669 1678
1670 __ bind(chkint); 1679 __ bind(chkint);
1671 1680
1672 // Are we returning to the interpreter? Look for sentinel 1681 // Are we returning to the interpreter? Look for sentinel
1673 1682
1674 __ cmpl(Address(rdi, -8), Interpreter::return_sentinel); 1683 __ cmpl(Address(rdi, -2*wordSize), Interpreter::return_sentinel);
1675 __ jcc(Assembler::notEqual, skip); 1684 __ jcc(Assembler::notEqual, skip);
1676 1685
1677 // Adjust to compiled return back to interpreter 1686 // Adjust to compiled return back to interpreter
1678 1687
1679 __ movl(rdi, Address(rdi, -4)); 1688 __ movptr(rdi, Address(rdi, -wordSize));
1680 __ bind(skip); 1689 __ bind(skip);
1681 1690
1682 // Align stack pointer for compiled code (note that caller is 1691 // Align stack pointer for compiled code (note that caller is
1683 // responsible for undoing this fixup by remembering the old SP 1692 // responsible for undoing this fixup by remembering the old SP
1684 // in an rbp,-relative location) 1693 // in an rbp,-relative location)
1685 __ andl(rsp, -(StackAlignmentInBytes)); 1694 __ andptr(rsp, -(StackAlignmentInBytes));
1686 1695
1687 // push the (possibly adjusted) return address 1696 // push the (possibly adjusted) return address
1688 __ pushl(rdi); 1697 __ push(rdi);
1689 1698
1690 // and begin the OSR nmethod 1699 // and begin the OSR nmethod
1691 __ jmp(Address(rbx, nmethod::osr_entry_point_offset())); 1700 __ jmp(Address(rbx, nmethod::osr_entry_point_offset()));
1692 } 1701 }
1693 } 1702 }
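The sequence above is the whole OSR hand-off: migrate the interpreter's locals and monitors into a buffer, tear down the interpreter frame, then enter the compiled method with that buffer as its argument. A standalone C++ sketch of that shape (illustrative only; the real buffer layout is defined by SharedRuntime::OSR_migration_begin, and osr_entry here is a hypothetical stand-in for the nmethod's OSR entry point):

#include <cstdio>
#include <cstdlib>
#include <vector>

// Hypothetical "compiled" continuation: consumes the migrated locals.
static long osr_entry(const long* osr_buf, int nlocals) {
  long sum = 0;
  for (int i = 0; i < nlocals; i++) sum += osr_buf[i];  // resume the loop with migrated state
  return sum;
}

int main() {
  std::vector<long> interpreter_locals = {1, 2, 3, 4};  // state living in the interpreter frame

  // 1. migrate locals off the interpreter stack into an OSR buffer
  long* osr_buf = static_cast<long*>(malloc(interpreter_locals.size() * sizeof(long)));
  for (size_t i = 0; i < interpreter_locals.size(); i++) osr_buf[i] = interpreter_locals[i];

  // 2. the interpreter frame is now dead (the leave/pop in the assembly above)
  interpreter_locals.clear();

  // 3. begin the OSR method with the buffer as its only parameter
  long r = osr_entry(osr_buf, 4);
  free(osr_buf);                                        // real code: OSR_migration_end releases it
  printf("%ld\n", r);
  return 0;
}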
1721 1730
1722 void TemplateTable::if_nullcmp(Condition cc) { 1731 void TemplateTable::if_nullcmp(Condition cc) {
1723 transition(atos, vtos); 1732 transition(atos, vtos);
1724 // assume branch is more often taken than not (loops use backward branches) 1733 // assume branch is more often taken than not (loops use backward branches)
1725 Label not_taken; 1734 Label not_taken;
1726 __ testl(rax, rax); 1735 __ testptr(rax, rax);
1727 __ jcc(j_not(cc), not_taken); 1736 __ jcc(j_not(cc), not_taken);
1728 branch(false, false); 1737 branch(false, false);
1729 __ bind(not_taken); 1738 __ bind(not_taken);
1730 __ profile_not_taken_branch(rax); 1739 __ profile_not_taken_branch(rax);
1731 } 1740 }
1734 void TemplateTable::if_acmp(Condition cc) { 1743 void TemplateTable::if_acmp(Condition cc) {
1735 transition(atos, vtos); 1744 transition(atos, vtos);
1736 // assume branch is more often taken than not (loops use backward branches) 1745 // assume branch is more often taken than not (loops use backward branches)
1737 Label not_taken; 1746 Label not_taken;
1738 __ pop_ptr(rdx); 1747 __ pop_ptr(rdx);
1739 __ cmpl(rdx, rax); 1748 __ cmpptr(rdx, rax);
1740 __ jcc(j_not(cc), not_taken); 1749 __ jcc(j_not(cc), not_taken);
1741 branch(false, false); 1750 branch(false, false);
1742 __ bind(not_taken); 1751 __ bind(not_taken);
1743 __ profile_not_taken_branch(rax); 1752 __ profile_not_taken_branch(rax);
1744 } 1753 }
1745 1754
1746 1755
1747 void TemplateTable::ret() { 1756 void TemplateTable::ret() {
1748 transition(vtos, vtos); 1757 transition(vtos, vtos);
1749 locals_index(rbx); 1758 locals_index(rbx);
1750 __ movl(rbx, iaddress(rbx)); // get return bci, compute return bcp 1759 __ movptr(rbx, iaddress(rbx)); // get return bci, compute return bcp
1751 __ profile_ret(rbx, rcx); 1760 __ profile_ret(rbx, rcx);
1752 __ get_method(rax); 1761 __ get_method(rax);
1753 __ movl(rsi, Address(rax, methodOopDesc::const_offset())); 1762 __ movptr(rsi, Address(rax, methodOopDesc::const_offset()));
1754 __ leal(rsi, Address(rsi, rbx, Address::times_1, 1763 __ lea(rsi, Address(rsi, rbx, Address::times_1,
1755 constMethodOopDesc::codes_offset())); 1764 constMethodOopDesc::codes_offset()));
1756 __ dispatch_next(vtos); 1765 __ dispatch_next(vtos);
1757 } 1766 }
1758 1767
1759 1768
1760 void TemplateTable::wide_ret() { 1769 void TemplateTable::wide_ret() {
1761 transition(vtos, vtos); 1770 transition(vtos, vtos);
1762 locals_index_wide(rbx); 1771 locals_index_wide(rbx);
1763 __ movl(rbx, iaddress(rbx)); // get return bci, compute return bcp 1772 __ movptr(rbx, iaddress(rbx)); // get return bci, compute return bcp
1764 __ profile_ret(rbx, rcx); 1773 __ profile_ret(rbx, rcx);
1765 __ get_method(rax); 1774 __ get_method(rax);
1766 __ movl(rsi, Address(rax, methodOopDesc::const_offset())); 1775 __ movptr(rsi, Address(rax, methodOopDesc::const_offset()));
1767 __ leal(rsi, Address(rsi, rbx, Address::times_1, constMethodOopDesc::codes_offset())); 1776 __ lea(rsi, Address(rsi, rbx, Address::times_1, constMethodOopDesc::codes_offset()));
1768 __ dispatch_next(vtos); 1777 __ dispatch_next(vtos);
1769 } 1778 }
1770 1779
1771 1780
1772 void TemplateTable::tableswitch() { 1781 void TemplateTable::tableswitch() {
1773 Label default_case, continue_execution; 1782 Label default_case, continue_execution;
1774 transition(itos, vtos); 1783 transition(itos, vtos);
1775 // align rsi 1784 // align rsi
1776 __ leal(rbx, at_bcp(wordSize)); 1785 __ lea(rbx, at_bcp(wordSize));
1777 __ andl(rbx, -wordSize); 1786 __ andptr(rbx, -wordSize);
1778 // load lo & hi 1787 // load lo & hi
1779 __ movl(rcx, Address(rbx, 1 * wordSize)); 1788 __ movl(rcx, Address(rbx, 1 * wordSize));
1780 __ movl(rdx, Address(rbx, 2 * wordSize)); 1789 __ movl(rdx, Address(rbx, 2 * wordSize));
1781 __ bswap(rcx); 1790 __ bswapl(rcx);
1782 __ bswap(rdx); 1791 __ bswapl(rdx);
1783 // check against lo & hi 1792 // check against lo & hi
1784 __ cmpl(rax, rcx); 1793 __ cmpl(rax, rcx);
1785 __ jccb(Assembler::less, default_case); 1794 __ jccb(Assembler::less, default_case);
1786 __ cmpl(rax, rdx); 1795 __ cmpl(rax, rdx);
1787 __ jccb(Assembler::greater, default_case); 1796 __ jccb(Assembler::greater, default_case);
1788 // lookup dispatch offset 1797 // lookup dispatch offset
1789 __ subl(rax, rcx); 1798 __ subl(rax, rcx);
1790 __ movl(rdx, Address(rbx, rax, Address::times_4, 3 * wordSize)); 1799 __ movl(rdx, Address(rbx, rax, Address::times_4, 3 * BytesPerInt));
1791 __ profile_switch_case(rax, rbx, rcx); 1800 __ profile_switch_case(rax, rbx, rcx);
1792 // continue execution 1801 // continue execution
1793 __ bind(continue_execution); 1802 __ bind(continue_execution);
1794 __ bswap(rdx); 1803 __ bswapl(rdx);
1795 __ load_unsigned_byte(rbx, Address(rsi, rdx, Address::times_1)); 1804 __ load_unsigned_byte(rbx, Address(rsi, rdx, Address::times_1));
1796 __ addl(rsi, rdx); 1805 __ addptr(rsi, rdx);
1797 __ dispatch_only(vtos); 1806 __ dispatch_only(vtos);
1798 // handle default 1807 // handle default
1799 __ bind(default_case); 1808 __ bind(default_case);
1800 __ profile_switch_default(rax); 1809 __ profile_switch_default(rax);
1801 __ movl(rdx, Address(rbx, 0)); 1810 __ movl(rdx, Address(rbx, 0));
1810 1819
1811 1820
1812 void TemplateTable::fast_linearswitch() { 1821 void TemplateTable::fast_linearswitch() {
1813 transition(itos, vtos); 1822 transition(itos, vtos);
1814 Label loop_entry, loop, found, continue_execution; 1823 Label loop_entry, loop, found, continue_execution;
1815 // bswap rax, so we can avoid bswapping the table entries 1824 // bswapl rax, so we can avoid bswapping the table entries
1816 __ bswap(rax); 1825 __ bswapl(rax);
1817 // align rsi 1826 // align rsi
1818 __ leal(rbx, at_bcp(wordSize)); // btw: should be able to get rid of this instruction (change offsets below) 1827 __ lea(rbx, at_bcp(wordSize)); // btw: should be able to get rid of this instruction (change offsets below)
1819 __ andl(rbx, -wordSize); 1828 __ andptr(rbx, -wordSize);
1820 // set counter 1829 // set counter
1821 __ movl(rcx, Address(rbx, wordSize)); 1830 __ movl(rcx, Address(rbx, wordSize));
1822 __ bswap(rcx); 1831 __ bswapl(rcx);
1823 __ jmpb(loop_entry); 1832 __ jmpb(loop_entry);
1824 // table search 1833 // table search
1825 __ bind(loop); 1834 __ bind(loop);
1826 __ cmpl(rax, Address(rbx, rcx, Address::times_8, 2 * wordSize)); 1835 __ cmpl(rax, Address(rbx, rcx, Address::times_8, 2 * wordSize));
1827 __ jccb(Assembler::equal, found); 1836 __ jccb(Assembler::equal, found);
1828 __ bind(loop_entry); 1837 __ bind(loop_entry);
1829 __ decrement(rcx); 1838 __ decrementl(rcx);
1830 __ jcc(Assembler::greaterEqual, loop); 1839 __ jcc(Assembler::greaterEqual, loop);
1831 // default case 1840 // default case
1832 __ profile_switch_default(rax); 1841 __ profile_switch_default(rax);
1833 __ movl(rdx, Address(rbx, 0)); 1842 __ movl(rdx, Address(rbx, 0));
1834 __ jmpb(continue_execution); 1843 __ jmpb(continue_execution);
1836 __ bind(found); 1845 __ bind(found);
1837 __ movl(rdx, Address(rbx, rcx, Address::times_8, 3 * wordSize)); 1846 __ movl(rdx, Address(rbx, rcx, Address::times_8, 3 * wordSize));
1838 __ profile_switch_case(rcx, rax, rbx); 1847 __ profile_switch_case(rcx, rax, rbx);
1839 // continue execution 1848 // continue execution
1840 __ bind(continue_execution); 1849 __ bind(continue_execution);
1841 __ bswap(rdx); 1850 __ bswapl(rdx);
1842 __ load_unsigned_byte(rbx, Address(rsi, rdx, Address::times_1)); 1851 __ load_unsigned_byte(rbx, Address(rsi, rdx, Address::times_1));
1843 __ addl(rsi, rdx); 1852 __ addptr(rsi, rdx);
1844 __ dispatch_only(vtos); 1853 __ dispatch_only(vtos);
1845 } 1854 }
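Both tableswitch and lookupswitch operands begin at the next 4-byte boundary after the opcode and are stored big-endian in the class file, which is why the generated code aligns rbx and bswap's every word it reads. A minimal C++ decoder for the tableswitch case (a sketch; read_be32 plays the role of bswapl):

#include <cstdint>
#include <cstdio>

static int32_t read_be32(const uint8_t* p) {            // the bswapl analogue
  uint32_t v = ((uint32_t)p[0] << 24) | ((uint32_t)p[1] << 16) |
               ((uint32_t)p[2] << 8)  |  (uint32_t)p[3];
  return (int32_t)v;
}

// Returns the branch offset a tableswitch at 'bci' takes for 'key'.
static int32_t tableswitch_offset(const uint8_t* code, int bci, int32_t key) {
  int pad = ((bci + 4) & ~3) - (bci + 1);               // align past the opcode
  const uint8_t* p = code + bci + 1 + pad;
  int32_t def = read_be32(p);                           // default, lo, hi, then the table
  int32_t lo  = read_be32(p + 4);
  int32_t hi  = read_be32(p + 8);
  if (key < lo || key > hi) return def;                 // default case
  return read_be32(p + 12 + 4 * (key - lo));            // jump table entry
}

int main() {
  // opcode 0xaa at bci 0, 3 pad bytes, default=100, lo=1, hi=2, offsets 10 and 20
  uint8_t code[] = {0xaa, 0,0,0, 0,0,0,100, 0,0,0,1, 0,0,0,2, 0,0,0,10, 0,0,0,20};
  printf("%d %d %d\n", tableswitch_offset(code, 0, 1),
                       tableswitch_offset(code, 0, 2),
                       tableswitch_offset(code, 0, 5)); // 10 20 100
  return 0;
}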
1846 1855
1847 1856
1848 void TemplateTable::fast_binaryswitch() { 1857 void TemplateTable::fast_binaryswitch() {
1880 const Register h = rdi; // needs to be restored 1889 const Register h = rdi; // needs to be restored
1881 const Register temp = rsi; 1890 const Register temp = rsi;
1882 // setup array 1891 // setup array
1883 __ save_bcp(); 1892 __ save_bcp();
1884 1893
1885 __ leal(array, at_bcp(3*wordSize)); // btw: should be able to get rid of this instruction (change offsets below) 1894 __ lea(array, at_bcp(3*wordSize)); // btw: should be able to get rid of this instruction (change offsets below)
1886 __ andl(array, -wordSize); 1895 __ andptr(array, -wordSize);
1887 // initialize i & j 1896 // initialize i & j
1888 __ xorl(i, i); // i = 0; 1897 __ xorl(i, i); // i = 0;
1889 __ movl(j, Address(array, -wordSize)); // j = length(array); 1898 __ movl(j, Address(array, -wordSize)); // j = length(array);
1890 // Convert j into native byteordering 1899 // Convert j into native byteordering
1891 __ bswap(j); 1900 __ bswapl(j);
1892 // and start 1901 // and start
1893 Label entry; 1902 Label entry;
1894 __ jmp(entry); 1903 __ jmp(entry);
1895 1904
1896 // binary search loop 1905 // binary search loop
1904 // } else { 1913 // } else {
1905 // i = h; 1914 // i = h;
1906 // } 1915 // }
1907 // Convert array[h].match to native byte-ordering before compare 1916 // Convert array[h].match to native byte-ordering before compare
1908 __ movl(temp, Address(array, h, Address::times_8, 0*wordSize)); 1917 __ movl(temp, Address(array, h, Address::times_8, 0*wordSize));
1909 __ bswap(temp); 1918 __ bswapl(temp);
1910 __ cmpl(key, temp); 1919 __ cmpl(key, temp);
1911 if (VM_Version::supports_cmov()) { 1920 if (VM_Version::supports_cmov()) {
1912 __ cmovl(Assembler::less , j, h); // j = h if (key < array[h].fast_match()) 1921 __ cmovl(Assembler::less , j, h); // j = h if (key < array[h].fast_match())
1913 __ cmovl(Assembler::greaterEqual, i, h); // i = h if (key >= array[h].fast_match()) 1922 __ cmovl(Assembler::greaterEqual, i, h); // i = h if (key >= array[h].fast_match())
1914 } else { 1923 } else {
1915 Label set_i, end_of_if; 1924 Label set_i, end_of_if;
1916 __ jccb(Assembler::greaterEqual, set_i); // { 1925 __ jccb(Assembler::greaterEqual, set_i); // {
1917 __ movl(j, h); // j = h; 1926 __ mov(j, h); // j = h;
1918 __ jmp(end_of_if); // } 1927 __ jmp(end_of_if); // }
1919 __ bind(set_i); // else { 1928 __ bind(set_i); // else {
1920 __ movl(i, h); // i = h; 1929 __ mov(i, h); // i = h;
1921 __ bind(end_of_if); // } 1930 __ bind(end_of_if); // }
1922 } 1931 }
1923 // while (i+1 < j) 1932 // while (i+1 < j)
1924 __ bind(entry); 1933 __ bind(entry);
1925 __ leal(h, Address(i, 1)); // i+1 1934 __ leal(h, Address(i, 1)); // i+1
1926 __ cmpl(h, j); // i+1 < j 1935 __ cmpl(h, j); // i+1 < j
1929 1938
1930 // end of binary search, result index is i (must check again!) 1939 // end of binary search, result index is i (must check again!)
1931 Label default_case; 1940 Label default_case;
1932 // Convert array[i].match to native byte-ordering before compare 1941 // Convert array[i].match to native byte-ordering before compare
1933 __ movl(temp, Address(array, i, Address::times_8, 0*wordSize)); 1942 __ movl(temp, Address(array, i, Address::times_8, 0*wordSize));
1934 __ bswap(temp); 1943 __ bswapl(temp);
1935 __ cmpl(key, temp); 1944 __ cmpl(key, temp);
1936 __ jcc(Assembler::notEqual, default_case); 1945 __ jcc(Assembler::notEqual, default_case);
1937 1946
1938 // entry found -> j = offset 1947 // entry found -> j = offset
1939 __ movl(j , Address(array, i, Address::times_8, 1*wordSize)); 1948 __ movl(j , Address(array, i, Address::times_8, 1*wordSize));
1940 __ profile_switch_case(i, key, array); 1949 __ profile_switch_case(i, key, array);
1941 __ bswap(j); 1950 __ bswapl(j);
1951 LP64_ONLY(__ movslq(j, j));
1942 __ restore_bcp(); 1952 __ restore_bcp();
1943 __ restore_locals(); // restore rdi 1953 __ restore_locals(); // restore rdi
1944 __ load_unsigned_byte(rbx, Address(rsi, j, Address::times_1)); 1954 __ load_unsigned_byte(rbx, Address(rsi, j, Address::times_1));
1945 1955
1946 __ addl(rsi, j); 1956 __ addptr(rsi, j);
1947 __ dispatch_only(vtos); 1957 __ dispatch_only(vtos);
1948 1958
1949 // default case -> j = default offset 1959 // default case -> j = default offset
1950 __ bind(default_case); 1960 __ bind(default_case);
1951 __ profile_switch_default(i); 1961 __ profile_switch_default(i);
1952 __ movl(j, Address(array, -2*wordSize)); 1962 __ movl(j, Address(array, -2*wordSize));
1953 __ bswap(j); 1963 __ bswapl(j);
1964 LP64_ONLY(__ movslq(j, j));
1954 __ restore_bcp(); 1965 __ restore_bcp();
1955 __ restore_locals(); // restore rdi 1966 __ restore_locals(); // restore rdi
1956 __ load_unsigned_byte(rbx, Address(rsi, j, Address::times_1)); 1967 __ load_unsigned_byte(rbx, Address(rsi, j, Address::times_1));
1957 __ addl(rsi, j); 1968 __ addptr(rsi, j);
1958 __ dispatch_only(vtos); 1969 __ dispatch_only(vtos);
1959 } 1970 }
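The loop above is the binary search spelled out in the comments: keep i and j so that array[i].match <= key < array[j].match, bisect with h, and re-check array[i] at the end because the loop alone does not prove a hit. Restated as plain C++ (a sketch over already byte-swapped pairs):

#include <cstdio>

struct Pair { int match; int offset; };

static int binaryswitch_offset(const Pair* array, int length, int key, int def) {
  if (length == 0) return def;
  int i = 0, j = length;
  while (i + 1 < j) {
    int h = (i + j) >> 1;             // i < h < j
    if (key < array[h].match) j = h;  // cmovl(less, j, h)
    else                      i = h;  // cmovl(greaterEqual, i, h)
  }
  return (array[i].match == key) ? array[i].offset : def;  // must check again!
}

int main() {
  Pair pairs[] = {{-3, 11}, {0, 22}, {7, 33}};  // sorted by match value
  printf("%d %d\n", binaryswitch_offset(pairs, 3, 7, -1),   // 33
                    binaryswitch_offset(pairs, 3, 5, -1));  // -1 (default)
  return 0;
}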
1960 1971
1961 1972
1962 void TemplateTable::_return(TosState state) { 1973 void TemplateTable::_return(TosState state) {
1963 transition(state, state); 1974 transition(state, state);
1964 assert(_desc->calls_vm(), "inconsistent calls_vm information"); // call in remove_activation 1975 assert(_desc->calls_vm(), "inconsistent calls_vm information"); // call in remove_activation
1965 1976
1966 if (_desc->bytecode() == Bytecodes::_return_register_finalizer) { 1977 if (_desc->bytecode() == Bytecodes::_return_register_finalizer) {
1967 assert(state == vtos, "only valid state"); 1978 assert(state == vtos, "only valid state");
1968 __ movl(rax, aaddress(0)); 1979 __ movptr(rax, aaddress(0));
1969 __ movl(rdi, Address(rax, oopDesc::klass_offset_in_bytes())); 1980 __ movptr(rdi, Address(rax, oopDesc::klass_offset_in_bytes()));
1970 __ movl(rdi, Address(rdi, Klass::access_flags_offset_in_bytes() + sizeof(oopDesc))); 1981 __ movl(rdi, Address(rdi, Klass::access_flags_offset_in_bytes() + sizeof(oopDesc)));
1971 __ testl(rdi, JVM_ACC_HAS_FINALIZER); 1982 __ testl(rdi, JVM_ACC_HAS_FINALIZER);
1972 Label skip_register_finalizer; 1983 Label skip_register_finalizer;
1973 __ jcc(Assembler::zero, skip_register_finalizer); 1984 __ jcc(Assembler::zero, skip_register_finalizer);
1974 1985
2005 // previous memory refs). Requirements (2) and (3) require some barriers 2016 // previous memory refs). Requirements (2) and (3) require some barriers
2006 // before volatile stores and after volatile loads. These nearly cover 2017 // before volatile stores and after volatile loads. These nearly cover
2007 // requirement (1) but miss the volatile-store-volatile-load case. This final 2018 // requirement (1) but miss the volatile-store-volatile-load case. This final
2008 // case is placed after volatile-stores although it could just as well go 2019 // case is placed after volatile-stores although it could just as well go
2009 // before volatile-loads. 2020 // before volatile-loads.
2010 void TemplateTable::volatile_barrier( ) { 2021 void TemplateTable::volatile_barrier(Assembler::Membar_mask_bits order_constraint ) {
2011 // Helper function to insert a is-volatile test and memory barrier 2022 // Helper function to insert a is-volatile test and memory barrier
2012 if( !os::is_MP() ) return; // Not needed on single CPU 2023 if( !os::is_MP() ) return; // Not needed on single CPU
2013 __ membar(); 2024 __ membar(order_constraint);
2014 } 2025 }
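On x86 only the StoreLoad ordering needs an actual instruction, which is why the mask now passed to volatile_barrier matters: it records which of the four orderings a call site requires. A portable C++ analogue of the two interesting call sites (a sketch using std::atomic, not HotSpot code):

#include <atomic>
#include <cstdio>

std::atomic<int> data{0};

void volatile_store(int v) {
  // release ordering supplies the StoreStore/LoadStore needed before the store
  data.store(v, std::memory_order_release);
  // a full fence afterwards supplies StoreLoad, matching the
  // volatile_barrier(StoreLoad | StoreStore) placed after volatile stores
  std::atomic_thread_fence(std::memory_order_seq_cst);
}

int volatile_load() {
  // acquire ordering supplies the LoadLoad/LoadStore needed after the load
  return data.load(std::memory_order_acquire);
}

int main() {
  volatile_store(42);
  printf("%d\n", volatile_load());
  return 0;
}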
2015 2026
2016 void TemplateTable::resolve_cache_and_index(int byte_no, Register Rcache, Register index) { 2027 void TemplateTable::resolve_cache_and_index(int byte_no, Register Rcache, Register index) {
2017 assert(byte_no == 1 || byte_no == 2, "byte_no out of range"); 2028 assert(byte_no == 1 || byte_no == 2, "byte_no out of range");
2018 2029
2021 assert_different_registers(Rcache, index, temp); 2032 assert_different_registers(Rcache, index, temp);
2022 2033
2023 const int shift_count = (1 + byte_no)*BitsPerByte; 2034 const int shift_count = (1 + byte_no)*BitsPerByte;
2024 Label resolved; 2035 Label resolved;
2025 __ get_cache_and_index_at_bcp(Rcache, index, 1); 2036 __ get_cache_and_index_at_bcp(Rcache, index, 1);
2026 __ movl(temp, Address(Rcache, index, Address::times_4, constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::indices_offset())); 2037 __ movl(temp, Address(Rcache,
2038 index,
2039 Address::times_ptr,
2040 constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::indices_offset()));
2027 __ shrl(temp, shift_count); 2041 __ shrl(temp, shift_count);
2028 // have we resolved this bytecode? 2042 // have we resolved this bytecode?
2029 __ andl(temp, 0xFF); 2043 __ andptr(temp, 0xFF);
2030 __ cmpl(temp, (int)bytecode()); 2044 __ cmpl(temp, (int)bytecode());
2031 __ jcc(Assembler::equal, resolved); 2045 __ jcc(Assembler::equal, resolved);
2032 2046
2033 // resolve first time through 2047 // resolve first time through
2034 address entry; 2048 address entry;
2060 bool is_static = false) { 2074 bool is_static = false) {
2061 assert_different_registers(cache, index, flags, off); 2075 assert_different_registers(cache, index, flags, off);
2062 2076
2063 ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset(); 2077 ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset();
2064 // Field offset 2078 // Field offset
2065 __ movl(off, Address(cache, index, Address::times_4, 2079 __ movptr(off, Address(cache, index, Address::times_ptr,
2066 in_bytes(cp_base_offset + ConstantPoolCacheEntry::f2_offset()))); 2080 in_bytes(cp_base_offset + ConstantPoolCacheEntry::f2_offset())));
2067 // Flags 2081 // Flags
2068 __ movl(flags, Address(cache, index, Address::times_4, 2082 __ movl(flags, Address(cache, index, Address::times_ptr,
2069 in_bytes(cp_base_offset + ConstantPoolCacheEntry::flags_offset()))); 2083 in_bytes(cp_base_offset + ConstantPoolCacheEntry::flags_offset())));
2070 2084
2071 // klass overwrite register 2085 // klass overwrite register
2072 if (is_static) { 2086 if (is_static) {
2073 __ movl(obj, Address(cache, index, Address::times_4, 2087 __ movptr(obj, Address(cache, index, Address::times_ptr,
2074 in_bytes(cp_base_offset + ConstantPoolCacheEntry::f1_offset()))); 2088 in_bytes(cp_base_offset + ConstantPoolCacheEntry::f1_offset())));
2075 } 2089 }
2076 } 2090 }
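These loads assume the constant-pool-cache entry layout: a word of packed indices (including one byte per resolved bytecode, which resolve_cache_and_index tests with the shift/mask above), then f1, f2, and flags. A sketch of that test in plain C++ (the four-word struct below is a model, not the real class):

#include <cstdint>
#include <cstdio>

struct CpCacheEntry {
  uintptr_t indices;  // constant pool index plus resolved bytecodes
  uintptr_t f1;       // e.g. klass for static fields
  uintptr_t f2;       // field offset (or methodOop/vtable index for calls)
  uintptr_t flags;    // tos state, volatile bit, ...
};

// resolve_cache_and_index's "have we resolved this bytecode?" test:
static bool is_resolved(const CpCacheEntry& e, int byte_no, int bytecode) {
  int shift = (1 + byte_no) * 8;                 // (1 + byte_no) * BitsPerByte
  return ((e.indices >> shift) & 0xFF) == (unsigned)bytecode;
}

int main() {
  CpCacheEntry e = {0, 0, 0, 0};
  e.indices |= (uintptr_t)0xb4 << 16;            // pretend getfield (0xb4) resolved at byte_no 1
  e.f2 = 24;                                     // field offset within the object
  printf("%d offset=%zu\n", (int)is_resolved(e, 1, 0xb4), (size_t)e.f2);
  return 0;
}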
2077 2091
2078 void TemplateTable::load_invoke_cp_cache_entry(int byte_no, 2092 void TemplateTable::load_invoke_cp_cache_entry(int byte_no,
2079 Register method, 2093 Register method,
2102 const int index_offset = in_bytes(constantPoolCacheOopDesc::base_offset() + 2116 const int index_offset = in_bytes(constantPoolCacheOopDesc::base_offset() +
2103 ConstantPoolCacheEntry::f2_offset()); 2117 ConstantPoolCacheEntry::f2_offset());
2104 2118
2105 resolve_cache_and_index(byte_no, cache, index); 2119 resolve_cache_and_index(byte_no, cache, index);
2106 2120
2107 assert(wordSize == 4, "adjust code below"); 2121 __ movptr(method, Address(cache, index, Address::times_ptr, method_offset));
2108 __ movl(method, Address(cache, index, Address::times_4, method_offset));
2109 if (itable_index != noreg) { 2122 if (itable_index != noreg) {
2110 __ movl(itable_index, Address(cache, index, Address::times_4, index_offset)); 2123 __ movptr(itable_index, Address(cache, index, Address::times_ptr, index_offset));
2111 } 2124 }
2112 __ movl(flags , Address(cache, index, Address::times_4, flags_offset )); 2125 __ movl(flags , Address(cache, index, Address::times_ptr, flags_offset ));
2113 } 2126 }
2114 2127
2115 2128
2116 // The registers cache and index are expected to be set before the call. 2129 // The registers cache and index are expected to be set before the call.
2117 // Correct values of the cache and index registers are preserved. 2130 // Correct values of the cache and index registers are preserved.
2127 __ mov32(rax, ExternalAddress((address) JvmtiExport::get_field_access_count_addr())); 2140 __ mov32(rax, ExternalAddress((address) JvmtiExport::get_field_access_count_addr()));
2128 __ testl(rax,rax); 2141 __ testl(rax,rax);
2129 __ jcc(Assembler::zero, L1); 2142 __ jcc(Assembler::zero, L1);
2130 2143
2131 // cache entry pointer 2144 // cache entry pointer
2132 __ addl(cache, in_bytes(constantPoolCacheOopDesc::base_offset())); 2145 __ addptr(cache, in_bytes(constantPoolCacheOopDesc::base_offset()));
2133 __ shll(index, LogBytesPerWord); 2146 __ shll(index, LogBytesPerWord);
2134 __ addl(cache, index); 2147 __ addptr(cache, index);
2135 if (is_static) { 2148 if (is_static) {
2136 __ movl(rax, 0); // NULL object reference 2149 __ xorptr(rax, rax); // NULL object reference
2137 } else { 2150 } else {
2138 __ pop(atos); // Get the object 2151 __ pop(atos); // Get the object
2139 __ verify_oop(rax); 2152 __ verify_oop(rax);
2140 __ push(atos); // Restore stack state 2153 __ push(atos); // Restore stack state
2141 } 2154 }
2175 Label Done, notByte, notInt, notShort, notChar, notLong, notFloat, notObj, notDouble; 2188 Label Done, notByte, notInt, notShort, notChar, notLong, notFloat, notObj, notDouble;
2176 2189
2177 __ shrl(flags, ConstantPoolCacheEntry::tosBits); 2190 __ shrl(flags, ConstantPoolCacheEntry::tosBits);
2178 assert(btos == 0, "change code, btos != 0"); 2191 assert(btos == 0, "change code, btos != 0");
2179 // btos 2192 // btos
2180 __ andl(flags, 0x0f); 2193 __ andptr(flags, 0x0f);
2181 __ jcc(Assembler::notZero, notByte); 2194 __ jcc(Assembler::notZero, notByte);
2182 2195
2183 __ load_signed_byte(rax, lo ); 2196 __ load_signed_byte(rax, lo );
2184 __ push(btos); 2197 __ push(btos);
2185 // Rewrite bytecode to be faster 2198 // Rewrite bytecode to be faster
2243 __ jcc(Assembler::notEqual, notLong); 2256 __ jcc(Assembler::notEqual, notLong);
2244 2257
2245 // Generate code as if volatile. There just aren't enough registers to 2258 // Generate code as if volatile. There just aren't enough registers to
2246 // save that information and this code is faster than the test. 2259 // save that information and this code is faster than the test.
2247 __ fild_d(lo); // Must load atomically 2260 __ fild_d(lo); // Must load atomically
2248 __ subl(rsp,2*wordSize); // Make space for store 2261 __ subptr(rsp,2*wordSize); // Make space for store
2249 __ fistp_d(Address(rsp,0)); 2262 __ fistp_d(Address(rsp,0));
2250 __ popl(rax); 2263 __ pop(rax);
2251 __ popl(rdx); 2264 __ pop(rdx);
2252 2265
2253 __ push(ltos); 2266 __ push(ltos);
2254 // Don't rewrite to _fast_lgetfield for potential volatile case. 2267 // Don't rewrite to _fast_lgetfield for potential volatile case.
2255 __ jmp(Done); 2268 __ jmp(Done);
2256 2269
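The fild_d/fistp_d pair above exists because a volatile Java long must be read in a single indivisible 64-bit access, and on 32-bit x86 the x87 unit provides one. The portable C++ statement of the same guarantee (a sketch; std::atomic typically compiles to lock cmpxchg8b on IA-32):

#include <atomic>
#include <cstdio>

std::atomic<long long> field{0};   // 8-byte field that may be volatile

long long atomic_long_get() {
  // one indivisible 64-bit read; never a torn pair of 32-bit loads
  return field.load(std::memory_order_acquire);
}

int main() {
  field.store(0x123456789abcdefLL);
  printf("%llx\n", (unsigned long long)atomic_long_get());
  return 0;
}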
2317 // registers have to be correspondingly used after this line. 2330 // registers have to be correspondingly used after this line.
2318 __ get_cache_and_index_at_bcp(rax, rdx, 1); 2331 __ get_cache_and_index_at_bcp(rax, rdx, 1);
2319 2332
2320 if (is_static) { 2333 if (is_static) {
2321 // Life is simple. Null out the object pointer. 2334 // Life is simple. Null out the object pointer.
2322 __ xorl(rbx, rbx); 2335 __ xorptr(rbx, rbx);
2323 } else { 2336 } else {
2324 // Life is harder. The stack holds the value on top, followed by the object. 2337 // Life is harder. The stack holds the value on top, followed by the object.
2325 // We don't know the size of the value, though; it could be one or two words 2338 // We don't know the size of the value, though; it could be one or two words
2326 // depending on its type. As a result, we must find the type to determine where 2339 // depending on its type. As a result, we must find the type to determine where
2327 // the object is. 2340 // the object is.
2328 Label two_word, valsize_known; 2341 Label two_word, valsize_known;
2329 __ movl(rcx, Address(rax, rdx, Address::times_4, in_bytes(cp_base_offset + 2342 __ movl(rcx, Address(rax, rdx, Address::times_ptr, in_bytes(cp_base_offset +
2330 ConstantPoolCacheEntry::flags_offset()))); 2343 ConstantPoolCacheEntry::flags_offset())));
2331 __ movl(rbx, rsp); 2344 __ mov(rbx, rsp);
2332 __ shrl(rcx, ConstantPoolCacheEntry::tosBits); 2345 __ shrl(rcx, ConstantPoolCacheEntry::tosBits);
2333 // Make sure we don't need to mask rcx for tosBits after the above shift 2346 // Make sure we don't need to mask rcx for tosBits after the above shift
2334 ConstantPoolCacheEntry::verify_tosBits(); 2347 ConstantPoolCacheEntry::verify_tosBits();
2335 __ cmpl(rcx, ltos); 2348 __ cmpl(rcx, ltos);
2336 __ jccb(Assembler::equal, two_word); 2349 __ jccb(Assembler::equal, two_word);
2337 __ cmpl(rcx, dtos); 2350 __ cmpl(rcx, dtos);
2338 __ jccb(Assembler::equal, two_word); 2351 __ jccb(Assembler::equal, two_word);
2339 __ addl(rbx, Interpreter::expr_offset_in_bytes(1)); // one word jvalue (not ltos, dtos) 2352 __ addptr(rbx, Interpreter::expr_offset_in_bytes(1)); // one word jvalue (not ltos, dtos)
2340 __ jmpb(valsize_known); 2353 __ jmpb(valsize_known);
2341 2354
2342 __ bind(two_word); 2355 __ bind(two_word);
2343 __ addl(rbx, Interpreter::expr_offset_in_bytes(2)); // two words jvalue 2356 __ addptr(rbx, Interpreter::expr_offset_in_bytes(2)); // two words jvalue
2344 2357
2345 __ bind(valsize_known); 2358 __ bind(valsize_known);
2346 // setup object pointer 2359 // setup object pointer
2347 __ movl(rbx, Address(rbx, 0)); 2360 __ movptr(rbx, Address(rbx, 0));
2348 } 2361 }
2349 // cache entry pointer 2362 // cache entry pointer
2350 __ addl(rax, in_bytes(cp_base_offset)); 2363 __ addptr(rax, in_bytes(cp_base_offset));
2351 __ shll(rdx, LogBytesPerWord); 2364 __ shll(rdx, LogBytesPerWord);
2352 __ addl(rax, rdx); 2365 __ addptr(rax, rdx);
2353 // object (tos) 2366 // object (tos)
2354 __ movl(rcx, rsp); 2367 __ mov(rcx, rsp);
2355 // rbx,: object pointer set up above (NULL if static) 2368 // rbx,: object pointer set up above (NULL if static)
2356 // rax,: cache entry pointer 2369 // rax,: cache entry pointer
2357 // rcx: jvalue object on the stack 2370 // rcx: jvalue object on the stack
2358 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), 2371 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification),
2359 rbx, rax, rcx); 2372 rbx, rax, rcx);
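The value-size dance above locates the object reference sitting beneath a value of unknown width on the expression stack. The same computation in plain C++, for a downward-growing stack of word-sized slots (the layout and tags here are illustrative, not Interpreter::expr_offset_in_bytes itself):

#include <cstdint>
#include <cstdio>

enum Tos { itos_, ltos_, dtos_ };                 // illustrative tags

// sp points at the value on top; longs/doubles occupy two word slots.
static uintptr_t* object_slot(uintptr_t* sp, Tos tos) {
  int value_slots = (tos == ltos_ || tos == dtos_) ? 2 : 1;
  return sp + value_slots;                        // the objectref sits just below
}

int main() {
  // [long value: 2 slots][objectref]
  uintptr_t stack[3] = {42, 0, 0xdeadbeef};
  printf("%lx\n", (unsigned long)*object_slot(stack, ltos_)); // deadbeef
  return 0;
}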
2424 __ jcc(Assembler::notEqual, notObj); 2437 __ jcc(Assembler::notEqual, notObj);
2425 2438
2426 __ pop(atos); 2439 __ pop(atos);
2427 if (!is_static) pop_and_check_object(obj); 2440 if (!is_static) pop_and_check_object(obj);
2428 2441
2429 __ movl(lo, rax ); 2442 __ movptr(lo, rax );
2430 __ store_check(obj, lo); // Need to mark card 2443 __ store_check(obj, lo); // Need to mark card
2431 if (!is_static) { 2444 if (!is_static) {
2432 patch_bytecode(Bytecodes::_fast_aputfield, rcx, rbx); 2445 patch_bytecode(Bytecodes::_fast_aputfield, rcx, rbx);
2433 } 2446 }
2434 __ jmp(Done); 2447 __ jmp(Done);
2470 2483
2471 __ pop(ltos); // overwrites rdx, do this after testing volatile. 2484 __ pop(ltos); // overwrites rdx, do this after testing volatile.
2472 if (!is_static) pop_and_check_object(obj); 2485 if (!is_static) pop_and_check_object(obj);
2473 2486
2474 // Replace with real volatile test 2487 // Replace with real volatile test
2475 __ pushl(rdx); 2488 __ push(rdx);
2476 __ pushl(rax); // Must update atomically with FIST 2489 __ push(rax); // Must update atomically with FIST
2477 __ fild_d(Address(rsp,0)); // So load into FPU register 2490 __ fild_d(Address(rsp,0)); // So load into FPU register
2478 __ fistp_d(lo); // and put into memory atomically 2491 __ fistp_d(lo); // and put into memory atomically
2479 __ addl(rsp,2*wordSize); 2492 __ addptr(rsp, 2*wordSize);
2480 volatile_barrier(); 2493 // volatile_barrier();
2494 volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
2495 Assembler::StoreStore));
2481 // Don't rewrite volatile version 2496 // Don't rewrite volatile version
2482 __ jmp(notVolatile); 2497 __ jmp(notVolatile);
2483 2498
2484 __ bind(notVolatileLong); 2499 __ bind(notVolatileLong);
2485 2500
2486 __ pop(ltos); // overwrites rdx 2501 __ pop(ltos); // overwrites rdx
2487 if (!is_static) pop_and_check_object(obj); 2502 if (!is_static) pop_and_check_object(obj);
2488 __ movl(hi, rdx); 2503 NOT_LP64(__ movptr(hi, rdx));
2489 __ movl(lo, rax); 2504 __ movptr(lo, rax);
2490 if (!is_static) { 2505 if (!is_static) {
2491 patch_bytecode(Bytecodes::_fast_lputfield, rcx, rbx); 2506 patch_bytecode(Bytecodes::_fast_lputfield, rcx, rbx);
2492 } 2507 }
2493 __ jmp(notVolatile); 2508 __ jmp(notVolatile);
2494 2509
2525 __ bind(Done); 2540 __ bind(Done);
2526 2541
2527 // Check for volatile store 2542 // Check for volatile store
2528 __ testl(rdx, rdx); 2543 __ testl(rdx, rdx);
2529 __ jcc(Assembler::zero, notVolatile); 2544 __ jcc(Assembler::zero, notVolatile);
2530 volatile_barrier( ); 2545 volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
2546 Assembler::StoreStore));
2531 __ bind(notVolatile); 2547 __ bind(notVolatile);
2532 } 2548 }
2533 2549
2534 2550
2535 void TemplateTable::putfield(int byte_no) { 2551 void TemplateTable::putfield(int byte_no) {
2550 __ testl(rcx,rcx); 2566 __ testl(rcx,rcx);
2551 __ jcc(Assembler::zero, L2); 2567 __ jcc(Assembler::zero, L2);
2552 __ pop_ptr(rbx); // copy the object pointer from tos 2568 __ pop_ptr(rbx); // copy the object pointer from tos
2553 __ verify_oop(rbx); 2569 __ verify_oop(rbx);
2554 __ push_ptr(rbx); // put the object pointer back on tos 2570 __ push_ptr(rbx); // put the object pointer back on tos
2555 __ subl(rsp, sizeof(jvalue)); // add space for a jvalue object 2571 __ subptr(rsp, sizeof(jvalue)); // add space for a jvalue object
2556 __ movl(rcx, rsp); 2572 __ mov(rcx, rsp);
2557 __ push_ptr(rbx); // save object pointer so we can steal rbx, 2573 __ push_ptr(rbx); // save object pointer so we can steal rbx,
2558 __ movl(rbx, 0); 2574 __ xorptr(rbx, rbx);
2559 const Address lo_value(rcx, rbx, Address::times_1, 0*wordSize); 2575 const Address lo_value(rcx, rbx, Address::times_1, 0*wordSize);
2560 const Address hi_value(rcx, rbx, Address::times_1, 1*wordSize); 2576 const Address hi_value(rcx, rbx, Address::times_1, 1*wordSize);
2561 switch (bytecode()) { // load values into the jvalue object 2577 switch (bytecode()) { // load values into the jvalue object
2562 case Bytecodes::_fast_bputfield: __ movb(lo_value, rax); break; 2578 case Bytecodes::_fast_bputfield: __ movb(lo_value, rax); break;
2563 case Bytecodes::_fast_sputfield: __ movw(lo_value, rax); break; 2579 case Bytecodes::_fast_sputfield: __ movw(lo_value, rax); break;
2564 case Bytecodes::_fast_cputfield: __ movw(lo_value, rax); break; 2580 case Bytecodes::_fast_cputfield: __ movw(lo_value, rax); break;
2565 case Bytecodes::_fast_iputfield: __ movl(lo_value, rax); break; 2581 case Bytecodes::_fast_iputfield: __ movl(lo_value, rax); break;
2566 case Bytecodes::_fast_lputfield: __ movl(hi_value, rdx); __ movl(lo_value, rax); break; 2582 case Bytecodes::_fast_lputfield:
2583 NOT_LP64(__ movptr(hi_value, rdx));
2584 __ movptr(lo_value, rax);
2585 break;
2586
2567 // need to call fld_s() after fstp_s() to restore the value for below 2587 // need to call fld_s() after fstp_s() to restore the value for below
2568 case Bytecodes::_fast_fputfield: __ fstp_s(lo_value); __ fld_s(lo_value); break; 2588 case Bytecodes::_fast_fputfield: __ fstp_s(lo_value); __ fld_s(lo_value); break;
2589
2569 // need to call fld_d() after fstp_d() to restore the value for below 2590 // need to call fld_d() after fstp_d() to restore the value for below
2570 case Bytecodes::_fast_dputfield: __ fstp_d(lo_value); __ fld_d(lo_value); break; 2591 case Bytecodes::_fast_dputfield: __ fstp_d(lo_value); __ fld_d(lo_value); break;
2592
2571 // since rcx is not an object we don't call store_check() here 2593 // since rcx is not an object we don't call store_check() here
2572 case Bytecodes::_fast_aputfield: __ movl(lo_value, rax); break; 2594 case Bytecodes::_fast_aputfield: __ movptr(lo_value, rax); break;
2595
2573 default: ShouldNotReachHere(); 2596 default: ShouldNotReachHere();
2574 } 2597 }
2575 __ pop_ptr(rbx); // restore copy of object pointer 2598 __ pop_ptr(rbx); // restore copy of object pointer
2576 2599
2577 // Save rax, and sometimes rdx because call_VM() will clobber them, 2600 // Save rax, and sometimes rdx because call_VM() will clobber them,
2578 // then use them for JVM/DI purposes 2601 // then use them for JVM/DI purposes
2579 __ pushl(rax); 2602 __ push(rax);
2580 if (bytecode() == Bytecodes::_fast_lputfield) __ pushl(rdx); 2603 if (bytecode() == Bytecodes::_fast_lputfield) __ push(rdx);
2581 // access constant pool cache entry 2604 // access constant pool cache entry
2582 __ get_cache_entry_pointer_at_bcp(rax, rdx, 1); 2605 __ get_cache_entry_pointer_at_bcp(rax, rdx, 1);
2583 __ verify_oop(rbx); 2606 __ verify_oop(rbx);
2584 // rbx,: object pointer copied above 2607 // rbx,: object pointer copied above
2585 // rax,: cache entry pointer 2608 // rax,: cache entry pointer
2586 // rcx: jvalue object on the stack 2609 // rcx: jvalue object on the stack
2587 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, rax, rcx); 2610 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, rax, rcx);
2588 if (bytecode() == Bytecodes::_fast_lputfield) __ popl(rdx); // restore high value 2611 if (bytecode() == Bytecodes::_fast_lputfield) __ pop(rdx); // restore high value
2589 __ popl(rax); // restore lower value 2612 __ pop(rax); // restore lower value
2590 __ addl(rsp, sizeof(jvalue)); // release jvalue object space 2613 __ addptr(rsp, sizeof(jvalue)); // release jvalue object space
2591 __ bind(L2); 2614 __ bind(L2);
2592 } 2615 }
2593 } 2616 }
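The bytecode switch above packs the top-of-stack value into a jvalue whose interpretation depends on the field type; note the aputfield case deliberately skips store_check because the jvalue is not an oop. A C++ restatement (a sketch with narrowed types; the names are hypothetical):

#include <cstdint>
#include <cstring>
#include <cstdio>

union jvalue_model {
  int8_t  b; uint16_t c; int16_t s;
  int32_t i; int64_t j; float f; double d; void* l;
};

enum FastPut { bput, cput, sput, iput, lput, fput, dput, aput };

static void fill_jvalue(jvalue_model* jv, FastPut bc, int64_t raw, void* obj) {
  switch (bc) {                         // mirrors the movb/movw/movl/movptr cases
    case bput: jv->b = (int8_t)raw;   break;
    case cput: jv->c = (uint16_t)raw; break;
    case sput: jv->s = (int16_t)raw;  break;
    case iput: jv->i = (int32_t)raw;  break;
    case lput: jv->j = raw;           break;  // two stores on 32-bit (hi then lo)
    case fput: memcpy(&jv->f, &raw, sizeof(float));  break;  // low word as float bits (sketch)
    case dput: memcpy(&jv->d, &raw, sizeof(double)); break;
    case aput: jv->l = obj;           break;  // no store_check: jv is not an oop
  }
}

int main() {
  jvalue_model jv;
  fill_jvalue(&jv, lput, 0x1122334455667788LL, nullptr);
  printf("%llx\n", (unsigned long long)jv.j);
  return 0;
}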
2594 2617
2595 void TemplateTable::fast_storefield(TosState state) { 2618 void TemplateTable::fast_storefield(TosState state) {
2601 2624
2602 // access constant pool cache 2625 // access constant pool cache
2603 __ get_cache_and_index_at_bcp(rcx, rbx, 1); 2626 __ get_cache_and_index_at_bcp(rcx, rbx, 1);
2604 2627
2605 // test for volatile with rdx but rdx is tos register for lputfield. 2628 // test for volatile with rdx but rdx is tos register for lputfield.
2606 if (bytecode() == Bytecodes::_fast_lputfield) __ pushl(rdx); 2629 if (bytecode() == Bytecodes::_fast_lputfield) __ push(rdx);
2607 __ movl(rdx, Address(rcx, rbx, Address::times_4, in_bytes(base + 2630 __ movl(rdx, Address(rcx, rbx, Address::times_ptr, in_bytes(base +
2608 ConstantPoolCacheEntry::flags_offset()))); 2631 ConstantPoolCacheEntry::flags_offset())));
2609 2632
2610 // replace index with field offset from cache entry 2633 // replace index with field offset from cache entry
2611 __ movl(rbx, Address(rcx, rbx, Address::times_4, in_bytes(base + ConstantPoolCacheEntry::f2_offset()))); 2634 __ movptr(rbx, Address(rcx, rbx, Address::times_ptr, in_bytes(base + ConstantPoolCacheEntry::f2_offset())));
2612 2635
2613 // Doug Lea believes this is not needed with current Sparcs (TSO) and Intel (PSO). 2636 // Doug Lea believes this is not needed with current Sparcs (TSO) and Intel (PSO).
2614 // volatile_barrier( ); 2637 // volatile_barrier( );
2615 2638
2616 Label notVolatile, Done; 2639 Label notVolatile, Done;
2618 __ andl(rdx, 0x1); 2641 __ andl(rdx, 0x1);
2619 // Check for volatile store 2642 // Check for volatile store
2620 __ testl(rdx, rdx); 2643 __ testl(rdx, rdx);
2621 __ jcc(Assembler::zero, notVolatile); 2644 __ jcc(Assembler::zero, notVolatile);
2622 2645
2623 if (bytecode() == Bytecodes::_fast_lputfield) __ popl(rdx); 2646 if (bytecode() == Bytecodes::_fast_lputfield) __ pop(rdx);
2624 2647
2625 // Get object from stack 2648 // Get object from stack
2626 pop_and_check_object(rcx); 2649 pop_and_check_object(rcx);
2627 2650
2628 // field addresses 2651 // field addresses
2633 switch (bytecode()) { 2656 switch (bytecode()) {
2634 case Bytecodes::_fast_bputfield: __ movb(lo, rax); break; 2657 case Bytecodes::_fast_bputfield: __ movb(lo, rax); break;
2635 case Bytecodes::_fast_sputfield: // fall through 2658 case Bytecodes::_fast_sputfield: // fall through
2636 case Bytecodes::_fast_cputfield: __ movw(lo, rax); break; 2659 case Bytecodes::_fast_cputfield: __ movw(lo, rax); break;
2637 case Bytecodes::_fast_iputfield: __ movl(lo, rax); break; 2660 case Bytecodes::_fast_iputfield: __ movl(lo, rax); break;
2638 case Bytecodes::_fast_lputfield: __ movl(hi, rdx); __ movl(lo, rax); break; 2661 case Bytecodes::_fast_lputfield:
2662 NOT_LP64(__ movptr(hi, rdx));
2663 __ movptr(lo, rax);
2664 break;
2639 case Bytecodes::_fast_fputfield: __ fstp_s(lo); break; 2665 case Bytecodes::_fast_fputfield: __ fstp_s(lo); break;
2640 case Bytecodes::_fast_dputfield: __ fstp_d(lo); break; 2666 case Bytecodes::_fast_dputfield: __ fstp_d(lo); break;
2641 case Bytecodes::_fast_aputfield: __ movl(lo, rax); __ store_check(rcx, lo); break; 2667 case Bytecodes::_fast_aputfield: __ movptr(lo, rax); __ store_check(rcx, lo); break;
2642 default: 2668 default:
2643 ShouldNotReachHere(); 2669 ShouldNotReachHere();
2644 } 2670 }
2645 2671
2646 Label done; 2672 Label done;
2647 volatile_barrier( ); 2673 volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
2674 Assembler::StoreStore));
2648 __ jmpb(done); 2675 __ jmpb(done);
2649 2676
2650 // Same code as above, but don't need rdx to test for volatile. 2677 // Same code as above, but don't need rdx to test for volatile.
2651 __ bind(notVolatile); 2678 __ bind(notVolatile);
2652 2679
2653 if (bytecode() == Bytecodes::_fast_lputfield) __ popl(rdx); 2680 if (bytecode() == Bytecodes::_fast_lputfield) __ pop(rdx);
2654 2681
2655 // Get object from stack 2682 // Get object from stack
2656 pop_and_check_object(rcx); 2683 pop_and_check_object(rcx);
2657 2684
2658 // access field 2685 // access field
2659 switch (bytecode()) { 2686 switch (bytecode()) {
2660 case Bytecodes::_fast_bputfield: __ movb(lo, rax); break; 2687 case Bytecodes::_fast_bputfield: __ movb(lo, rax); break;
2661 case Bytecodes::_fast_sputfield: // fall through 2688 case Bytecodes::_fast_sputfield: // fall through
2662 case Bytecodes::_fast_cputfield: __ movw(lo, rax); break; 2689 case Bytecodes::_fast_cputfield: __ movw(lo, rax); break;
2663 case Bytecodes::_fast_iputfield: __ movl(lo, rax); break; 2690 case Bytecodes::_fast_iputfield: __ movl(lo, rax); break;
2664 case Bytecodes::_fast_lputfield: __ movl(hi, rdx); __ movl(lo, rax); break; 2691 case Bytecodes::_fast_lputfield:
2692 NOT_LP64(__ movptr(hi, rdx));
2693 __ movptr(lo, rax);
2694 break;
2665 case Bytecodes::_fast_fputfield: __ fstp_s(lo); break; 2695 case Bytecodes::_fast_fputfield: __ fstp_s(lo); break;
2666 case Bytecodes::_fast_dputfield: __ fstp_d(lo); break; 2696 case Bytecodes::_fast_dputfield: __ fstp_d(lo); break;
2667 case Bytecodes::_fast_aputfield: __ movl(lo, rax); __ store_check(rcx, lo); break; 2697 case Bytecodes::_fast_aputfield: __ movptr(lo, rax); __ store_check(rcx, lo); break;
2668 default: 2698 default:
2669 ShouldNotReachHere(); 2699 ShouldNotReachHere();
2670 } 2700 }
2671 __ bind(done); 2701 __ bind(done);
2672 } 2702 }
2695 } 2725 }
2696 2726
2697 // access constant pool cache 2727 // access constant pool cache
2698 __ get_cache_and_index_at_bcp(rcx, rbx, 1); 2728 __ get_cache_and_index_at_bcp(rcx, rbx, 1);
2699 // replace index with field offset from cache entry 2729 // replace index with field offset from cache entry
2700 __ movl(rbx, Address(rcx, rbx, Address::times_4, in_bytes(constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::f2_offset()))); 2730 __ movptr(rbx, Address(rcx,
2731 rbx,
2732 Address::times_ptr,
2733 in_bytes(constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::f2_offset())));
2701 2734
2702 2735
2703 // rax,: object 2736 // rax,: object
2704 __ verify_oop(rax); 2737 __ verify_oop(rax);
2705 __ null_check(rax); 2738 __ null_check(rax);
2707 const Address lo = Address(rax, rbx, Address::times_1, 0*wordSize); 2740 const Address lo = Address(rax, rbx, Address::times_1, 0*wordSize);
2708 const Address hi = Address(rax, rbx, Address::times_1, 1*wordSize); 2741 const Address hi = Address(rax, rbx, Address::times_1, 1*wordSize);
2709 2742
2710 // access field 2743 // access field
2711 switch (bytecode()) { 2744 switch (bytecode()) {
2712 case Bytecodes::_fast_bgetfield: __ movsxb(rax, lo ); break; 2745 case Bytecodes::_fast_bgetfield: __ movsbl(rax, lo ); break;
2713 case Bytecodes::_fast_sgetfield: __ load_signed_word(rax, lo ); break; 2746 case Bytecodes::_fast_sgetfield: __ load_signed_word(rax, lo ); break;
2714 case Bytecodes::_fast_cgetfield: __ load_unsigned_word(rax, lo ); break; 2747 case Bytecodes::_fast_cgetfield: __ load_unsigned_word(rax, lo ); break;
2715 case Bytecodes::_fast_igetfield: __ movl(rax, lo); break; 2748 case Bytecodes::_fast_igetfield: __ movl(rax, lo); break;
2716 case Bytecodes::_fast_lgetfield: __ stop("should not be rewritten"); break; 2749 case Bytecodes::_fast_lgetfield: __ stop("should not be rewritten"); break;
2717 case Bytecodes::_fast_fgetfield: __ fld_s(lo); break; 2750 case Bytecodes::_fast_fgetfield: __ fld_s(lo); break;
2718 case Bytecodes::_fast_dgetfield: __ fld_d(lo); break; 2751 case Bytecodes::_fast_dgetfield: __ fld_d(lo); break;
2719 case Bytecodes::_fast_agetfield: __ movl(rax, lo); __ verify_oop(rax); break; 2752 case Bytecodes::_fast_agetfield: __ movptr(rax, lo); __ verify_oop(rax); break;
2720 default: 2753 default:
2721 ShouldNotReachHere(); 2754 ShouldNotReachHere();
2722 } 2755 }
2723 2756
2724 // Doug Lea believes this is not needed with current Sparcs(TSO) and Intel(PSO) 2757 // Doug Lea believes this is not needed with current Sparcs(TSO) and Intel(PSO)
2726 } 2759 }
2727 2760
2728 void TemplateTable::fast_xaccess(TosState state) { 2761 void TemplateTable::fast_xaccess(TosState state) {
2729 transition(vtos, state); 2762 transition(vtos, state);
2730 // get receiver 2763 // get receiver
2731 __ movl(rax, aaddress(0)); 2764 __ movptr(rax, aaddress(0));
2732 debug_only(__ verify_local_tag(frame::TagReference, 0)); 2765 debug_only(__ verify_local_tag(frame::TagReference, 0));
2733 // access constant pool cache 2766 // access constant pool cache
2734 __ get_cache_and_index_at_bcp(rcx, rdx, 2); 2767 __ get_cache_and_index_at_bcp(rcx, rdx, 2);
2735 __ movl(rbx, Address(rcx, rdx, Address::times_4, in_bytes(constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::f2_offset()))); 2768 __ movptr(rbx, Address(rcx,
2769 rdx,
2770 Address::times_ptr,
2771 in_bytes(constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::f2_offset())));
2736 // make sure exception is reported in correct bcp range (getfield is next instruction) 2772 // make sure exception is reported in correct bcp range (getfield is next instruction)
2737 __ increment(rsi); 2773 __ increment(rsi);
2738 __ null_check(rax); 2774 __ null_check(rax);
2739 const Address lo = Address(rax, rbx, Address::times_1, 0*wordSize); 2775 const Address lo = Address(rax, rbx, Address::times_1, 0*wordSize);
2740 if (state == itos) { 2776 if (state == itos) {
2741 __ movl(rax, lo); 2777 __ movl(rax, lo);
2742 } else if (state == atos) { 2778 } else if (state == atos) {
2743 __ movl(rax, lo); 2779 __ movptr(rax, lo);
2744 __ verify_oop(rax); 2780 __ verify_oop(rax);
2745 } else if (state == ftos) { 2781 } else if (state == ftos) {
2746 __ fld_s(lo); 2782 __ fld_s(lo);
2747 } else { 2783 } else {
2748 ShouldNotReachHere(); 2784 ShouldNotReachHere();
2782 // load receiver if needed (note: no return address pushed yet) 2818 // load receiver if needed (note: no return address pushed yet)
2783 if (load_receiver) { 2819 if (load_receiver) {
2784 __ movl(recv, flags); 2820 __ movl(recv, flags);
2785 __ andl(recv, 0xFF); 2821 __ andl(recv, 0xFF);
2786 // recv count is 0 based? 2822 // recv count is 0 based?
2787 __ movl(recv, Address(rsp, recv, Interpreter::stackElementScale(), -Interpreter::expr_offset_in_bytes(1))); 2823 __ movptr(recv, Address(rsp, recv, Interpreter::stackElementScale(), -Interpreter::expr_offset_in_bytes(1)));
2788 __ verify_oop(recv); 2824 __ verify_oop(recv);
2789 } 2825 }
2790 2826
2791 // do null check if needed 2827 // do null check if needed
2792 if (receiver_null_check) { 2828 if (receiver_null_check) {
2793 __ null_check(recv); 2829 __ null_check(recv);
2794 } 2830 }
2795 2831
2796 if (save_flags) { 2832 if (save_flags) {
2797 __ movl(rsi, flags); 2833 __ mov(rsi, flags);
2798 } 2834 }
2799 2835
2800 // compute return type 2836 // compute return type
2801 __ shrl(flags, ConstantPoolCacheEntry::tosBits); 2837 __ shrl(flags, ConstantPoolCacheEntry::tosBits);
2802 // Make sure we don't need to mask flags for tosBits after the above shift 2838 // Make sure we don't need to mask flags for tosBits after the above shift
2803 ConstantPoolCacheEntry::verify_tosBits(); 2839 ConstantPoolCacheEntry::verify_tosBits();
2804 // load return address 2840 // load return address
2805 { const int table = 2841 {
2806 is_invokeinterface 2842 ExternalAddress table(is_invokeinterface ? (address)Interpreter::return_5_addrs_by_index_table() :
2807 ? (int)Interpreter::return_5_addrs_by_index_table() 2843 (address)Interpreter::return_3_addrs_by_index_table());
2808 : (int)Interpreter::return_3_addrs_by_index_table(); 2844 __ movptr(flags, ArrayAddress(table, Address(noreg, flags, Address::times_ptr)));
2809 __ movl(flags, Address(noreg, flags, Address::times_4, table));
2810 } 2845 }
2811 2846
2812 // push return address 2847 // push return address
2813 __ pushl(flags); 2848 __ push(flags);
2814 2849
2815 // Restore flag value from the constant pool cache, and restore rsi 2850 // Restore flag value from the constant pool cache, and restore rsi
2816 // for later null checks. rsi is the bytecode pointer 2851 // for later null checks. rsi is the bytecode pointer
2817 if (save_flags) { 2852 if (save_flags) {
2818 __ movl(flags, rsi); 2853 __ mov(flags, rsi);
2819 __ restore_bcp(); 2854 __ restore_bcp();
2820 } 2855 }
2821 } 2856 }
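The table indexed by flags above holds one interpreter continuation per result type (TosState); there are separate return_3 and return_5 tables because invokeinterface is a 5-byte bytecode while the other invokes are 3 bytes. The idea in miniature (function pointers stand in for the real entry points; all names hypothetical):

#include <cstdio>

typedef void (*return_entry)(void);
static void return_void(void)   { printf("vtos return\n"); }
static void return_int(void)    { printf("itos return\n"); }
static void return_object(void) { printf("atos return\n"); }

// one entry per result TosState
static return_entry return_table[] = { return_void, return_int, return_object };

int main() {
  int result_type = 1;                 // extracted from the cp-cache flags
  return_table[result_type]();         // "push return address" = push this entry
  return 0;
}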
2822 2857
2823 2858
2850 __ bind(notFinal); 2885 __ bind(notFinal);
2851 2886
2852 // get receiver klass 2887 // get receiver klass
2853 __ null_check(recv, oopDesc::klass_offset_in_bytes()); 2888 __ null_check(recv, oopDesc::klass_offset_in_bytes());
2854 // Keep recv in rcx; the callee expects it there 2889 // Keep recv in rcx; the callee expects it there
2855 __ movl(rax, Address(recv, oopDesc::klass_offset_in_bytes())); 2890 __ movptr(rax, Address(recv, oopDesc::klass_offset_in_bytes()));
2856 __ verify_oop(rax); 2891 __ verify_oop(rax);
2857 2892
2858 // profile this call 2893 // profile this call
2859 __ profile_virtual_call(rax, rdi, rdx); 2894 __ profile_virtual_call(rax, rdi, rdx);
2860 2895
2861 // get target methodOop & entry point 2896 // get target methodOop & entry point
2862 const int base = instanceKlass::vtable_start_offset() * wordSize; 2897 const int base = instanceKlass::vtable_start_offset() * wordSize;
2863 assert(vtableEntry::size() * wordSize == 4, "adjust the scaling in the code below"); 2898 assert(vtableEntry::size() * wordSize == 4, "adjust the scaling in the code below");
2864 __ movl(method, Address(rax, index, Address::times_4, base + vtableEntry::method_offset_in_bytes())); 2899 __ movptr(method, Address(rax, index, Address::times_ptr, base + vtableEntry::method_offset_in_bytes()));
2865 __ jump_from_interpreted(method, rdx); 2900 __ jump_from_interpreted(method, rdx);
2866 } 2901 }
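invokevirtual_helper above is the classic vtable dispatch: load the receiver's klass, index the embedded vtable by the method's vtable index, and jump. A self-contained model (not HotSpot's object layout):

#include <cstdio>

typedef void (*method_entry)(void*);

struct Klass { method_entry vtable[2]; };        // vtable embedded after the klass header
struct Oop   { Klass* klass; };                  // klass_offset_in_bytes analogue

static void dispatch(Oop* recv, int vtable_index) {
  Klass* k = recv->klass;                        // movptr(rax, Address(recv, klass_offset))
  method_entry m = k->vtable[vtable_index];      // movptr(method, Address(rax, index, times_ptr, base))
  m(recv);                                       // jump_from_interpreted
}

static void greet(void* self) { printf("hello from %p\n", self); }

int main() {
  Klass k = {{greet, greet}};
  Oop o = {&k};
  dispatch(&o, 1);
  return 0;
}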
2867 2902
2868 2903
2869 void TemplateTable::invokevirtual(int byte_no) { 2904 void TemplateTable::invokevirtual(int byte_no) {
2925 invokevirtual_helper(rbx, rcx, rdx); 2960 invokevirtual_helper(rbx, rcx, rdx);
2926 __ bind(notMethod); 2961 __ bind(notMethod);
2927 2962
2928 // Get receiver klass into rdx - also a null check 2963 // Get receiver klass into rdx - also a null check
2929 __ restore_locals(); // restore rdi 2964 __ restore_locals(); // restore rdi
2930 __ movl(rdx, Address(rcx, oopDesc::klass_offset_in_bytes())); 2965 __ movptr(rdx, Address(rcx, oopDesc::klass_offset_in_bytes()));
2931 __ verify_oop(rdx); 2966 __ verify_oop(rdx);
2932 2967
2933 // profile this call 2968 // profile this call
2934 __ profile_virtual_call(rdx, rsi, rdi); 2969 __ profile_virtual_call(rdx, rsi, rdi);
2935 2970
2936 __ movl(rdi, rdx); // Save klassOop in rdi 2971 __ mov(rdi, rdx); // Save klassOop in rdi
2937 2972
2938 // Compute start of first itableOffsetEntry (which is at the end of the vtable) 2973 // Compute start of first itableOffsetEntry (which is at the end of the vtable)
2939 const int base = instanceKlass::vtable_start_offset() * wordSize; 2974 const int base = instanceKlass::vtable_start_offset() * wordSize;
2940 assert(vtableEntry::size() * wordSize == 4, "adjust the scaling in the code below"); 2975 assert(vtableEntry::size() * wordSize == (1 << (int)Address::times_ptr), "adjust the scaling in the code below");
2941 __ movl(rsi, Address(rdx, instanceKlass::vtable_length_offset() * wordSize)); // Get length of vtable 2976 __ movl(rsi, Address(rdx, instanceKlass::vtable_length_offset() * wordSize)); // Get length of vtable
2942 __ leal(rdx, Address(rdx, rsi, Address::times_4, base)); 2977 __ lea(rdx, Address(rdx, rsi, Address::times_4, base));
2943 if (HeapWordsPerLong > 1) { 2978 if (HeapWordsPerLong > 1) {
2944 // Round up to align_object_offset boundary 2979 // Round up to align_object_offset boundary
2945 __ round_to(rdx, BytesPerLong); 2980 __ round_to(rdx, BytesPerLong);
2946 } 2981 }
2947 2982
2948 Label entry, search, interface_ok; 2983 Label entry, search, interface_ok;
2949 2984
2950 __ jmpb(entry); 2985 __ jmpb(entry);
2951 __ bind(search); 2986 __ bind(search);
2952 __ addl(rdx, itableOffsetEntry::size() * wordSize); 2987 __ addptr(rdx, itableOffsetEntry::size() * wordSize);
2953 2988
2954 __ bind(entry); 2989 __ bind(entry);
2955 2990
2956 // Check that the entry is non-null. A null entry means that the receiver 2991 // Check that the entry is non-null. A null entry means that the receiver
2957 // class doesn't implement the interface, and wasn't the same as the 2992 // class doesn't implement the interface, and wasn't the same as the
2958 // receiver class checked when the interface was resolved. 2993 // receiver class checked when the interface was resolved.
2959 __ pushl(rdx); 2994 __ push(rdx);
2960 __ movl(rdx, Address(rdx, itableOffsetEntry::interface_offset_in_bytes())); 2995 __ movptr(rdx, Address(rdx, itableOffsetEntry::interface_offset_in_bytes()));
2961 __ testl(rdx, rdx); 2996 __ testptr(rdx, rdx);
2962 __ jcc(Assembler::notZero, interface_ok); 2997 __ jcc(Assembler::notZero, interface_ok);
2963 // throw exception 2998 // throw exception
2964 __ popl(rdx); // pop saved register first. 2999 __ pop(rdx); // pop saved register first.
2965 __ popl(rbx); // pop return address (pushed by prepare_invoke) 3000 __ pop(rbx); // pop return address (pushed by prepare_invoke)
2966 __ restore_bcp(); // rsi must be correct for exception handler (was destroyed) 3001 __ restore_bcp(); // rsi must be correct for exception handler (was destroyed)
2967 __ restore_locals(); // make sure locals pointer is correct as well (was destroyed) 3002 __ restore_locals(); // make sure locals pointer is correct as well (was destroyed)
2968 __ call_VM(noreg, CAST_FROM_FN_PTR(address, 3003 __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2969 InterpreterRuntime::throw_IncompatibleClassChangeError)); 3004 InterpreterRuntime::throw_IncompatibleClassChangeError));
2970 // the call_VM checks for exception, so we should never return here. 3005 // the call_VM checks for exception, so we should never return here.
2971 __ should_not_reach_here(); 3006 __ should_not_reach_here();
2972 __ bind(interface_ok); 3007 __ bind(interface_ok);
2973 3008
2974 __ popl(rdx); 3009 __ pop(rdx);
2975 3010
2976 __ cmpl(rax, Address(rdx, itableOffsetEntry::interface_offset_in_bytes())); 3011 __ cmpptr(rax, Address(rdx, itableOffsetEntry::interface_offset_in_bytes()));
2977 __ jcc(Assembler::notEqual, search); 3012 __ jcc(Assembler::notEqual, search);
2978 3013
2979 __ movl(rdx, Address(rdx, itableOffsetEntry::offset_offset_in_bytes())); 3014 __ movl(rdx, Address(rdx, itableOffsetEntry::offset_offset_in_bytes()));
2980 __ addl(rdx, rdi); // Add offset to klassOop 3015 __ addptr(rdx, rdi); // Add offset to klassOop
2981 assert(itableMethodEntry::size() * wordSize == 4, "adjust the scaling in the code below"); 3016 assert(itableMethodEntry::size() * wordSize == (1 << (int)Address::times_ptr), "adjust the scaling in the code below");
2982 __ movl(rbx, Address(rdx, rbx, Address::times_4)); 3017 __ movptr(rbx, Address(rdx, rbx, Address::times_ptr));
2983 // rbx,: methodOop to call 3018 // rbx,: methodOop to call
2984 // rcx: receiver 3019 // rcx: receiver
2985 // Check for abstract method error 3020 // Check for abstract method error
2986 // Note: This should be done more efficiently via a throw_abstract_method_error 3021 // Note: This should be done more efficiently via a throw_abstract_method_error
2987 // interpreter entry point and a conditional jump to it in case of a null 3022 // interpreter entry point and a conditional jump to it in case of a null
2988 // method. 3023 // method.
2989 { Label L; 3024 { Label L;
2990 __ testl(rbx, rbx); 3025 __ testptr(rbx, rbx);
2991 __ jcc(Assembler::notZero, L); 3026 __ jcc(Assembler::notZero, L);
2992 // throw exception 3027 // throw exception
2993 // note: must restore interpreter registers to canonical 3028 // note: must restore interpreter registers to canonical
2994 // state for exception handling to work correctly! 3029 // state for exception handling to work correctly!
2995 __ popl(rbx); // pop return address (pushed by prepare_invoke) 3030 __ pop(rbx); // pop return address (pushed by prepare_invoke)
2996 __ restore_bcp(); // rsi must be correct for exception handler (was destroyed) 3031 __ restore_bcp(); // rsi must be correct for exception handler (was destroyed)
2997 __ restore_locals(); // make sure locals pointer is correct as well (was destroyed) 3032 __ restore_locals(); // make sure locals pointer is correct as well (was destroyed)
2998 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_AbstractMethodError)); 3033 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_AbstractMethodError));
2999 // the call_VM checks for exception, so we should never return here. 3034 // the call_VM checks for exception, so we should never return here.
3000 __ should_not_reach_here(); 3035 __ should_not_reach_here();
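The search/entry loop above walks the itableOffsetEntry array at the end of the vtable: a null interface means the receiver never implemented it (IncompatibleClassChangeError), a match gives the offset of the method block, and a null method there means AbstractMethodError. A compact C++ model (the method array is passed in directly, rather than located via the offset, to keep the sketch self-contained):

#include <cstdio>

struct ItableOffsetEntry { const void* interface_; int offset; };

static const void* itable_lookup(const ItableOffsetEntry* table,
                                 const void* interface_,
                                 const void* const* methods, int itable_index) {
  for (const ItableOffsetEntry* e = table; ; e++) {
    if (e->interface_ == nullptr) return nullptr;     // IncompatibleClassChangeError
    if (e->interface_ == interface_)
      return methods[itable_index];                   // null here => AbstractMethodError
  }
}

int main() {
  int ifaceA, ifaceB;                                 // addresses stand in for interface klasses
  const void* methods[] = {(void*)0x1111, (void*)0x2222};
  ItableOffsetEntry table[] = {{&ifaceA, 0}, {&ifaceB, 0}, {nullptr, 0}};
  printf("%p\n", itable_lookup(table, &ifaceB, methods, 1));  // 0x2222
  return 0;
}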
3021 3056
3022 ExternalAddress heap_top((address)Universe::heap()->top_addr()); 3057 ExternalAddress heap_top((address)Universe::heap()->top_addr());
3023 3058
3024 __ get_cpool_and_tags(rcx, rax); 3059 __ get_cpool_and_tags(rcx, rax);
3025 // get instanceKlass 3060 // get instanceKlass
3026 __ movl(rcx, Address(rcx, rdx, Address::times_4, sizeof(constantPoolOopDesc))); 3061 __ movptr(rcx, Address(rcx, rdx, Address::times_ptr, sizeof(constantPoolOopDesc)));
3027 __ pushl(rcx); // save the contents of klass for initializing the header 3062 __ push(rcx); // save the contents of klass for initializing the header
3028 3063
3029 // make sure the class we're about to instantiate has been resolved. 3064 // make sure the class we're about to instantiate has been resolved.
3030 // Note: slow_case does a pop of stack, which is why we loaded class/pushed above 3065 // Note: slow_case does a pop of stack, which is why we loaded class/pushed above
3031 const int tags_offset = typeArrayOopDesc::header_size(T_BYTE) * wordSize; 3066 const int tags_offset = typeArrayOopDesc::header_size(T_BYTE) * wordSize;
3032 __ cmpb(Address(rax, rdx, Address::times_1, tags_offset), JVM_CONSTANT_Class); 3067 __ cmpb(Address(rax, rdx, Address::times_1, tags_offset), JVM_CONSTANT_Class);
3055 3090
3056 if (UseTLAB) { 3091 if (UseTLAB) {
3057 const Register thread = rcx; 3092 const Register thread = rcx;
3058 3093
3059 __ get_thread(thread); 3094 __ get_thread(thread);
3060 __ movl(rax, Address(thread, in_bytes(JavaThread::tlab_top_offset()))); 3095 __ movptr(rax, Address(thread, in_bytes(JavaThread::tlab_top_offset())));
3061 __ leal(rbx, Address(rax, rdx, Address::times_1)); 3096 __ lea(rbx, Address(rax, rdx, Address::times_1));
3062 __ cmpl(rbx, Address(thread, in_bytes(JavaThread::tlab_end_offset()))); 3097 __ cmpptr(rbx, Address(thread, in_bytes(JavaThread::tlab_end_offset())));
3063 __ jcc(Assembler::above, allow_shared_alloc ? allocate_shared : slow_case); 3098 __ jcc(Assembler::above, allow_shared_alloc ? allocate_shared : slow_case);
3064 __ movl(Address(thread, in_bytes(JavaThread::tlab_top_offset())), rbx); 3099 __ movptr(Address(thread, in_bytes(JavaThread::tlab_top_offset())), rbx);
3065 if (ZeroTLAB) { 3100 if (ZeroTLAB) {
3066 // the fields have been already cleared 3101 // the fields have been already cleared
3067 __ jmp(initialize_header); 3102 __ jmp(initialize_header);
3068 } else { 3103 } else {
3069 // initialize both the header and fields 3104 // initialize both the header and fields
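The TLAB fast path is a thread-private bump-pointer allocation: load tlab_top into rax, compute the would-be end in rbx with a lea, bail to the shared/slow path if it passes tlab_end, otherwise publish the new top. A minimal C++ sketch under illustrative names (the struct fields stand in for the tlab_top_offset()/tlab_end_offset() loads; no atomics are needed because a TLAB belongs to one thread):

    #include <cstddef>
    struct ThreadTLAB { char* top; char* end; };        // illustrative fields
    char* tlab_allocate(ThreadTLAB* t, size_t size_in_bytes) {
      char* obj     = t->top;                // movptr rax, tlab_top
      char* new_top = obj + size_in_bytes;   // lea rbx, [rax + rdx]
      if (new_top > t->end) return nullptr;  // jcc above -> allocate_shared/slow
      t->top = new_top;                      // movptr tlab_top, rbx
      return obj;
    }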
3077 if (allow_shared_alloc) { 3112 if (allow_shared_alloc) {
3078 __ bind(allocate_shared); 3113 __ bind(allocate_shared);
3079 3114
3080 Label retry; 3115 Label retry;
3081 __ bind(retry); 3116 __ bind(retry);
3082 __ mov32(rax, heap_top); 3117 __ movptr(rax, heap_top);
3083 __ leal(rbx, Address(rax, rdx, Address::times_1)); 3118 __ lea(rbx, Address(rax, rdx, Address::times_1));
3084 __ cmp32(rbx, ExternalAddress((address)Universe::heap()->end_addr())); 3119 __ cmpptr(rbx, ExternalAddress((address)Universe::heap()->end_addr()));
3085 __ jcc(Assembler::above, slow_case); 3120 __ jcc(Assembler::above, slow_case);
3086 3121
3087 // Compare rax with the current heap top and, if still equal, store the 3122 // Compare rax with the current heap top and, if still equal, store the
3088 // new top (rbx) at the top addr pointer. Sets ZF if it was equal and 3123 // new top (rbx) at the top addr pointer. Sets ZF if it was equal and
3089 // clears it otherwise. Use a lock prefix for atomicity on MPs. 3124 // clears it otherwise. Use a lock prefix for atomicity on MPs.
3090 // 3125 //
3091 // rax: object begin 3126 // rax: object begin
3092 // rbx: object end 3127 // rbx: object end
3093 // rdx: instance size in bytes 3128 // rdx: instance size in bytes
3094 if (os::is_MP()) __ lock(); 3129 __ locked_cmpxchgptr(rbx, heap_top);
3095 __ cmpxchgptr(rbx, heap_top);
3096 3130
3097 // if someone beat us on the allocation, try again, otherwise continue 3131 // if someone beat us on the allocation, try again, otherwise continue
3098 __ jcc(Assembler::notEqual, retry); 3132 __ jcc(Assembler::notEqual, retry);
3099 } 3133 }
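The retry loop around the locked cmpxchg is an ordinary CAS bump-pointer allocation on the shared heap top. A sketch with illustrative names (heap_top stands in for Universe::heap()->top_addr(), heap_end for end_addr()):

    #include <atomic>
    #include <cstddef>
    char* shared_allocate(std::atomic<char*>& heap_top, char* heap_end,
                          size_t size_in_bytes) {
      for (;;) {                                        // bind(retry)
        char* obj     = heap_top.load();                // rax = *top_addr
        char* new_top = obj + size_in_bytes;            // lea rbx
        if (new_top > heap_end) return nullptr;         // jcc -> slow_case
        // lock cmpxchg: publish only if nobody moved the top since the load
        if (heap_top.compare_exchange_weak(obj, new_top)) return obj;
      }                                                 // lost the race: retry
    }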
3100 3134
3122 #endif 3156 #endif
3123 3157
3124 // initialize remaining object fields: rdx was a multiple of 8 3158 // initialize remaining object fields: rdx was a multiple of 8
3125 { Label loop; 3159 { Label loop;
3126 __ bind(loop); 3160 __ bind(loop);
3127 __ movl(Address(rax, rdx, Address::times_8, sizeof(oopDesc) - 1*oopSize), rcx); 3161 __ movptr(Address(rax, rdx, Address::times_8, sizeof(oopDesc) - 1*oopSize), rcx);
3128 __ movl(Address(rax, rdx, Address::times_8, sizeof(oopDesc) - 2*oopSize), rcx); 3162 NOT_LP64(__ movptr(Address(rax, rdx, Address::times_8, sizeof(oopDesc) - 2*oopSize), rcx));
3129 __ decrement(rdx); 3163 __ decrement(rdx);
3130 __ jcc(Assembler::notZero, loop); 3164 __ jcc(Assembler::notZero, loop);
3131 } 3165 }
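At this point rdx holds the number of 8-byte chunks beyond the header (the instance size was rounded to a multiple of 8) and rcx holds zero; on 32-bit each chunk needs the pair of 4-byte stores, which is why the second movptr is wrapped in NOT_LP64. Restated as plain C++ (illustrative; assumes the chunk stores are suitably aligned):

    #include <cstddef>
    #include <cstdint>
    void clear_fields(char* obj, size_t header_bytes, size_t chunks /* rdx */) {
      for (size_t i = chunks; i != 0; i--) {            // decrement until zero
        uint64_t* p = (uint64_t*)(obj + header_bytes + (i - 1) * 8);
        *p = 0;     // one 8-byte store on LP64, two 4-byte stores on 32-bit
      }
    }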
3132 3166
3133 // initialize object header only. 3167 // initialize object header only.
3134 __ bind(initialize_header); 3168 __ bind(initialize_header);
3135 if (UseBiasedLocking) { 3169 if (UseBiasedLocking) {
3136 __ popl(rcx); // get saved klass back in the register. 3170 __ pop(rcx); // get saved klass back in the register.
3137 __ movl(rbx, Address(rcx, Klass::prototype_header_offset_in_bytes() + klassOopDesc::klass_part_offset_in_bytes())); 3171 __ movptr(rbx, Address(rcx, Klass::prototype_header_offset_in_bytes() + klassOopDesc::klass_part_offset_in_bytes()));
3138 __ movl(Address(rax, oopDesc::mark_offset_in_bytes ()), rbx); 3172 __ movptr(Address(rax, oopDesc::mark_offset_in_bytes ()), rbx);
3139 } else { 3173 } else {
3140 __ movl(Address(rax, oopDesc::mark_offset_in_bytes ()), 3174 __ movptr(Address(rax, oopDesc::mark_offset_in_bytes ()),
3141 (int)markOopDesc::prototype()); // header 3175 (int32_t)markOopDesc::prototype()); // header
3142 __ popl(rcx); // get saved klass back in the register. 3176 __ pop(rcx); // get saved klass back in the register.
3143 } 3177 }
3144 __ movl(Address(rax, oopDesc::klass_offset_in_bytes()), rcx); // klass 3178 __ movptr(Address(rax, oopDesc::klass_offset_in_bytes()), rcx); // klass
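What gets written is the two-word object header: a mark word (markOopDesc::prototype(), or the per-klass prototype header when biased locking is on) followed by the klass pointer that was saved by the earlier push. An illustrative layout, field names for exposition only:

    #include <cstdint>
    // Offsets correspond to oopDesc::mark_offset_in_bytes() and
    // oopDesc::klass_offset_in_bytes(); names are for exposition.
    struct ObjectHeader {
      uintptr_t mark;   // prototype mark word, possibly biasable per klass
      void*     klass;  // rcx, restored by the pop above
    };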
3145 3179
3146 { 3180 {
3147 SkipIfEqual skip_if(_masm, &DTraceAllocProbes, 0); 3181 SkipIfEqual skip_if(_masm, &DTraceAllocProbes, 0);
3148 // Trigger dtrace event for fastpath 3182 // Trigger dtrace event for fastpath
3149 __ push(atos); 3183 __ push(atos);
3155 __ jmp(done); 3189 __ jmp(done);
3156 } 3190 }
3157 3191
3158 // slow case 3192 // slow case
3159 __ bind(slow_case); 3193 __ bind(slow_case);
3160 __ popl(rcx); // restore stack pointer to what it was when we came in. 3194 __ pop(rcx); // restore stack pointer to what it was when we came in.
3161 __ get_constant_pool(rax); 3195 __ get_constant_pool(rax);
3162 __ get_unsigned_2_byte_index_at_bcp(rdx, 1); 3196 __ get_unsigned_2_byte_index_at_bcp(rdx, 1);
3163 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), rax, rdx); 3197 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), rax, rdx);
3164 3198
3165 // continue 3199 // continue
3192 3226
3193 3227
3194 void TemplateTable::checkcast() { 3228 void TemplateTable::checkcast() {
3195 transition(atos, atos); 3229 transition(atos, atos);
3196 Label done, is_null, ok_is_subtype, quicked, resolved; 3230 Label done, is_null, ok_is_subtype, quicked, resolved;
3197 __ testl(rax, rax); // Object is in EAX 3231 __ testptr(rax, rax); // Object is in EAX
3198 __ jcc(Assembler::zero, is_null); 3232 __ jcc(Assembler::zero, is_null);
3199 3233
3200 // Get cpool & tags index 3234 // Get cpool & tags index
3201 __ get_cpool_and_tags(rcx, rdx); // ECX=cpool, EDX=tags array 3235 __ get_cpool_and_tags(rcx, rdx); // ECX=cpool, EDX=tags array
3202 __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // EBX=index 3236 __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // EBX=index
3209 __ pop_ptr(rdx); 3243 __ pop_ptr(rdx);
3210 __ jmpb(resolved); 3244 __ jmpb(resolved);
3211 3245
3212 // Get superklass in EAX and subklass in EBX 3246 // Get superklass in EAX and subklass in EBX
3213 __ bind(quicked); 3247 __ bind(quicked);
3214 __ movl(rdx, rax); // Save object in EDX; EAX needed for subtype check 3248 __ mov(rdx, rax); // Save object in EDX; EAX needed for subtype check
3215 __ movl(rax, Address(rcx, rbx, Address::times_4, sizeof(constantPoolOopDesc))); 3249 __ movptr(rax, Address(rcx, rbx, Address::times_ptr, sizeof(constantPoolOopDesc)));
3216 3250
3217 __ bind(resolved); 3251 __ bind(resolved);
3218 __ movl(rbx, Address(rdx, oopDesc::klass_offset_in_bytes())); 3252 __ movptr(rbx, Address(rdx, oopDesc::klass_offset_in_bytes()));
3219 3253
3220 // Generate subtype check. Blows ECX. Resets EDI. Object in EDX. 3254 // Generate subtype check. Blows ECX. Resets EDI. Object in EDX.
3221 // Superklass in EAX. Subklass in EBX. 3255 // Superklass in EAX. Subklass in EBX.
3222 __ gen_subtype_check( rbx, ok_is_subtype ); 3256 __ gen_subtype_check( rbx, ok_is_subtype );
3223 3257
3224 // Come here on failure 3258 // Come here on failure
3225 __ pushl(rdx); 3259 __ push(rdx);
3226 // object is at TOS 3260 // object is at TOS
3227 __ jump(ExternalAddress(Interpreter::_throw_ClassCastException_entry)); 3261 __ jump(ExternalAddress(Interpreter::_throw_ClassCastException_entry));
3228 3262
3229 // Come here on success 3263 // Come here on success
3230 __ bind(ok_is_subtype); 3264 __ bind(ok_is_subtype);
3231 __ movl(rax,rdx); // Restore object in EDX 3265 __ mov(rax,rdx); // Restore object in EDX
3232 3266
3233 // Collect counts on whether this check-cast sees NULLs a lot or not. 3267 // Collect counts on whether this check-cast sees NULLs a lot or not.
3234 if (ProfileInterpreter) { 3268 if (ProfileInterpreter) {
3235 __ jmp(done); 3269 __ jmp(done);
3236 __ bind(is_null); 3270 __ bind(is_null);
3243 3277
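The whole sequence implements the usual checkcast contract: null always passes; otherwise the object's klass must be a subtype of the resolved constant-pool klass, else ClassCastException. A self-contained sketch with illustrative types (gen_subtype_check emits the real, much faster test):

    struct Klass {                       // illustrative single-super chain
      const Klass* super;
      bool is_subtype_of(const Klass* k) const {
        for (const Klass* c = this; c != nullptr; c = c->super)
          if (c == k) return true;
        return false;
      }
    };
    struct Obj { Klass* klass; };
    Obj* checkcast(Obj* obj, const Klass* resolved) {
      if (obj == nullptr) return obj;                   // is_null path
      if (!obj->klass->is_subtype_of(resolved))
        throw "ClassCastException";   // _throw_ClassCastException_entry
      return obj;                                       // ok_is_subtype
    }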
3244 3278
3245 void TemplateTable::instanceof() { 3279 void TemplateTable::instanceof() {
3246 transition(atos, itos); 3280 transition(atos, itos);
3247 Label done, is_null, ok_is_subtype, quicked, resolved; 3281 Label done, is_null, ok_is_subtype, quicked, resolved;
3248 __ testl(rax, rax); 3282 __ testptr(rax, rax);
3249 __ jcc(Assembler::zero, is_null); 3283 __ jcc(Assembler::zero, is_null);
3250 3284
3251 // Get cpool & tags index 3285 // Get cpool & tags index
3252 __ get_cpool_and_tags(rcx, rdx); // ECX=cpool, EDX=tags array 3286 __ get_cpool_and_tags(rcx, rdx); // ECX=cpool, EDX=tags array
3253 __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // EBX=index 3287 __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // EBX=index
3256 __ jcc(Assembler::equal, quicked); 3290 __ jcc(Assembler::equal, quicked);
3257 3291
3258 __ push(atos); 3292 __ push(atos);
3259 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc) ); 3293 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc) );
3260 __ pop_ptr(rdx); 3294 __ pop_ptr(rdx);
3261 __ movl(rdx, Address(rdx, oopDesc::klass_offset_in_bytes())); 3295 __ movptr(rdx, Address(rdx, oopDesc::klass_offset_in_bytes()));
3262 __ jmp(resolved); 3296 __ jmp(resolved);
3263 3297
3264 // Get superklass in EAX and subklass in EDX 3298 // Get superklass in EAX and subklass in EDX
3265 __ bind(quicked); 3299 __ bind(quicked);
3266 __ movl(rdx, Address(rax, oopDesc::klass_offset_in_bytes())); 3300 __ movptr(rdx, Address(rax, oopDesc::klass_offset_in_bytes()));
3267 __ movl(rax, Address(rcx, rbx, Address::times_4, sizeof(constantPoolOopDesc))); 3301 __ movptr(rax, Address(rcx, rbx, Address::times_ptr, sizeof(constantPoolOopDesc)));
3268 3302
3269 __ bind(resolved); 3303 __ bind(resolved);
3270 3304
3271 // Generate subtype check. Blows ECX. Resets EDI. 3305 // Generate subtype check. Blows ECX. Resets EDI.
3272 // Superklass in EAX. Subklass in EDX. 3306 // Superklass in EAX. Subklass in EDX.
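instanceof differs from checkcast only in its result: null yields 0, and a failed subtype check pushes 0 instead of throwing. Sketch, reusing the illustrative Obj/Klass types from the checkcast sketch above:

    int instance_of(const Obj* obj, const Klass* resolved) {
      if (obj == nullptr) return 0;                     // is_null: push 0
      return obj->klass->is_subtype_of(resolved) ? 1 : 0;
    }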
3304 transition(vtos, vtos); 3338 transition(vtos, vtos);
3305 3339
3306 // get the unpatched bytecode 3340 // get the unpatched bytecode
3307 __ get_method(rcx); 3341 __ get_method(rcx);
3308 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::get_original_bytecode_at), rcx, rsi); 3342 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::get_original_bytecode_at), rcx, rsi);
3309 __ movl(rbx, rax); 3343 __ mov(rbx, rax);
3310 3344
3311 // post the breakpoint event 3345 // post the breakpoint event
3312 __ get_method(rcx); 3346 __ get_method(rcx);
3313 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::_breakpoint), rcx, rsi); 3347 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::_breakpoint), rcx, rsi);
3314 3348
3360 // initialize entry pointer 3394 // initialize entry pointer
3361 __ xorl(rdx, rdx); // points to free slot or NULL 3395 __ xorl(rdx, rdx); // points to free slot or NULL
3362 3396
3363 // find a free slot in the monitor block (result in rdx) 3397 // find a free slot in the monitor block (result in rdx)
3364 { Label entry, loop, exit; 3398 { Label entry, loop, exit;
3365 __ movl(rcx, monitor_block_top); // points to current entry, starting with top-most entry 3399 __ movptr(rcx, monitor_block_top); // points to current entry, starting with top-most entry
3366 __ leal(rbx, monitor_block_bot); // points to word before bottom of monitor block 3400 __ lea(rbx, monitor_block_bot); // points to word before bottom of monitor block
3367 __ jmpb(entry); 3401 __ jmpb(entry);
3368 3402
3369 __ bind(loop); 3403 __ bind(loop);
3370 __ cmpl(Address(rcx, BasicObjectLock::obj_offset_in_bytes()), NULL_WORD); // check if current entry is used 3404 __ cmpptr(Address(rcx, BasicObjectLock::obj_offset_in_bytes()), (int32_t)NULL_WORD); // check if current entry is used
3371 3405
3372 // TODO - need new func here - kbt 3406 // TODO - need new func here - kbt
3373 if (VM_Version::supports_cmov()) { 3407 if (VM_Version::supports_cmov()) {
3374 __ cmovl(Assembler::equal, rdx, rcx); // if not used then remember entry in rdx 3408 __ cmov(Assembler::equal, rdx, rcx); // if not used then remember entry in rdx
3375 } else { 3409 } else {
3376 Label L; 3410 Label L;
3377 __ jccb(Assembler::notEqual, L); 3411 __ jccb(Assembler::notEqual, L);
3378 __ movl(rdx, rcx); // if not used then remember entry in rdx 3412 __ mov(rdx, rcx); // if not used then remember entry in rdx
3379 __ bind(L); 3413 __ bind(L);
3380 } 3414 }
3381 __ cmpl(rax, Address(rcx, BasicObjectLock::obj_offset_in_bytes())); // check if current entry is for same object 3415 __ cmpptr(rax, Address(rcx, BasicObjectLock::obj_offset_in_bytes())); // check if current entry is for same object
3382 __ jccb(Assembler::equal, exit); // if same object then stop searching 3416 __ jccb(Assembler::equal, exit); // if same object then stop searching
3383 __ addl(rcx, entry_size); // otherwise advance to next entry 3417 __ addptr(rcx, entry_size); // otherwise advance to next entry
3384 __ bind(entry); 3418 __ bind(entry);
3385 __ cmpl(rcx, rbx); // check if bottom reached 3419 __ cmpptr(rcx, rbx); // check if bottom reached
3386 __ jcc(Assembler::notEqual, loop); // if not at bottom then check this entry 3420 __ jcc(Assembler::notEqual, loop); // if not at bottom then check this entry
3387 __ bind(exit); 3421 __ bind(exit);
3388 } 3422 }
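The scan tracks two things while walking from monitor_block_top toward the bottom: the most recent unused entry seen (kept in rdx by the cmov, or by its branch fallback on pre-cmov CPUs), and whether an entry for this object already exists, which ends the search early. Restated with BasicObjectLock reduced to its obj field for illustration:

    struct BasicObjectLock { void* obj; };              // illustrative subset
    BasicObjectLock* find_free_slot(BasicObjectLock* top,
                                    BasicObjectLock* bottom, void* obj) {
      BasicObjectLock* free_slot = nullptr;             // rdx
      for (BasicObjectLock* cur = top; cur != bottom; cur++) {   // rcx
        if (cur->obj == nullptr) free_slot = cur;       // remember unused entry
        if (cur->obj == obj) break;                     // same object: stop
      }
      return free_slot;   // null => no free slot, grow the monitor block
    }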
3389 3423
3390 __ testl(rdx, rdx); // check if a slot has been found 3424 __ testptr(rdx, rdx); // check if a slot has been found
3391 __ jccb(Assembler::notZero, allocated); // if found, continue with that one 3425 __ jccb(Assembler::notZero, allocated); // if found, continue with that one
3392 3426
3393 // allocate one if there's no free slot 3427 // allocate one if there's no free slot
3394 { Label entry, loop; 3428 { Label entry, loop;
3395 // 1. compute new pointers // rsp: old expression stack top 3429 // 1. compute new pointers // rsp: old expression stack top
3396 __ movl(rdx, monitor_block_bot); // rdx: old expression stack bottom 3430 __ movptr(rdx, monitor_block_bot); // rdx: old expression stack bottom
3397 __ subl(rsp, entry_size); // move expression stack top 3431 __ subptr(rsp, entry_size); // move expression stack top
3398 __ subl(rdx, entry_size); // move expression stack bottom 3432 __ subptr(rdx, entry_size); // move expression stack bottom
3399 __ movl(rcx, rsp); // set start value for copy loop 3433 __ mov(rcx, rsp); // set start value for copy loop
3400 __ movl(monitor_block_bot, rdx); // set new monitor block bottom 3434 __ movptr(monitor_block_bot, rdx); // set new monitor block bottom
3401 __ jmp(entry); 3435 __ jmp(entry);
3402 // 2. move expression stack contents 3436 // 2. move expression stack contents
3403 __ bind(loop); 3437 __ bind(loop);
3404 __ movl(rbx, Address(rcx, entry_size)); // load expression stack word from old location 3438 __ movptr(rbx, Address(rcx, entry_size)); // load expression stack word from old location
3405 __ movl(Address(rcx, 0), rbx); // and store it at new location 3439 __ movptr(Address(rcx, 0), rbx); // and store it at new location
3406 __ addl(rcx, wordSize); // advance to next word 3440 __ addptr(rcx, wordSize); // advance to next word
3407 __ bind(entry); 3441 __ bind(entry);
3408 __ cmpl(rcx, rdx); // check if bottom reached 3442 __ cmpptr(rcx, rdx); // check if bottom reached
3409 __ jcc(Assembler::notEqual, loop); // if not at bottom then copy next word 3443 __ jcc(Assembler::notEqual, loop); // if not at bottom then copy next word
3410 } 3444 }
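Growing the block slides the live expression stack down by one entry_size, opening a fresh slot just above the old bottom; the copy walks upward from the new rsp until it meets the new bottom. A word-level sketch (stack grows toward lower addresses):

    #include <cstddef>
    #include <cstdint>
    void grow_monitor_block(intptr_t*& sp, intptr_t*& block_bot,
                            size_t entry_words) {
      sp        -= entry_words;                 // move expression stack top
      block_bot -= entry_words;                 // move expression stack bottom
      for (intptr_t* p = sp; p != block_bot; p++)   // rcx walks up to rdx
        p[0] = p[entry_words];                  // store word at new location
    }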
3411 3445
3412 // call run-time routine 3446 // call run-time routine
3413 // rdx: points to monitor entry 3447 // rdx: points to monitor entry
3415 3449
3416 // Increment bcp to point to the next bytecode, so exception handling for async. exceptions works correctly. 3450 // Increment bcp to point to the next bytecode, so exception handling for async. exceptions works correctly.
3417 // The object has already been popped from the stack, so the expression stack looks correct. 3451 // The object has already been popped from the stack, so the expression stack looks correct.
3418 __ increment(rsi); 3452 __ increment(rsi);
3419 3453
3420 __ movl(Address(rdx, BasicObjectLock::obj_offset_in_bytes()), rax); // store object 3454 __ movptr(Address(rdx, BasicObjectLock::obj_offset_in_bytes()), rax); // store object
3421 __ lock_object(rdx); 3455 __ lock_object(rdx);
3422 3456
3423 // check to make sure this monitor doesn't cause stack overflow after locking 3457 // check to make sure this monitor doesn't cause stack overflow after locking
3424 __ save_bcp(); // in case of exception 3458 __ save_bcp(); // in case of exception
3425 __ generate_stack_overflow_check(0); 3459 __ generate_stack_overflow_check(0);
3440 const int entry_size = ( frame::interpreter_frame_monitor_size() * wordSize); 3474 const int entry_size = ( frame::interpreter_frame_monitor_size() * wordSize);
3441 Label found; 3475 Label found;
3442 3476
3443 // find matching slot 3477 // find matching slot
3444 { Label entry, loop; 3478 { Label entry, loop;
3445 __ movl(rdx, monitor_block_top); // points to current entry, starting with top-most entry 3479 __ movptr(rdx, monitor_block_top); // points to current entry, starting with top-most entry
3446 __ leal(rbx, monitor_block_bot); // points to word before bottom of monitor block 3480 __ lea(rbx, monitor_block_bot); // points to word before bottom of monitor block
3447 __ jmpb(entry); 3481 __ jmpb(entry);
3448 3482
3449 __ bind(loop); 3483 __ bind(loop);
3450 __ cmpl(rax, Address(rdx, BasicObjectLock::obj_offset_in_bytes())); // check if current entry is for same object 3484 __ cmpptr(rax, Address(rdx, BasicObjectLock::obj_offset_in_bytes())); // check if current entry is for same object
3451 __ jcc(Assembler::equal, found); // if same object then stop searching 3485 __ jcc(Assembler::equal, found); // if same object then stop searching
3452 __ addl(rdx, entry_size); // otherwise advance to next entry 3486 __ addptr(rdx, entry_size); // otherwise advance to next entry
3453 __ bind(entry); 3487 __ bind(entry);
3454 __ cmpl(rdx, rbx); // check if bottom reached 3488 __ cmpptr(rdx, rbx); // check if bottom reached
3455 __ jcc(Assembler::notEqual, loop); // if not at bottom then check this entry 3489 __ jcc(Assembler::notEqual, loop); // if not at bottom then check this entry
3456 } 3490 }
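The unlock scan is the same walk minus the free-slot bookkeeping: only an exact object match will do, and falling off the bottom is the error case below. Sketch, reusing the illustrative BasicObjectLock above:

    BasicObjectLock* find_entry(BasicObjectLock* top,
                                BasicObjectLock* bottom, void* obj) {
      for (BasicObjectLock* cur = top; cur != bottom; cur++)
        if (cur->obj == obj) return cur;                // found
      return nullptr;   // unlocking was not block-structured: throw
    }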
3457 3491
3458 // error handling: unlocking was not block-structured 3492 // error handling: unlocking was not block-structured
3459 Label end; 3493 Label end;
3474 // Wide instructions 3508 // Wide instructions
3475 3509
3476 void TemplateTable::wide() { 3510 void TemplateTable::wide() {
3477 transition(vtos, vtos); 3511 transition(vtos, vtos);
3478 __ load_unsigned_byte(rbx, at_bcp(1)); 3512 __ load_unsigned_byte(rbx, at_bcp(1));
3479 __ jmp(Address(noreg, rbx, Address::times_4, int(Interpreter::_wentry_point))); 3513 ExternalAddress wtable((address)Interpreter::_wentry_point);
3514 __ jump(ArrayAddress(wtable, Address(noreg, rbx, Address::times_ptr)));
3480 // Note: the rsi increment step is part of the individual wide bytecode implementations 3515 // Note: the rsi increment step is part of the individual wide bytecode implementations
3481 } 3516 }
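wide dispatches through a 256-entry table of wide-variant entry points indexed by the byte after the wide opcode; the ArrayAddress form on the new side lets the table base be relocated, which a fixed 32-bit displacement cannot express on 64-bit. As a plain function-pointer table (illustrative; Interpreter::_wentry_point is the real table):

    typedef void (*WideEntry)();        // one generated stub per wide bytecode
    WideEntry wentry_point[256];        // illustrative stand-in for the table
    void dispatch_wide(unsigned char next_bc) {
      if (wentry_point[next_bc] != nullptr)   // real table has no null entries
        wentry_point[next_bc]();              // jmp [table + bc * ptr_size]
    }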
3482 3517
3483 3518
3484 //---------------------------------------------------------------------------------------------------- 3519 //----------------------------------------------------------------------------------------------------
3488 transition(vtos, atos); 3523 transition(vtos, atos);
3489 __ load_unsigned_byte(rax, at_bcp(3)); // get number of dimensions 3524 __ load_unsigned_byte(rax, at_bcp(3)); // get number of dimensions
3490 // last dim is on top of stack; we want address of first one: 3525 // last dim is on top of stack; we want address of first one:
3491 // first_addr = last_addr + (ndims - 1) * stackElementSize - 1*wordSize 3526 // first_addr = last_addr + (ndims - 1) * stackElementSize - 1*wordSize
3492 // the trailing wordSize adjustment points to the beginning of the array. 3527 // the trailing wordSize adjustment points to the beginning of the array.
3493 __ leal( rax, Address(rsp, rax, Interpreter::stackElementScale(), -wordSize)); 3528 __ lea( rax, Address(rsp, rax, Interpreter::stackElementScale(), -wordSize));
3494 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::multianewarray), rax); // pass in rax, 3529 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::multianewarray), rax); // pass in rax,
3495 __ load_unsigned_byte(rbx, at_bcp(3)); 3530 __ load_unsigned_byte(rbx, at_bcp(3));
3496 __ leal(rsp, Address(rsp, rbx, Interpreter::stackElementScale())); // get rid of counts 3531 __ lea(rsp, Address(rsp, rbx, Interpreter::stackElementScale())); // get rid of counts
3497 } 3532 }
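The lea computes the address of the first (outermost) dimension count in one instruction: the last count sits at rsp, counts are one stack element apart, so the first is rsp + ndims*stackElementSize - wordSize. A worked sketch in word units (elem_words stands in for the stack element scale):

    #include <cstdint>
    intptr_t* first_dim_addr(intptr_t* sp, int ndims, int elem_words) {
      // sp points at the last (innermost) count; the first count is highest
      return sp + (intptr_t)ndims * elem_words - 1;   // -1 word == -wordSize
    }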
3498 3533
3499 #endif /* !CC_INTERP */ 3534 #endif /* !CC_INTERP */