comparison src/cpu/x86/vm/templateTable_x86_64.cpp @ 304:dc7f315e41f7

5108146: Merge i486 and amd64 cpu directories
6459804: Want client (c1) compiler for x86_64 (amd64) for faster start-up
Reviewed-by: kvn
author never
date Wed, 27 Aug 2008 00:21:55 -0700
parents d1605aabd0a1
children f8199438385b
comparing 303:fa4d1d240383 with 304:dc7f315e41f7
23 */ 23 */
24 24
25 #include "incls/_precompiled.incl" 25 #include "incls/_precompiled.incl"
26 #include "incls/_templateTable_x86_64.cpp.incl" 26 #include "incls/_templateTable_x86_64.cpp.incl"
27 27
28 #ifndef CC_INTERP
29
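The whole template interpreter is now bracketed by a CC_INTERP guard: builds that use the C++ interpreter compile this file out entirely. The guard opened here is presumably closed at the end of the file (outside this excerpt), so the structure is simply:

    #ifndef CC_INTERP
    // ... all template interpreter definitions ...
    #endif // !CC_INTERP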
28 #define __ _masm-> 30 #define __ _masm->
29 31
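The `__` shorthand is what makes the templates below read like assembly listings: each `__` token just forwards to the macro assembler held in `_masm`. For example:

    #define __ _masm->
    __ push_i(rax);      // preprocesses to: _masm->push_i(rax);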
30 // Platform-dependent initialization 32 // Platform-dependent initialization
31 33
32 void TemplateTable::pd_initialize() { 34 void TemplateTable::pd_initialize() {
315 __ movl(rax, Address(rcx, rbx, Address::times_8, base_offset)); 317 __ movl(rax, Address(rcx, rbx, Address::times_8, base_offset));
316 __ push_i(rax); 318 __ push_i(rax);
317 __ jmp(Done); 319 __ jmp(Done);
318 320
319 __ bind(isOop); 321 __ bind(isOop);
320 __ movq(rax, Address(rcx, rbx, Address::times_8, base_offset)); 322 __ movptr(rax, Address(rcx, rbx, Address::times_8, base_offset));
321 __ push_ptr(rax); 323 __ push_ptr(rax);
322 324
323 if (VerifyOops) { 325 if (VerifyOops) {
324 __ verify_oop(rax); 326 __ verify_oop(rax);
325 } 327 }
353 __ bind(Done); 355 __ bind(Done);
354 } 356 }
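This hunk shows the pattern that dominates the whole changeset: width-specific instructions (movq, addq, leaq, ...) are replaced by pointer-width wrappers (movptr, addptr, lea, mov, testptr, cmpptr) so the merged i486/amd64 sources can compile in both 32-bit and 64-bit builds, while genuinely 32-bit operations (the movl of the int case above) are left alone. A plausible shape for such a wrapper, assuming the usual LP64_ONLY/NOT_LP64 helper macros; the authoritative definitions live in the shared assembler sources:

    // Sketch only, not the verbatim HotSpot definition.
    void MacroAssembler::movptr(Register dst, Address src) {
      LP64_ONLY(movq(dst, src));   // 64-bit: pointers are 8 bytes
      NOT_LP64(movl(dst, src));    // 32-bit: pointers are 4 bytes
    }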
355 357
356 void TemplateTable::locals_index(Register reg, int offset) { 358 void TemplateTable::locals_index(Register reg, int offset) {
357 __ load_unsigned_byte(reg, at_bcp(offset)); 359 __ load_unsigned_byte(reg, at_bcp(offset));
358 __ negq(reg); 360 __ negptr(reg);
359 if (TaggedStackInterpreter) __ shlq(reg, 1); // index = index*2 361 if (TaggedStackInterpreter) __ shlptr(reg, 1); // index = index*2
360 } 362 }
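locals_index negates the bytecode's index because locals are addressed downward from the locals pointer (r14 in this port), and doubles it under TaggedStackInterpreter, where every local carries a tag word beside its value. A semantic model of the addressing that iaddress/aaddress then perform with the negated register, under those assumptions:

    // Model only; the slot layout is the assumption stated above.
    intptr_t* local_addr(intptr_t* locals_base, int index, bool tagged) {
      int slot_words = tagged ? 2 : 1;            // value (+ tag) per local
      return locals_base - (intptr_t)index * slot_words;
    }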
361 363
362 void TemplateTable::iload() { 364 void TemplateTable::iload() {
363 transition(vtos, itos); 365 transition(vtos, itos);
364 if (RewriteFrequentPairs) { 366 if (RewriteFrequentPairs) {
441 } 443 }
442 444
443 void TemplateTable::aload() { 445 void TemplateTable::aload() {
444 transition(vtos, atos); 446 transition(vtos, atos);
445 locals_index(rbx); 447 locals_index(rbx);
446 __ movq(rax, aaddress(rbx)); 448 __ movptr(rax, aaddress(rbx));
447 debug_only(__ verify_local_tag(frame::TagReference, rbx)); 449 debug_only(__ verify_local_tag(frame::TagReference, rbx));
448 } 450 }
449 451
450 void TemplateTable::locals_index_wide(Register reg) { 452 void TemplateTable::locals_index_wide(Register reg) {
451 __ movl(reg, at_bcp(2)); 453 __ movl(reg, at_bcp(2));
452 __ bswapl(reg); 454 __ bswapl(reg);
453 __ shrl(reg, 16); 455 __ shrl(reg, 16);
454 __ negq(reg); 456 __ negptr(reg);
455 if (TaggedStackInterpreter) __ shlq(reg, 1); // index = index*2 457 if (TaggedStackInterpreter) __ shlptr(reg, 1); // index = index*2
456 } 458 }
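The wide form's operand is a big-endian u2 at bcp+2; the sequence loads four bytes, byte-swaps them, then shifts right by 16 so only the two operand bytes remain. Equivalent C:

    #include <stdint.h>
    // Model of locals_index_wide's operand decode.
    uint16_t wide_index(const uint8_t* bcp) {
      return (uint16_t)((bcp[2] << 8) | bcp[3]);  // big-endian u2
    }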
457 459
458 void TemplateTable::wide_iload() { 460 void TemplateTable::wide_iload() {
459 transition(vtos, itos); 461 transition(vtos, itos);
460 locals_index_wide(rbx); 462 locals_index_wide(rbx);
484 } 486 }
485 487
486 void TemplateTable::wide_aload() { 488 void TemplateTable::wide_aload() {
487 transition(vtos, atos); 489 transition(vtos, atos);
488 locals_index_wide(rbx); 490 locals_index_wide(rbx);
489 __ movq(rax, aaddress(rbx)); 491 __ movptr(rax, aaddress(rbx));
490 debug_only(__ verify_local_tag(frame::TagReference, rbx)); 492 debug_only(__ verify_local_tag(frame::TagReference, rbx));
491 } 493 }
492 494
493 void TemplateTable::index_check(Register array, Register index) { 495 void TemplateTable::index_check(Register array, Register index) {
494 // destroys rbx 496 // destroys rbx
495 // check array 497 // check array
496 __ null_check(array, arrayOopDesc::length_offset_in_bytes()); 498 __ null_check(array, arrayOopDesc::length_offset_in_bytes());
497 // sign extend index for use by indexed load 499 // sign extend index for use by indexed load
498 __ movslq(index, index); 500 __ movl2ptr(index, index);
499 // check index 501 // check index
500 __ cmpl(index, Address(array, arrayOopDesc::length_offset_in_bytes())); 502 __ cmpl(index, Address(array, arrayOopDesc::length_offset_in_bytes()));
501 if (index != rbx) { 503 if (index != rbx) {
502 // ??? convention: move aberrant index into ebx for exception message 504 // ??? convention: move aberrant index into ebx for exception message
503 assert(rbx != array, "different registers"); 505 assert(rbx != array, "different registers");
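index_check leans on two classic tricks. The null check touches the array's length field, so a null array faults right there (or is tested explicitly where implicit null checks are unavailable), and the bounds check needs only one unsigned comparison, because a negative index viewed unsigned is larger than any legal length; the throwing jump itself falls outside this hunk. The movslq-to-movl2ptr rename is the same 32/64-bit merge at work: on amd64 it sign-extends the int index to pointer width, on i486 it degenerates to a plain move. Model of the bounds logic, with a stand-in throw helper:

    // Model; throw_AIOOBE is a stand-in, not a HotSpot entry point.
    void index_check_model(int32_t index, int32_t length) {
      if ((uint32_t)index >= (uint32_t)length)    // also catches index < 0
        throw_AIOOBE(index);
    }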
640 debug_only(__ verify_local_tag(frame::TagCategory2, n)); 642 debug_only(__ verify_local_tag(frame::TagCategory2, n));
641 } 643 }
642 644
643 void TemplateTable::aload(int n) { 645 void TemplateTable::aload(int n) {
644 transition(vtos, atos); 646 transition(vtos, atos);
645 __ movq(rax, aaddress(n)); 647 __ movptr(rax, aaddress(n));
646 debug_only(__ verify_local_tag(frame::TagReference, n)); 648 debug_only(__ verify_local_tag(frame::TagReference, n));
647 } 649 }
648 650
649 void TemplateTable::aload_0() { 651 void TemplateTable::aload_0() {
650 transition(vtos, atos); 652 transition(vtos, atos);
755 757
756 void TemplateTable::astore() { 758 void TemplateTable::astore() {
757 transition(vtos, vtos); 759 transition(vtos, vtos);
758 __ pop_ptr(rax, rdx); // will need to pop tag too 760 __ pop_ptr(rax, rdx); // will need to pop tag too
759 locals_index(rbx); 761 locals_index(rbx);
760 __ movq(aaddress(rbx), rax); 762 __ movptr(aaddress(rbx), rax);
761 __ tag_local(rdx, rbx); // store tag from stack, might be returnAddr 763 __ tag_local(rdx, rbx); // store tag from stack, might be returnAddr
762 } 764 }
763 765
764 void TemplateTable::wide_istore() { 766 void TemplateTable::wide_istore() {
765 transition(vtos, vtos); 767 transition(vtos, vtos);
795 797
796 void TemplateTable::wide_astore() { 798 void TemplateTable::wide_astore() {
797 transition(vtos, vtos); 799 transition(vtos, vtos);
798 __ pop_ptr(rax, rdx); // will need to pop tag too 800 __ pop_ptr(rax, rdx); // will need to pop tag too
799 locals_index_wide(rbx); 801 locals_index_wide(rbx);
800 __ movq(aaddress(rbx), rax); 802 __ movptr(aaddress(rbx), rax);
801 __ tag_local(rdx, rbx); // store tag from stack, might be returnAddr 803 __ tag_local(rdx, rbx); // store tag from stack, might be returnAddr
802 } 804 }
803 805
804 void TemplateTable::iastore() { 806 void TemplateTable::iastore() {
805 transition(itos, vtos); 807 transition(itos, vtos);
859 861
860 void TemplateTable::aastore() { 862 void TemplateTable::aastore() {
861 Label is_null, ok_is_subtype, done; 863 Label is_null, ok_is_subtype, done;
862 transition(vtos, vtos); 864 transition(vtos, vtos);
863 // stack: ..., array, index, value 865 // stack: ..., array, index, value
864 __ movq(rax, at_tos()); // value 866 __ movptr(rax, at_tos()); // value
865 __ movl(rcx, at_tos_p1()); // index 867 __ movl(rcx, at_tos_p1()); // index
866 __ movq(rdx, at_tos_p2()); // array 868 __ movptr(rdx, at_tos_p2()); // array
867 index_check(rdx, rcx); // kills rbx 869 index_check(rdx, rcx); // kills rbx
868 // do array store check - check for NULL value first 870 // do array store check - check for NULL value first
869 __ testq(rax, rax); 871 __ testptr(rax, rax);
870 __ jcc(Assembler::zero, is_null); 872 __ jcc(Assembler::zero, is_null);
871 873
872 // Move subklass into rbx 874 // Move subklass into rbx
873 __ load_klass(rbx, rax); 875 __ load_klass(rbx, rax);
874 // Move superklass into rax 876 // Move superklass into rax
875 __ load_klass(rax, rdx); 877 __ load_klass(rax, rdx);
876 __ movq(rax, Address(rax, 878 __ movptr(rax, Address(rax,
877 sizeof(oopDesc) + 879 sizeof(oopDesc) +
878 objArrayKlass::element_klass_offset_in_bytes())); 880 objArrayKlass::element_klass_offset_in_bytes()));
879 // Compress array + index*oopSize + 12 into a single register. Frees rcx. 881 // Compress array + index*oopSize + 12 into a single register. Frees rcx.
880 __ leaq(rdx, Address(rdx, rcx, 882 __ lea(rdx, Address(rdx, rcx,
881 UseCompressedOops ? Address::times_4 : Address::times_8, 883 UseCompressedOops ? Address::times_4 : Address::times_8,
882 arrayOopDesc::base_offset_in_bytes(T_OBJECT))); 884 arrayOopDesc::base_offset_in_bytes(T_OBJECT)));
883 885
884 // Generate subtype check. Blows rcx, rdi 886 // Generate subtype check. Blows rcx, rdi
885 // Superklass in rax. Subklass in rbx. 887 // Superklass in rax. Subklass in rbx.
886 __ gen_subtype_check(rbx, ok_is_subtype); 888 __ gen_subtype_check(rbx, ok_is_subtype);
887 889
889 // object is at TOS 891 // object is at TOS
890 __ jump(ExternalAddress(Interpreter::_throw_ArrayStoreException_entry)); 892 __ jump(ExternalAddress(Interpreter::_throw_ArrayStoreException_entry));
891 893
892 // Come here on success 894 // Come here on success
893 __ bind(ok_is_subtype); 895 __ bind(ok_is_subtype);
894 __ movq(rax, at_tos()); // Value 896 __ movptr(rax, at_tos()); // Value
895 __ store_heap_oop(Address(rdx, 0), rax); 897 __ store_heap_oop(Address(rdx, 0), rax);
896 __ store_check(rdx); 898 __ store_check(rdx);
897 __ jmp(done); 899 __ jmp(done);
898 900
899 // Have a NULL in rax, rdx=array, ecx=index. Store NULL at ary[idx] 901 // Have a NULL in rax, rdx=array, ecx=index. Store NULL at ary[idx]
904 arrayOopDesc::base_offset_in_bytes(T_OBJECT)), 906 arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
905 rax); 907 rax);
906 908
907 // Pop stack arguments 909 // Pop stack arguments
908 __ bind(done); 910 __ bind(done);
909 __ addq(rsp, 3 * Interpreter::stackElementSize()); 911 __ addptr(rsp, 3 * Interpreter::stackElementSize());
910 } 912 }
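aastore is the full array store check: a null store bypasses the check; otherwise the value's klass must be a subtype of the array's element klass (gen_subtype_check), and the oop store is followed by a card mark (store_check). Note the element address is scaled by 4 under UseCompressedOops and by 8 otherwise. The semantics, as a C++-flavored model with stand-in names (objArrayModel, klass_of, and the throw helper are not HotSpot identifiers):

    // Model of the generated check.
    void aastore_model(objArrayModel* a, int i, oop v) {
      if (v != NULL && !klass_of(v)->is_subtype_of(a->element_klass()))
        throw_ArrayStoreException();
      a->obj_at_put(i, v);   // store_heap_oop, then store_check card mark
    }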
911 913
912 void TemplateTable::bastore() { 914 void TemplateTable::bastore() {
913 transition(itos, vtos); 915 transition(itos, vtos);
914 __ pop_i(rbx); 916 __ pop_i(rbx);
966 } 968 }
967 969
968 void TemplateTable::astore(int n) { 970 void TemplateTable::astore(int n) {
969 transition(vtos, vtos); 971 transition(vtos, vtos);
970 __ pop_ptr(rax, rdx); 972 __ pop_ptr(rax, rdx);
971 __ movq(aaddress(n), rax); 973 __ movptr(aaddress(n), rax);
972 __ tag_local(rdx, n); 974 __ tag_local(rdx, n);
973 } 975 }
974 976
975 void TemplateTable::pop() { 977 void TemplateTable::pop() {
976 transition(vtos, vtos); 978 transition(vtos, vtos);
977 __ addq(rsp, Interpreter::stackElementSize()); 979 __ addptr(rsp, Interpreter::stackElementSize());
978 } 980 }
979 981
980 void TemplateTable::pop2() { 982 void TemplateTable::pop2() {
981 transition(vtos, vtos); 983 transition(vtos, vtos);
982 __ addq(rsp, 2 * Interpreter::stackElementSize()); 984 __ addptr(rsp, 2 * Interpreter::stackElementSize());
983 } 985 }
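pop and pop2 adjust rsp by Interpreter::stackElementSize(), which is one word normally and two when TaggedStackInterpreter pairs each stack value with a tag. A sketch of the assumed definition (the real one lives in the interpreter headers):

    static int stackElementSize() {
      return (TaggedStackInterpreter ? 2 : 1) * wordSize;
    }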
984 986
985 void TemplateTable::dup() { 987 void TemplateTable::dup() {
986 transition(vtos, vtos); 988 transition(vtos, vtos);
987 __ load_ptr_and_tag(0, rax, rdx); 989 __ load_ptr_and_tag(0, rax, rdx);
1088 } 1090 }
1089 1091
1090 void TemplateTable::lop2(Operation op) { 1092 void TemplateTable::lop2(Operation op) {
1091 transition(ltos, ltos); 1093 transition(ltos, ltos);
1092 switch (op) { 1094 switch (op) {
1093 case add : __ pop_l(rdx); __ addq (rax, rdx); break; 1095 case add : __ pop_l(rdx); __ addptr (rax, rdx); break;
1094 case sub : __ movq(rdx, rax); __ pop_l(rax); __ subq (rax, rdx); break; 1096 case sub : __ mov(rdx, rax); __ pop_l(rax); __ subptr (rax, rdx); break;
1095 case _and : __ pop_l(rdx); __ andq (rax, rdx); break; 1097 case _and : __ pop_l(rdx); __ andptr (rax, rdx); break;
1096 case _or : __ pop_l(rdx); __ orq (rax, rdx); break; 1098 case _or : __ pop_l(rdx); __ orptr (rax, rdx); break;
1097 case _xor : __ pop_l(rdx); __ xorq (rax, rdx); break; 1099 case _xor : __ pop_l(rdx); __ xorptr (rax, rdx); break;
1098 default : ShouldNotReachHere(); 1100 default : ShouldNotReachHere();
1099 } 1101 }
1100 } 1102 }
1101 1103
1102 void TemplateTable::idiv() { 1104 void TemplateTable::idiv() {
1128 __ imulq(rax, rdx); 1130 __ imulq(rax, rdx);
1129 } 1131 }
1130 1132
1131 void TemplateTable::ldiv() { 1133 void TemplateTable::ldiv() {
1132 transition(ltos, ltos); 1134 transition(ltos, ltos);
1133 __ movq(rcx, rax); 1135 __ mov(rcx, rax);
1134 __ pop_l(rax); 1136 __ pop_l(rax);
1135 // generate explicit div0 check 1137 // generate explicit div0 check
1136 __ testq(rcx, rcx); 1138 __ testq(rcx, rcx);
1137 __ jump_cc(Assembler::zero, 1139 __ jump_cc(Assembler::zero,
1138 ExternalAddress(Interpreter::_throw_ArithmeticException_entry)); 1140 ExternalAddress(Interpreter::_throw_ArithmeticException_entry));
1143 __ corrected_idivq(rcx); // kills rbx 1145 __ corrected_idivq(rcx); // kills rbx
1144 } 1146 }
1145 1147
1146 void TemplateTable::lrem() { 1148 void TemplateTable::lrem() {
1147 transition(ltos, ltos); 1149 transition(ltos, ltos);
1148 __ movq(rcx, rax); 1150 __ mov(rcx, rax);
1149 __ pop_l(rax); 1151 __ pop_l(rax);
1150 __ testq(rcx, rcx); 1152 __ testq(rcx, rcx);
1151 __ jump_cc(Assembler::zero, 1153 __ jump_cc(Assembler::zero,
1152 ExternalAddress(Interpreter::_throw_ArithmeticException_entry)); 1154 ExternalAddress(Interpreter::_throw_ArithmeticException_entry));
1153 // Note: could xor rax and rcx and compare with (-1 ^ min_int). If 1155 // Note: could xor rax and rcx and compare with (-1 ^ min_int). If
1154 // they are not equal, one could do a normal division (no correction 1156 // they are not equal, one could do a normal division (no correction
1155 // needed), which may speed up this implementation for the common case. 1157 // needed), which may speed up this implementation for the common case.
1156 // (see also JVM spec., p.243 & p.271) 1158 // (see also JVM spec., p.243 & p.271)
1157 __ corrected_idivq(rcx); // kills rbx 1159 __ corrected_idivq(rcx); // kills rbx
1158 __ movq(rax, rdx); 1160 __ mov(rax, rdx);
1159 } 1161 }
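Both ldiv and lrem make the divide-by-zero check explicit (the JVM must throw ArithmeticException, not take a hardware fault path) and then go through corrected_idivq, because hardware idiv also faults on the one overflowing input, min_jlong / -1, which the JVM spec defines to produce min_jlong (remainder 0). Model:

    #include <stdint.h>
    // Model of the case corrected_idivq must special-case.
    int64_t ldiv_model(int64_t a, int64_t b) {
      // b == 0 was already rejected by the explicit test above
      if (a == INT64_MIN && b == -1) return a;  // hardware idiv would #DE
      return a / b;                             // lrem: a % b, 0 in this corner
    }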
1160 1162
1161 void TemplateTable::lshl() { 1163 void TemplateTable::lshl() {
1162 transition(itos, ltos); 1164 transition(itos, ltos);
1163 __ movl(rcx, rax); // get shift count 1165 __ movl(rcx, rax); // get shift count
1182 void TemplateTable::fop2(Operation op) { 1184 void TemplateTable::fop2(Operation op) {
1183 transition(ftos, ftos); 1185 transition(ftos, ftos);
1184 switch (op) { 1186 switch (op) {
1185 case add: 1187 case add:
1186 __ addss(xmm0, at_rsp()); 1188 __ addss(xmm0, at_rsp());
1187 __ addq(rsp, Interpreter::stackElementSize()); 1189 __ addptr(rsp, Interpreter::stackElementSize());
1188 break; 1190 break;
1189 case sub: 1191 case sub:
1190 __ movflt(xmm1, xmm0); 1192 __ movflt(xmm1, xmm0);
1191 __ pop_f(xmm0); 1193 __ pop_f(xmm0);
1192 __ subss(xmm0, xmm1); 1194 __ subss(xmm0, xmm1);
1193 break; 1195 break;
1194 case mul: 1196 case mul:
1195 __ mulss(xmm0, at_rsp()); 1197 __ mulss(xmm0, at_rsp());
1196 __ addq(rsp, Interpreter::stackElementSize()); 1198 __ addptr(rsp, Interpreter::stackElementSize());
1197 break; 1199 break;
1198 case div: 1200 case div:
1199 __ movflt(xmm1, xmm0); 1201 __ movflt(xmm1, xmm0);
1200 __ pop_f(xmm0); 1202 __ pop_f(xmm0);
1201 __ divss(xmm0, xmm1); 1203 __ divss(xmm0, xmm1);
1214 void TemplateTable::dop2(Operation op) { 1216 void TemplateTable::dop2(Operation op) {
1215 transition(dtos, dtos); 1217 transition(dtos, dtos);
1216 switch (op) { 1218 switch (op) {
1217 case add: 1219 case add:
1218 __ addsd(xmm0, at_rsp()); 1220 __ addsd(xmm0, at_rsp());
1219 __ addq(rsp, 2 * Interpreter::stackElementSize()); 1221 __ addptr(rsp, 2 * Interpreter::stackElementSize());
1220 break; 1222 break;
1221 case sub: 1223 case sub:
1222 __ movdbl(xmm1, xmm0); 1224 __ movdbl(xmm1, xmm0);
1223 __ pop_d(xmm0); 1225 __ pop_d(xmm0);
1224 __ subsd(xmm0, xmm1); 1226 __ subsd(xmm0, xmm1);
1225 break; 1227 break;
1226 case mul: 1228 case mul:
1227 __ mulsd(xmm0, at_rsp()); 1229 __ mulsd(xmm0, at_rsp());
1228 __ addq(rsp, 2 * Interpreter::stackElementSize()); 1230 __ addptr(rsp, 2 * Interpreter::stackElementSize());
1229 break; 1231 break;
1230 case div: 1232 case div:
1231 __ movdbl(xmm1, xmm0); 1233 __ movdbl(xmm1, xmm0);
1232 __ pop_d(xmm0); 1234 __ pop_d(xmm0);
1233 __ divsd(xmm0, xmm1); 1235 __ divsd(xmm0, xmm1);
1484 __ bswapl(rdx); 1486 __ bswapl(rdx);
1485 1487
1486 if (!is_wide) { 1488 if (!is_wide) {
1487 __ sarl(rdx, 16); 1489 __ sarl(rdx, 16);
1488 } 1490 }
1489 __ movslq(rdx, rdx); 1491 __ movl2ptr(rdx, rdx);
1490 1492
1491 // Handle all the JSR stuff here, then exit. 1493 // Handle all the JSR stuff here, then exit.
1492 // It's much shorter and cleaner than intermingling with the non-JSR 1494 // It's much shorter and cleaner than intermingling with the non-JSR
1493 // normal-branch stuff occurring below. 1495 // normal-branch stuff occurring below.
1494 if (is_jsr) { 1496 if (is_jsr) {
1495 // Pre-load the next target bytecode into rbx 1497 // Pre-load the next target bytecode into rbx
1496 __ load_unsigned_byte(rbx, Address(r13, rdx, Address::times_1, 0)); 1498 __ load_unsigned_byte(rbx, Address(r13, rdx, Address::times_1, 0));
1497 1499
1498 // compute return address as bci in rax 1500 // compute return address as bci in rax
1499 __ leaq(rax, at_bcp((is_wide ? 5 : 3) - 1501 __ lea(rax, at_bcp((is_wide ? 5 : 3) -
1500 in_bytes(constMethodOopDesc::codes_offset()))); 1502 in_bytes(constMethodOopDesc::codes_offset())));
1501 __ subq(rax, Address(rcx, methodOopDesc::const_offset())); 1503 __ subptr(rax, Address(rcx, methodOopDesc::const_offset()));
1502 // Adjust the bcp in r13 by the displacement in rdx 1504 // Adjust the bcp in r13 by the displacement in rdx
1503 __ addq(r13, rdx); 1505 __ addptr(r13, rdx);
1504 // jsr returns atos that is not an oop 1506 // jsr returns atos that is not an oop
1505 __ push_i(rax); 1507 __ push_i(rax);
1506 __ dispatch_only(vtos); 1508 __ dispatch_only(vtos);
1507 return; 1509 return;
1508 } 1510 }
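jsr pushes its return address as a bytecode index, not a machine address (hence the comment that this atos is not an oop): since bcp = code_base + current_bci, the lea/subptr pair yields the bci of the instruction after the jsr. Model:

    // Model; code_base stands for constMethod + codes_offset,
    // exactly what the subptr above subtracts.
    int jsr_return_bci(const uint8_t* bcp, const uint8_t* code_base, bool wide) {
      return (int)((bcp + (wide ? 5 : 3)) - code_base);  // bci of next insn
    }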
1509 1511
1510 // Normal (non-jsr) branch handling 1512 // Normal (non-jsr) branch handling
1511 1513
1512 // Adjust the bcp in r13 by the displacement in rdx 1514 // Adjust the bcp in r13 by the displacement in rdx
1513 __ addq(r13, rdx); 1515 __ addptr(r13, rdx);
1514 1516
1515 assert(UseLoopCounter || !UseOnStackReplacement, 1517 assert(UseLoopCounter || !UseOnStackReplacement,
1516 "on-stack-replacement requires loop counters"); 1518 "on-stack-replacement requires loop counters");
1517 Label backedge_counter_overflow; 1519 Label backedge_counter_overflow;
1518 Label profile_method; 1520 Label profile_method;
1592 __ bind(profile_method); 1594 __ bind(profile_method);
1593 __ call_VM(noreg, 1595 __ call_VM(noreg,
1594 CAST_FROM_FN_PTR(address, 1596 CAST_FROM_FN_PTR(address,
1595 InterpreterRuntime::profile_method), r13); 1597 InterpreterRuntime::profile_method), r13);
1596 __ load_unsigned_byte(rbx, Address(r13, 0)); // restore target bytecode 1598 __ load_unsigned_byte(rbx, Address(r13, 0)); // restore target bytecode
1597 __ movq(rcx, Address(rbp, method_offset)); 1599 __ movptr(rcx, Address(rbp, method_offset));
1598 __ movq(rcx, Address(rcx, 1600 __ movptr(rcx, Address(rcx,
1599 in_bytes(methodOopDesc::method_data_offset()))); 1601 in_bytes(methodOopDesc::method_data_offset())));
1600 __ movq(Address(rbp, frame::interpreter_frame_mdx_offset * wordSize), 1602 __ movptr(Address(rbp, frame::interpreter_frame_mdx_offset * wordSize),
1601 rcx); 1603 rcx);
1602 __ test_method_data_pointer(rcx, dispatch); 1604 __ test_method_data_pointer(rcx, dispatch);
1603 // offset non-null mdp by MDO::data_offset() + IR::profile_method() 1605 // offset non-null mdp by MDO::data_offset() + IR::profile_method()
1604 __ addq(rcx, in_bytes(methodDataOopDesc::data_offset())); 1606 __ addptr(rcx, in_bytes(methodDataOopDesc::data_offset()));
1605 __ addq(rcx, rax); 1607 __ addptr(rcx, rax);
1606 __ movq(Address(rbp, frame::interpreter_frame_mdx_offset * wordSize), 1608 __ movptr(Address(rbp, frame::interpreter_frame_mdx_offset * wordSize),
1607 rcx); 1609 rcx);
1608 __ jmp(dispatch); 1610 __ jmp(dispatch);
1609 } 1611 }
1610 1612
1611 if (UseOnStackReplacement) { 1613 if (UseOnStackReplacement) {
1612 // invocation counter overflow 1614 // invocation counter overflow
1613 __ bind(backedge_counter_overflow); 1615 __ bind(backedge_counter_overflow);
1614 __ negq(rdx); 1616 __ negptr(rdx);
1615 __ addq(rdx, r13); // branch bcp 1617 __ addptr(rdx, r13); // branch bcp
1616 // IcoResult frequency_counter_overflow([JavaThread*], address branch_bcp) 1618 // IcoResult frequency_counter_overflow([JavaThread*], address branch_bcp)
1617 __ call_VM(noreg, 1619 __ call_VM(noreg,
1618 CAST_FROM_FN_PTR(address, 1620 CAST_FROM_FN_PTR(address,
1619 InterpreterRuntime::frequency_counter_overflow), 1621 InterpreterRuntime::frequency_counter_overflow),
1620 rdx); 1622 rdx);
1623 // rax: osr nmethod (osr ok) or NULL (osr not possible) 1625 // rax: osr nmethod (osr ok) or NULL (osr not possible)
1624 // ebx: target bytecode 1626 // ebx: target bytecode
1625 // rdx: scratch 1627 // rdx: scratch
1626 // r14: locals pointer 1628 // r14: locals pointer
1627 // r13: bcp 1629 // r13: bcp
1628 __ testq(rax, rax); // test result 1630 __ testptr(rax, rax); // test result
1629 __ jcc(Assembler::zero, dispatch); // no osr if null 1631 __ jcc(Assembler::zero, dispatch); // no osr if null
1630 // nmethod may have been invalidated (VM may block upon call_VM return) 1632 // nmethod may have been invalidated (VM may block upon call_VM return)
1631 __ movl(rcx, Address(rax, nmethod::entry_bci_offset())); 1633 __ movl(rcx, Address(rax, nmethod::entry_bci_offset()));
1632 __ cmpl(rcx, InvalidOSREntryBci); 1634 __ cmpl(rcx, InvalidOSREntryBci);
1633 __ jcc(Assembler::equal, dispatch); 1635 __ jcc(Assembler::equal, dispatch);
1634 1636
1635 // We have the address of an on stack replacement routine in eax 1637 // We have the address of an on stack replacement routine in eax
1636 // We need to prepare to execute the OSR method. First we must 1638 // We need to prepare to execute the OSR method. First we must
1637 // migrate the locals and monitors off of the stack. 1639 // migrate the locals and monitors off of the stack.
1638 1640
1639 __ movq(r13, rax); // save the nmethod 1641 __ mov(r13, rax); // save the nmethod
1640 1642
1641 call_VM(noreg, CAST_FROM_FN_PTR(address, SharedRuntime::OSR_migration_begin)); 1643 call_VM(noreg, CAST_FROM_FN_PTR(address, SharedRuntime::OSR_migration_begin));
1642 1644
1643 // eax is OSR buffer, move it to expected parameter location 1645 // eax is OSR buffer, move it to expected parameter location
1644 __ movq(j_rarg0, rax); 1646 __ mov(j_rarg0, rax);
1645 1647
1646 // We use j_rarg definitions here so that registers don't conflict as parameter 1648 // We use j_rarg definitions here so that registers don't conflict as parameter
1647 // registers change across platforms as we are in the midst of a calling 1649 // registers change across platforms as we are in the midst of a calling
1648 // sequence to the OSR nmethod and we don't want collision. These are NOT parameters. 1650 // sequence to the OSR nmethod and we don't want collision. These are NOT parameters.
1649 1651
1650 const Register retaddr = j_rarg2; 1652 const Register retaddr = j_rarg2;
1651 const Register sender_sp = j_rarg1; 1653 const Register sender_sp = j_rarg1;
1652 1654
1653 // pop the interpreter frame 1655 // pop the interpreter frame
1654 __ movq(sender_sp, Address(rbp, frame::interpreter_frame_sender_sp_offset * wordSize)); // get sender sp 1656 __ movptr(sender_sp, Address(rbp, frame::interpreter_frame_sender_sp_offset * wordSize)); // get sender sp
1655 __ leave(); // remove frame anchor 1657 __ leave(); // remove frame anchor
1656 __ popq(retaddr); // get return address 1658 __ pop(retaddr); // get return address
1657 __ movq(rsp, sender_sp); // set sp to sender sp 1659 __ mov(rsp, sender_sp); // set sp to sender sp
1658 // Ensure compiled code always sees stack at proper alignment 1660 // Ensure compiled code always sees stack at proper alignment
1659 __ andq(rsp, -(StackAlignmentInBytes)); 1661 __ andptr(rsp, -(StackAlignmentInBytes));
1660 1662
1661 // unlike x86 we need no specialized return from compiled code 1663 // unlike x86 we need no specialized return from compiled code
1662 // to the interpreter or the call stub. 1664 // to the interpreter or the call stub.
1663 1665
1664 // push the return address 1666 // push the return address
1665 __ pushq(retaddr); 1667 __ push(retaddr);
1666 1668
1667 // and begin the OSR nmethod 1669 // and begin the OSR nmethod
1668 __ jmp(Address(r13, nmethod::osr_entry_point_offset())); 1670 __ jmp(Address(r13, nmethod::osr_entry_point_offset()));
1669 } 1671 }
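Before jumping to the OSR nmethod, the interpreter frame is popped and rsp is re-aligned. andptr(rsp, -StackAlignmentInBytes) rounds down because negating a power of two yields exactly the mask that clears its low bits:

    #include <stdint.h>
    // Model: for power-of-two a, -a == ~(a - 1), so the AND rounds down.
    uintptr_t align_down(uintptr_t sp, uintptr_t a) {
      return sp & ~(a - 1);      // same bits as sp & (uintptr_t)-a
    }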
1670 } 1672 }
1696 1698
1697 void TemplateTable::if_nullcmp(Condition cc) { 1699 void TemplateTable::if_nullcmp(Condition cc) {
1698 transition(atos, vtos); 1700 transition(atos, vtos);
1699 // assume branch is more often taken than not (loops use backward branches) 1701 // assume branch is more often taken than not (loops use backward branches)
1700 Label not_taken; 1702 Label not_taken;
1701 __ testq(rax, rax); 1703 __ testptr(rax, rax);
1702 __ jcc(j_not(cc), not_taken); 1704 __ jcc(j_not(cc), not_taken);
1703 branch(false, false); 1705 branch(false, false);
1704 __ bind(not_taken); 1706 __ bind(not_taken);
1705 __ profile_not_taken_branch(rax); 1707 __ profile_not_taken_branch(rax);
1706 } 1708 }
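testptr(rax, rax) is the compact null test: test ANDs the register with itself, setting ZF exactly when it is zero, and j_not(cc) inverts the bytecode's condition so the fall-through path is the not-taken branch. Spelled out for ifnull:

    // ifnull, cc == equal:
    //   __ testptr(rax, rax);              // ZF = (rax == 0)
    //   __ jcc(j_not(equal), not_taken);   // jump away when rax != 0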
1708 void TemplateTable::if_acmp(Condition cc) { 1710 void TemplateTable::if_acmp(Condition cc) {
1709 transition(atos, vtos); 1711 transition(atos, vtos);
1710 // assume branch is more often taken than not (loops use backward branches) 1712 // assume branch is more often taken than not (loops use backward branches)
1711 Label not_taken; 1713 Label not_taken;
1712 __ pop_ptr(rdx); 1714 __ pop_ptr(rdx);
1713 __ cmpq(rdx, rax); 1715 __ cmpptr(rdx, rax);
1714 __ jcc(j_not(cc), not_taken); 1716 __ jcc(j_not(cc), not_taken);
1715 branch(false, false); 1717 branch(false, false);
1716 __ bind(not_taken); 1718 __ bind(not_taken);
1717 __ profile_not_taken_branch(rax); 1719 __ profile_not_taken_branch(rax);
1718 } 1720 }
1719 1721
1720 void TemplateTable::ret() { 1722 void TemplateTable::ret() {
1721 transition(vtos, vtos); 1723 transition(vtos, vtos);
1722 locals_index(rbx); 1724 locals_index(rbx);
1723 __ movq(rbx, aaddress(rbx)); // get return bci, compute return bcp 1725 __ movslq(rbx, iaddress(rbx)); // get return bci, compute return bcp
1724 __ profile_ret(rbx, rcx); 1726 __ profile_ret(rbx, rcx);
1725 __ get_method(rax); 1727 __ get_method(rax);
1726 __ movq(r13, Address(rax, methodOopDesc::const_offset())); 1728 __ movptr(r13, Address(rax, methodOopDesc::const_offset()));
1727 __ leaq(r13, Address(r13, rbx, Address::times_1, 1729 __ lea(r13, Address(r13, rbx, Address::times_1,
1728 constMethodOopDesc::codes_offset())); 1730 constMethodOopDesc::codes_offset()));
1729 __ dispatch_next(vtos); 1731 __ dispatch_next(vtos);
1730 } 1732 }
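ret rebuilds bcp from the bci saved in the local: constMethod's code array base plus the bci. Note the changeset also switches the load from movq/aaddress to movslq/iaddress, reading the slot as a sign-extended 32-bit bci rather than a full word. Model with a stand-in accessor:

    // Model; accessor names are stand-ins for the offsets used above.
    uint8_t* bcp_for(ConstMethodModel* cm, int bci) {
      return cm->code_base() + bci;    // constMethod + codes_offset + bci
    }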
1731 1733
1732 void TemplateTable::wide_ret() { 1734 void TemplateTable::wide_ret() {
1733 transition(vtos, vtos); 1735 transition(vtos, vtos);
1734 locals_index_wide(rbx); 1736 locals_index_wide(rbx);
1735 __ movq(rbx, aaddress(rbx)); // get return bci, compute return bcp 1737 __ movptr(rbx, aaddress(rbx)); // get return bci, compute return bcp
1736 __ profile_ret(rbx, rcx); 1738 __ profile_ret(rbx, rcx);
1737 __ get_method(rax); 1739 __ get_method(rax);
1738 __ movq(r13, Address(rax, methodOopDesc::const_offset())); 1740 __ movptr(r13, Address(rax, methodOopDesc::const_offset()));
1739 __ leaq(r13, Address(r13, rbx, Address::times_1, constMethodOopDesc::codes_offset())); 1741 __ lea(r13, Address(r13, rbx, Address::times_1, constMethodOopDesc::codes_offset()));
1740 __ dispatch_next(vtos); 1742 __ dispatch_next(vtos);
1741 } 1743 }
1742 1744
1743 void TemplateTable::tableswitch() { 1745 void TemplateTable::tableswitch() {
1744 Label default_case, continue_execution; 1746 Label default_case, continue_execution;
1745 transition(itos, vtos); 1747 transition(itos, vtos);
1746 // align r13 1748 // align r13
1747 __ leaq(rbx, at_bcp(BytesPerInt)); 1749 __ lea(rbx, at_bcp(BytesPerInt));
1748 __ andq(rbx, -BytesPerInt); 1750 __ andptr(rbx, -BytesPerInt);
1749 // load lo & hi 1751 // load lo & hi
1750 __ movl(rcx, Address(rbx, BytesPerInt)); 1752 __ movl(rcx, Address(rbx, BytesPerInt));
1751 __ movl(rdx, Address(rbx, 2 * BytesPerInt)); 1753 __ movl(rdx, Address(rbx, 2 * BytesPerInt));
1752 __ bswapl(rcx); 1754 __ bswapl(rcx);
1753 __ bswapl(rdx); 1755 __ bswapl(rdx);
1761 __ movl(rdx, Address(rbx, rax, Address::times_4, 3 * BytesPerInt)); 1763 __ movl(rdx, Address(rbx, rax, Address::times_4, 3 * BytesPerInt));
1762 __ profile_switch_case(rax, rbx, rcx); 1764 __ profile_switch_case(rax, rbx, rcx);
1763 // continue execution 1765 // continue execution
1764 __ bind(continue_execution); 1766 __ bind(continue_execution);
1765 __ bswapl(rdx); 1767 __ bswapl(rdx);
1766 __ movslq(rdx, rdx); 1768 __ movl2ptr(rdx, rdx);
1767 __ load_unsigned_byte(rbx, Address(r13, rdx, Address::times_1)); 1769 __ load_unsigned_byte(rbx, Address(r13, rdx, Address::times_1));
1768 __ addq(r13, rdx); 1770 __ addptr(r13, rdx);
1769 __ dispatch_only(vtos); 1771 __ dispatch_only(vtos);
1770 // handle default 1772 // handle default
1771 __ bind(default_case); 1773 __ bind(default_case);
1772 __ profile_switch_default(rax); 1774 __ profile_switch_default(rax);
1773 __ movl(rdx, Address(rbx, 0)); 1775 __ movl(rdx, Address(rbx, 0));
1783 transition(itos, vtos); 1785 transition(itos, vtos);
1784 Label loop_entry, loop, found, continue_execution; 1786 Label loop_entry, loop, found, continue_execution;
1785 // bswap rax so we can avoid bswapping the table entries 1787 // bswap rax so we can avoid bswapping the table entries
1786 __ bswapl(rax); 1788 __ bswapl(rax);
1787 // align r13 1789 // align r13
1788 __ leaq(rbx, at_bcp(BytesPerInt)); // btw: should be able to get rid of 1790 __ lea(rbx, at_bcp(BytesPerInt)); // btw: should be able to get rid of
1789 // this instruction (change offsets 1791 // this instruction (change offsets
1790 // below) 1792 // below)
1791 __ andq(rbx, -BytesPerInt); 1793 __ andptr(rbx, -BytesPerInt);
1792 // set counter 1794 // set counter
1793 __ movl(rcx, Address(rbx, BytesPerInt)); 1795 __ movl(rcx, Address(rbx, BytesPerInt));
1794 __ bswapl(rcx); 1796 __ bswapl(rcx);
1795 __ jmpb(loop_entry); 1797 __ jmpb(loop_entry);
1796 // table search 1798 // table search
1809 __ movl(rdx, Address(rbx, rcx, Address::times_8, 3 * BytesPerInt)); 1811 __ movl(rdx, Address(rbx, rcx, Address::times_8, 3 * BytesPerInt));
1810 __ profile_switch_case(rcx, rax, rbx); 1812 __ profile_switch_case(rcx, rax, rbx);
1811 // continue execution 1813 // continue execution
1812 __ bind(continue_execution); 1814 __ bind(continue_execution);
1813 __ bswapl(rdx); 1815 __ bswapl(rdx);
1814 __ movslq(rdx, rdx); 1816 __ movl2ptr(rdx, rdx);
1815 __ load_unsigned_byte(rbx, Address(r13, rdx, Address::times_1)); 1817 __ load_unsigned_byte(rbx, Address(r13, rdx, Address::times_1));
1816 __ addq(r13, rdx); 1818 __ addptr(r13, rdx);
1817 __ dispatch_only(vtos); 1819 __ dispatch_only(vtos);
1818 } 1820 }
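lookupswitch's match words sit big-endian in the bytecode stream, so the key in rax is byte-swapped once up front and compared against the raw entries, saving a bswap per iteration. Model:

    #include <stdint.h>
    struct PairBE { uint32_t match_be, offset_be; };   // as laid out in the stream
    static uint32_t bswap32(uint32_t x) {              // models the bswap insn
      return (x >> 24) | ((x >> 8) & 0x0000ff00u)
           | ((x << 8) & 0x00ff0000u) | (x << 24);
    }
    uint32_t find_offset(const PairBE* pairs, int n, uint32_t key,
                         uint32_t default_be) {
      uint32_t k = bswap32(key);                       // one swap, not n
      for (int i = n - 1; i >= 0; --i)
        if (pairs[i].match_be == k) return pairs[i].offset_be;
      return default_be;                               // still big-endian;
    }                                                  // caller bswaps it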
1819 1821
1820 void TemplateTable::fast_binaryswitch() { 1822 void TemplateTable::fast_binaryswitch() {
1821 transition(itos, vtos); 1823 transition(itos, vtos);
1851 const Register j = rdx; 1853 const Register j = rdx;
1852 const Register h = rdi; 1854 const Register h = rdi;
1853 const Register temp = rsi; 1855 const Register temp = rsi;
1854 1856
1855 // Find array start 1857 // Find array start
1856 __ leaq(array, at_bcp(3 * BytesPerInt)); // btw: should be able to 1858 __ lea(array, at_bcp(3 * BytesPerInt)); // btw: should be able to
1857 // get rid of this 1859 // get rid of this
1858 // instruction (change 1860 // instruction (change
1859 // offsets below) 1861 // offsets below)
1860 __ andq(array, -BytesPerInt); 1862 __ andptr(array, -BytesPerInt);
1861 1863
1862 // Initialize i & j 1864 // Initialize i & j
1863 __ xorl(i, i); // i = 0; 1865 __ xorl(i, i); // i = 0;
1864 __ movl(j, Address(array, -BytesPerInt)); // j = length(array); 1866 __ movl(j, Address(array, -BytesPerInt)); // j = length(array);
1865 1867
1907 1909
1908 // entry found -> j = offset 1910 // entry found -> j = offset
1909 __ movl(j , Address(array, i, Address::times_8, BytesPerInt)); 1911 __ movl(j , Address(array, i, Address::times_8, BytesPerInt));
1910 __ profile_switch_case(i, key, array); 1912 __ profile_switch_case(i, key, array);
1911 __ bswapl(j); 1913 __ bswapl(j);
1912 __ movslq(j, j); 1914 __ movl2ptr(j, j);
1913 __ load_unsigned_byte(rbx, Address(r13, j, Address::times_1)); 1915 __ load_unsigned_byte(rbx, Address(r13, j, Address::times_1));
1914 __ addq(r13, j); 1916 __ addptr(r13, j);
1915 __ dispatch_only(vtos); 1917 __ dispatch_only(vtos);
1916 1918
1917 // default case -> j = default offset 1919 // default case -> j = default offset
1918 __ bind(default_case); 1920 __ bind(default_case);
1919 __ profile_switch_default(i); 1921 __ profile_switch_default(i);
1920 __ movl(j, Address(array, -2 * BytesPerInt)); 1922 __ movl(j, Address(array, -2 * BytesPerInt));
1921 __ bswapl(j); 1923 __ bswapl(j);
1922 __ movslq(j, j); 1924 __ movl2ptr(j, j);
1923 __ load_unsigned_byte(rbx, Address(r13, j, Address::times_1)); 1925 __ load_unsigned_byte(rbx, Address(r13, j, Address::times_1));
1924 __ addq(r13, j); 1926 __ addptr(r13, j);
1925 __ dispatch_only(vtos); 1927 __ dispatch_only(vtos);
1926 } 1928 }
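fast_binaryswitch is the classic half-open binary search over the sorted match/offset pairs; the loop keeps the invariant match[i] <= key < match[j] (with virtual sentinels at both ends), and equality is re-checked once after the loop. Model:

    #include <stdint.h>
    // Model of the loop; pairs are sorted by match value.
    int binary_search(const int32_t* match, int n, int32_t key) {
      int i = 0, j = n;              // invariant (with sentinels) as above
      while (i + 1 < j) {
        int h = (int)(((uint32_t)i + (uint32_t)j) >> 1);   // i < h < j
        if (key < match[h]) j = h; else i = h;
      }
      return i;                      // caller compares key == match[i]
    }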
1927 1929
1928 1930
1929 void TemplateTable::_return(TosState state) { 1931 void TemplateTable::_return(TosState state) {
1931 assert(_desc->calls_vm(), 1933 assert(_desc->calls_vm(),
1932 "inconsistent calls_vm information"); // call in remove_activation 1934 "inconsistent calls_vm information"); // call in remove_activation
1933 1935
1934 if (_desc->bytecode() == Bytecodes::_return_register_finalizer) { 1936 if (_desc->bytecode() == Bytecodes::_return_register_finalizer) {
1935 assert(state == vtos, "only valid state"); 1937 assert(state == vtos, "only valid state");
1936 __ movq(c_rarg1, aaddress(0)); 1938 __ movptr(c_rarg1, aaddress(0));
1937 __ load_klass(rdi, c_rarg1); 1939 __ load_klass(rdi, c_rarg1);
1938 __ movl(rdi, Address(rdi, Klass::access_flags_offset_in_bytes() + sizeof(oopDesc))); 1940 __ movl(rdi, Address(rdi, Klass::access_flags_offset_in_bytes() + sizeof(oopDesc)));
1939 __ testl(rdi, JVM_ACC_HAS_FINALIZER); 1941 __ testl(rdi, JVM_ACC_HAS_FINALIZER);
1940 Label skip_register_finalizer; 1942 Label skip_register_finalizer;
1941 __ jcc(Assembler::zero, skip_register_finalizer); 1943 __ jcc(Assembler::zero, skip_register_finalizer);
2042 bool is_static = false) { 2044 bool is_static = false) {
2043 assert_different_registers(cache, index, flags, off); 2045 assert_different_registers(cache, index, flags, off);
2044 2046
2045 ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset(); 2047 ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset();
2046 // Field offset 2048 // Field offset
2047 __ movq(off, Address(cache, index, Address::times_8, 2049 __ movptr(off, Address(cache, index, Address::times_8,
2048 in_bytes(cp_base_offset + 2050 in_bytes(cp_base_offset +
2049 ConstantPoolCacheEntry::f2_offset()))); 2051 ConstantPoolCacheEntry::f2_offset())));
2050 // Flags 2052 // Flags
2051 __ movl(flags, Address(cache, index, Address::times_8, 2053 __ movl(flags, Address(cache, index, Address::times_8,
2052 in_bytes(cp_base_offset + 2054 in_bytes(cp_base_offset +
2053 ConstantPoolCacheEntry::flags_offset()))); 2055 ConstantPoolCacheEntry::flags_offset())));
2054 2056
2055 // klass overwrite register 2057 // klass overwrite register
2056 if (is_static) { 2058 if (is_static) {
2057 __ movq(obj, Address(cache, index, Address::times_8, 2059 __ movptr(obj, Address(cache, index, Address::times_8,
2058 in_bytes(cp_base_offset + 2060 in_bytes(cp_base_offset +
2059 ConstantPoolCacheEntry::f1_offset()))); 2061 ConstantPoolCacheEntry::f1_offset())));
2060 } 2062 }
2061 } 2063 }
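The times_8 addressing works because the index register arrives pre-scaled to words and each cache entry is a few word-sized slots: for a field entry, f2 is the field offset, flags carries the TOS type and modifier bits, and f1 holds the holder klass used for statics. A sketch of the entry shape these offsets address (an assumption; the authoritative layout is in the constant pool cache sources):

    struct CpCacheEntrySketch {      // one entry, word-sized fields
      intptr_t indices;              // original cp index + bytecodes
      intptr_t f1;                   // e.g. klass (statics) / methodOop
      intptr_t f2;                   // e.g. field offset / vtable index
      intptr_t flags;                // tos type, volatile/final bits, ...
    };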
2062 2064
2063 void TemplateTable::load_invoke_cp_cache_entry(int byte_no, 2065 void TemplateTable::load_invoke_cp_cache_entry(int byte_no,
2064 Register method, 2066 Register method,
2086 ConstantPoolCacheEntry::f2_offset()); 2088 ConstantPoolCacheEntry::f2_offset());
2087 2089
2088 resolve_cache_and_index(byte_no, cache, index); 2090 resolve_cache_and_index(byte_no, cache, index);
2089 2091
2090 assert(wordSize == 8, "adjust code below"); 2092 assert(wordSize == 8, "adjust code below");
2091 __ movq(method, Address(cache, index, Address::times_8, method_offset)); 2093 __ movptr(method, Address(cache, index, Address::times_8, method_offset));
2092 if (itable_index != noreg) { 2094 if (itable_index != noreg) {
2093 __ movq(itable_index, 2095 __ movptr(itable_index,
2094 Address(cache, index, Address::times_8, index_offset)); 2096 Address(cache, index, Address::times_8, index_offset));
2095 } 2097 }
2096 __ movl(flags , Address(cache, index, Address::times_8, flags_offset)); 2098 __ movl(flags , Address(cache, index, Address::times_8, flags_offset));
2097 } 2099 }
2098 2100
2114 __ jcc(Assembler::zero, L1); 2116 __ jcc(Assembler::zero, L1);
2115 2117
2116 __ get_cache_and_index_at_bcp(c_rarg2, c_rarg3, 1); 2118 __ get_cache_and_index_at_bcp(c_rarg2, c_rarg3, 1);
2117 2119
2118 // cache entry pointer 2120 // cache entry pointer
2119 __ addq(c_rarg2, in_bytes(constantPoolCacheOopDesc::base_offset())); 2121 __ addptr(c_rarg2, in_bytes(constantPoolCacheOopDesc::base_offset()));
2120 __ shll(c_rarg3, LogBytesPerWord); 2122 __ shll(c_rarg3, LogBytesPerWord);
2121 __ addq(c_rarg2, c_rarg3); 2123 __ addptr(c_rarg2, c_rarg3);
2122 if (is_static) { 2124 if (is_static) {
2123 __ xorl(c_rarg1, c_rarg1); // NULL object reference 2125 __ xorl(c_rarg1, c_rarg1); // NULL object reference
2124 } else { 2126 } else {
2125 __ movq(c_rarg1, at_tos()); // get object pointer without popping it 2127 __ movptr(c_rarg1, at_tos()); // get object pointer without popping it
2126 __ verify_oop(c_rarg1); 2128 __ verify_oop(c_rarg1);
2127 } 2129 }
2128 // c_rarg1: object pointer or NULL 2130 // c_rarg1: object pointer or NULL
2129 // c_rarg2: cache entry pointer 2131 // c_rarg2: cache entry pointer
2130 // c_rarg3: jvalue object on the stack 2132 // c_rarg3: jvalue object on the stack
2317 ConstantPoolCacheEntry::flags_offset()))); 2319 ConstantPoolCacheEntry::flags_offset())));
2318 __ shrl(c_rarg3, ConstantPoolCacheEntry::tosBits); 2320 __ shrl(c_rarg3, ConstantPoolCacheEntry::tosBits);
2319 // Make sure we don't need to mask rcx for tosBits after the 2321 // Make sure we don't need to mask rcx for tosBits after the
2320 // above shift 2322 // above shift
2321 ConstantPoolCacheEntry::verify_tosBits(); 2323 ConstantPoolCacheEntry::verify_tosBits();
2322 __ movq(c_rarg1, at_tos_p1()); // initially assume a one word jvalue 2324 __ movptr(c_rarg1, at_tos_p1()); // initially assume a one word jvalue
2323 __ cmpl(c_rarg3, ltos); 2325 __ cmpl(c_rarg3, ltos);
2324 __ cmovq(Assembler::equal, 2326 __ cmovptr(Assembler::equal,
2325 c_rarg1, at_tos_p2()); // ltos (two word jvalue) 2327 c_rarg1, at_tos_p2()); // ltos (two word jvalue)
2326 __ cmpl(c_rarg3, dtos); 2328 __ cmpl(c_rarg3, dtos);
2327 __ cmovq(Assembler::equal, 2329 __ cmovptr(Assembler::equal,
2328 c_rarg1, at_tos_p2()); // dtos (two word jvalue) 2330 c_rarg1, at_tos_p2()); // dtos (two word jvalue)
2329 } 2331 }
2330 // cache entry pointer 2332 // cache entry pointer
2331 __ addq(c_rarg2, in_bytes(cp_base_offset)); 2333 __ addptr(c_rarg2, in_bytes(cp_base_offset));
2332 __ shll(rscratch1, LogBytesPerWord); 2334 __ shll(rscratch1, LogBytesPerWord);
2333 __ addq(c_rarg2, rscratch1); 2335 __ addptr(c_rarg2, rscratch1);
2334 // object (tos) 2336 // object (tos)
2335 __ movq(c_rarg3, rsp); 2337 __ mov(c_rarg3, rsp);
2336 // c_rarg1: object pointer set up above (NULL if static) 2338 // c_rarg1: object pointer set up above (NULL if static)
2337 // c_rarg2: cache entry pointer 2339 // c_rarg2: cache entry pointer
2338 // c_rarg3: jvalue object on the stack 2340 // c_rarg3: jvalue object on the stack
2339 __ call_VM(noreg, 2341 __ call_VM(noreg,
2340 CAST_FROM_FN_PTR(address, 2342 CAST_FROM_FN_PTR(address,
2508 __ testl(c_rarg3, c_rarg3); 2510 __ testl(c_rarg3, c_rarg3);
2509 __ jcc(Assembler::zero, L2); 2511 __ jcc(Assembler::zero, L2);
2510 __ pop_ptr(rbx); // copy the object pointer from tos 2512 __ pop_ptr(rbx); // copy the object pointer from tos
2511 __ verify_oop(rbx); 2513 __ verify_oop(rbx);
2512 __ push_ptr(rbx); // put the object pointer back on tos 2514 __ push_ptr(rbx); // put the object pointer back on tos
2513 __ subq(rsp, sizeof(jvalue)); // add space for a jvalue object 2515 __ subptr(rsp, sizeof(jvalue)); // add space for a jvalue object
2514 __ movq(c_rarg3, rsp); 2516 __ mov(c_rarg3, rsp);
2515 const Address field(c_rarg3, 0); 2517 const Address field(c_rarg3, 0);
2516 2518
2517 switch (bytecode()) { // load values into the jvalue object 2519 switch (bytecode()) { // load values into the jvalue object
2518 case Bytecodes::_fast_aputfield: __ movq(field, rax); break; 2520 case Bytecodes::_fast_aputfield: __ movq(field, rax); break;
2519 case Bytecodes::_fast_lputfield: __ movq(field, rax); break; 2521 case Bytecodes::_fast_lputfield: __ movq(field, rax); break;
2527 ShouldNotReachHere(); 2529 ShouldNotReachHere();
2528 } 2530 }
2529 2531
2530 // Save rax because call_VM() will clobber it, then use it for 2532 // Save rax because call_VM() will clobber it, then use it for
2531 // JVMTI purposes 2533 // JVMTI purposes
2532 __ pushq(rax); 2534 __ push(rax);
2533 // access constant pool cache entry 2535 // access constant pool cache entry
2534 __ get_cache_entry_pointer_at_bcp(c_rarg2, rax, 1); 2536 __ get_cache_entry_pointer_at_bcp(c_rarg2, rax, 1);
2535 __ verify_oop(rbx); 2537 __ verify_oop(rbx);
2536 // rbx: object pointer copied above 2538 // rbx: object pointer copied above
2537 // c_rarg2: cache entry pointer 2539 // c_rarg2: cache entry pointer
2538 // c_rarg3: jvalue object on the stack 2540 // c_rarg3: jvalue object on the stack
2539 __ call_VM(noreg, 2541 __ call_VM(noreg,
2540 CAST_FROM_FN_PTR(address, 2542 CAST_FROM_FN_PTR(address,
2541 InterpreterRuntime::post_field_modification), 2543 InterpreterRuntime::post_field_modification),
2542 rbx, c_rarg2, c_rarg3); 2544 rbx, c_rarg2, c_rarg3);
2543 __ popq(rax); // restore lower value 2545 __ pop(rax); // restore lower value
2544 __ addq(rsp, sizeof(jvalue)); // release jvalue object space 2546 __ addptr(rsp, sizeof(jvalue)); // release jvalue object space
2545 __ bind(L2); 2547 __ bind(L2);
2546 } 2548 }
2547 } 2549 }
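For the JVMTI modification hook, the about-to-be-stored value is staged in a jvalue (as in jni.h) carved straight out of the stack by the subptr above, written with a store sized by the bytecode, and handed to the runtime by address; the space is released again with addptr once the call returns. Model with stand-in names:

    // Model only; value_in_rax and the callback name are stand-ins.
    jvalue v;
    v.j = value_in_rax;                           // slot sized per bytecode
    post_field_modification(obj, cache_entry, &v);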
2548 2550
2549 void TemplateTable::fast_storefield(TosState state) { 2551 void TemplateTable::fast_storefield(TosState state) {
2560 __ movl(rdx, Address(rcx, rbx, Address::times_8, 2562 __ movl(rdx, Address(rcx, rbx, Address::times_8,
2561 in_bytes(base + 2563 in_bytes(base +
2562 ConstantPoolCacheEntry::flags_offset()))); 2564 ConstantPoolCacheEntry::flags_offset())));
2563 2565
2564 // replace index with field offset from cache entry 2566 // replace index with field offset from cache entry
2565 __ movq(rbx, Address(rcx, rbx, Address::times_8, 2567 __ movptr(rbx, Address(rcx, rbx, Address::times_8,
2566 in_bytes(base + ConstantPoolCacheEntry::f2_offset()))); 2568 in_bytes(base + ConstantPoolCacheEntry::f2_offset())));
2567 2569
2568 // [jk] not needed currently 2570 // [jk] not needed currently
2569 // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore | 2571 // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
2570 // Assembler::StoreStore)); 2572 // Assembler::StoreStore));
2571 2573
2630 __ testl(rcx, rcx); 2632 __ testl(rcx, rcx);
2631 __ jcc(Assembler::zero, L1); 2633 __ jcc(Assembler::zero, L1);
2632 // access constant pool cache entry 2634 // access constant pool cache entry
2633 __ get_cache_entry_pointer_at_bcp(c_rarg2, rcx, 1); 2635 __ get_cache_entry_pointer_at_bcp(c_rarg2, rcx, 1);
2634 __ verify_oop(rax); 2636 __ verify_oop(rax);
2635 __ movq(r12, rax); // save object pointer before call_VM() clobbers it 2637 __ mov(r12, rax); // save object pointer before call_VM() clobbers it
2636 __ movq(c_rarg1, rax); 2638 __ mov(c_rarg1, rax);
2637 // c_rarg1: object pointer copied above 2639 // c_rarg1: object pointer copied above
2638 // c_rarg2: cache entry pointer 2640 // c_rarg2: cache entry pointer
2639 __ call_VM(noreg, 2641 __ call_VM(noreg,
2640 CAST_FROM_FN_PTR(address, 2642 CAST_FROM_FN_PTR(address,
2641 InterpreterRuntime::post_field_access), 2643 InterpreterRuntime::post_field_access),
2642 c_rarg1, c_rarg2); 2644 c_rarg1, c_rarg2);
2643 __ movq(rax, r12); // restore object pointer 2645 __ mov(rax, r12); // restore object pointer
2644 __ reinit_heapbase(); 2646 __ reinit_heapbase();
2645 __ bind(L1); 2647 __ bind(L1);
2646 } 2648 }
2647 2649
2648 // access constant pool cache 2650 // access constant pool cache
2654 // in_bytes(constantPoolCacheOopDesc::base_offset() + 2656 // in_bytes(constantPoolCacheOopDesc::base_offset() +
2655 // ConstantPoolCacheEntry::flags_offset()))); 2657 // ConstantPoolCacheEntry::flags_offset())));
2656 // __ shrl(rdx, ConstantPoolCacheEntry::volatileField); 2658 // __ shrl(rdx, ConstantPoolCacheEntry::volatileField);
2657 // __ andl(rdx, 0x1); 2659 // __ andl(rdx, 0x1);
2658 // } 2660 // }
2659 __ movq(rbx, Address(rcx, rbx, Address::times_8, 2661 __ movptr(rbx, Address(rcx, rbx, Address::times_8,
2660 in_bytes(constantPoolCacheOopDesc::base_offset() + 2662 in_bytes(constantPoolCacheOopDesc::base_offset() +
2661 ConstantPoolCacheEntry::f2_offset()))); 2663 ConstantPoolCacheEntry::f2_offset())));
2662 2664
2663 // rax: object 2665 // rax: object
2664 __ verify_oop(rax); 2666 __ verify_oop(rax);
2665 __ null_check(rax); 2667 __ null_check(rax);
2666 Address field(rax, rbx, Address::times_1); 2668 Address field(rax, rbx, Address::times_1);
2707 2709
2708 void TemplateTable::fast_xaccess(TosState state) { 2710 void TemplateTable::fast_xaccess(TosState state) {
2709 transition(vtos, state); 2711 transition(vtos, state);
2710 2712
2711 // get receiver 2713 // get receiver
2712 __ movq(rax, aaddress(0)); 2714 __ movptr(rax, aaddress(0));
2713 debug_only(__ verify_local_tag(frame::TagReference, 0)); 2715 debug_only(__ verify_local_tag(frame::TagReference, 0));
2714 // access constant pool cache 2716 // access constant pool cache
2715 __ get_cache_and_index_at_bcp(rcx, rdx, 2); 2717 __ get_cache_and_index_at_bcp(rcx, rdx, 2);
2716 __ movq(rbx, 2718 __ movptr(rbx,
2717 Address(rcx, rdx, Address::times_8, 2719 Address(rcx, rdx, Address::times_8,
2718 in_bytes(constantPoolCacheOopDesc::base_offset() + 2720 in_bytes(constantPoolCacheOopDesc::base_offset() +
2719 ConstantPoolCacheEntry::f2_offset()))); 2721 ConstantPoolCacheEntry::f2_offset())));
2720 // make sure exception is reported in correct bcp range (getfield is 2722 // make sure exception is reported in correct bcp range (getfield is
2721 // next instruction) 2723 // next instruction)
2722 __ incrementq(r13); 2724 __ increment(r13);
2723 __ null_check(rax); 2725 __ null_check(rax);
2724 switch (state) { 2726 switch (state) {
2725 case itos: 2727 case itos:
2726 __ movl(rax, Address(rax, rbx, Address::times_1)); 2728 __ movl(rax, Address(rax, rbx, Address::times_1));
2727 break; 2729 break;
2747 // __ jcc(Assembler::zero, notVolatile); 2749 // __ jcc(Assembler::zero, notVolatile);
2748 // __ membar(Assembler::LoadLoad); 2750 // __ membar(Assembler::LoadLoad);
2749 // __ bind(notVolatile); 2751 // __ bind(notVolatile);
2750 // } 2752 // }
2751 2753
2752 __ decrementq(r13); 2754 __ decrement(r13);
2753 } 2755 }
2754 2756
2755 2757
2756 2758
2757 //----------------------------------------------------------------------------- 2759 //-----------------------------------------------------------------------------
2786 // load receiver if needed (note: no return address pushed yet) 2788 // load receiver if needed (note: no return address pushed yet)
2787 if (load_receiver) { 2789 if (load_receiver) {
2788 __ movl(recv, flags); 2790 __ movl(recv, flags);
2789 __ andl(recv, 0xFF); 2791 __ andl(recv, 0xFF);
2790 if (TaggedStackInterpreter) __ shll(recv, 1); // index*2 2792 if (TaggedStackInterpreter) __ shll(recv, 1); // index*2
2791 __ movq(recv, Address(rsp, recv, Address::times_8, 2793 __ movptr(recv, Address(rsp, recv, Address::times_8,
2792 -Interpreter::expr_offset_in_bytes(1))); 2794 -Interpreter::expr_offset_in_bytes(1)));
2793 __ verify_oop(recv); 2795 __ verify_oop(recv);
2794 } 2796 }
2795 2797
2796 // do null check if needed 2798 // do null check if needed
2809 // load return address 2811 // load return address
2810 { 2812 {
2811 ExternalAddress return_5((address)Interpreter::return_5_addrs_by_index_table()); 2813 ExternalAddress return_5((address)Interpreter::return_5_addrs_by_index_table());
2812 ExternalAddress return_3((address)Interpreter::return_3_addrs_by_index_table()); 2814 ExternalAddress return_3((address)Interpreter::return_3_addrs_by_index_table());
2813 __ lea(rscratch1, (is_invokeinterface ? return_5 : return_3)); 2815 __ lea(rscratch1, (is_invokeinterface ? return_5 : return_3));
2814 __ movq(flags, Address(rscratch1, flags, Address::times_8)); 2816 __ movptr(flags, Address(rscratch1, flags, Address::times_8));
2815 } 2817 }
2816 2818
2817 // push return address 2819 // push return address
2818 __ pushq(flags); 2820 __ push(flags);
2819 2821
2820 // Restore flag field from the constant pool cache, and restore esi 2822 // Restore flag field from the constant pool cache, and restore esi
2821 // for later null checks. r13 is the bytecode pointer 2823 // for later null checks. r13 is the bytecode pointer
2822 if (save_flags) { 2824 if (save_flags) {
2823 __ movl(flags, r13); 2825 __ movl(flags, r13);
2865 2867
2866 // get target methodOop & entry point 2868 // get target methodOop & entry point
2867 const int base = instanceKlass::vtable_start_offset() * wordSize; 2869 const int base = instanceKlass::vtable_start_offset() * wordSize;
2868 assert(vtableEntry::size() * wordSize == 8, 2870 assert(vtableEntry::size() * wordSize == 8,
2869 "adjust the scaling in the code below"); 2871 "adjust the scaling in the code below");
2870 __ movq(method, Address(rax, index, 2872 __ movptr(method, Address(rax, index,
2871 Address::times_8, 2873 Address::times_8,
2872 base + vtableEntry::method_offset_in_bytes())); 2874 base + vtableEntry::method_offset_in_bytes()));
2873 __ movq(rdx, Address(method, methodOopDesc::interpreter_entry_offset())); 2875 __ movptr(rdx, Address(method, methodOopDesc::interpreter_entry_offset()));
2874 __ jump_from_interpreted(method, rdx); 2876 __ jump_from_interpreted(method, rdx);
2875 } 2877 }
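Virtual dispatch is one indexed load: the vtable begins at a fixed offset inside the klass, each entry is a single word (the assert pins vtableEntry::size() * wordSize to 8), so the methodOop sits at base + index*8. Model with stand-in types:

    // Model; MethodModel and the offsets are stand-ins.
    MethodModel* vtable_select(char* klass, int base_in_bytes, int index) {
      MethodModel** vtable = (MethodModel**)(klass + base_in_bytes);
      return vtable[index];          // entry == one method pointer
    }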
2876 2878
2877 2879
2878 void TemplateTable::invokevirtual(int byte_no) { 2880 void TemplateTable::invokevirtual(int byte_no) {
2938 __ verify_oop(rdx); 2940 __ verify_oop(rdx);
2939 2941
2940 // profile this call 2942 // profile this call
2941 __ profile_virtual_call(rdx, r13, r14); 2943 __ profile_virtual_call(rdx, r13, r14);
2942 2944
2943 __ movq(r14, rdx); // Save klassOop in r14 2945 __ mov(r14, rdx); // Save klassOop in r14
2944 2946
2945 // Compute start of first itableOffsetEntry (which is at the end of 2947 // Compute start of first itableOffsetEntry (which is at the end of
2946 // the vtable) 2948 // the vtable)
2947 const int base = instanceKlass::vtable_start_offset() * wordSize; 2949 const int base = instanceKlass::vtable_start_offset() * wordSize;
2948 // Get length of vtable 2950 // Get length of vtable
2949 assert(vtableEntry::size() * wordSize == 8, 2951 assert(vtableEntry::size() * wordSize == 8,
2950 "adjust the scaling in the code below"); 2952 "adjust the scaling in the code below");
2951 __ movl(r13, Address(rdx, 2953 __ movl(r13, Address(rdx,
2952 instanceKlass::vtable_length_offset() * wordSize)); 2954 instanceKlass::vtable_length_offset() * wordSize));
2953 __ leaq(rdx, Address(rdx, r13, Address::times_8, base)); 2955 __ lea(rdx, Address(rdx, r13, Address::times_8, base));
2954 2956
2955 if (HeapWordsPerLong > 1) { 2957 if (HeapWordsPerLong > 1) {
2956 // Round up to align_object_offset boundary 2958 // Round up to align_object_offset boundary
2957 __ round_to_q(rdx, BytesPerLong); 2959 __ round_to(rdx, BytesPerLong);
2958 } 2960 }
2959 2961
2960 Label entry, search, interface_ok; 2962 Label entry, search, interface_ok;
2961 2963
2962 __ jmpb(entry); 2964 __ jmpb(entry);
2963 __ bind(search); 2965 __ bind(search);
2964 __ addq(rdx, itableOffsetEntry::size() * wordSize); 2966 __ addptr(rdx, itableOffsetEntry::size() * wordSize);
2965 2967
2966 __ bind(entry); 2968 __ bind(entry);
2967 2969
2968 // Check that the entry is non-null. A null entry means that the 2970 // Check that the entry is non-null. A null entry means that the
2969 // receiver class doesn't implement the interface, and wasn't the 2971 // receiver class doesn't implement the interface, and wasn't the
2970 // same as the receiver class checked when the interface was 2972 // same as the receiver class checked when the interface was
2971 // resolved. 2973 // resolved.
2972 __ pushq(rdx); 2974 __ push(rdx);
2973 __ movq(rdx, Address(rdx, itableOffsetEntry::interface_offset_in_bytes())); 2975 __ movptr(rdx, Address(rdx, itableOffsetEntry::interface_offset_in_bytes()));
2974 __ testq(rdx, rdx); 2976 __ testptr(rdx, rdx);
2975 __ jcc(Assembler::notZero, interface_ok); 2977 __ jcc(Assembler::notZero, interface_ok);
2976 // throw exception 2978 // throw exception
2977 __ popq(rdx); // pop saved register first. 2979 __ pop(rdx); // pop saved register first.
2978 __ popq(rbx); // pop return address (pushed by prepare_invoke) 2980 __ pop(rbx); // pop return address (pushed by prepare_invoke)
2979 __ restore_bcp(); // r13 must be correct for exception handler (was 2981 __ restore_bcp(); // r13 must be correct for exception handler (was
2980 // destroyed) 2982 // destroyed)
2981 __ restore_locals(); // make sure locals pointer is correct as well 2983 __ restore_locals(); // make sure locals pointer is correct as well
2982 // (was destroyed) 2984 // (was destroyed)
2983 __ call_VM(noreg, CAST_FROM_FN_PTR(address, 2985 __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2984 InterpreterRuntime::throw_IncompatibleClassChangeError)); 2986 InterpreterRuntime::throw_IncompatibleClassChangeError));
2985 // the call_VM checks for exception, so we should never return here. 2987 // the call_VM checks for exception, so we should never return here.
2986 __ should_not_reach_here(); 2988 __ should_not_reach_here();
2987 __ bind(interface_ok); 2989 __ bind(interface_ok);
2988 2990
2989 __ popq(rdx); 2991 __ pop(rdx);
2990 2992
2991 __ cmpq(rax, Address(rdx, itableOffsetEntry::interface_offset_in_bytes())); 2993 __ cmpptr(rax, Address(rdx, itableOffsetEntry::interface_offset_in_bytes()));
2992 __ jcc(Assembler::notEqual, search); 2994 __ jcc(Assembler::notEqual, search);
2993 2995
2994 __ movl(rdx, Address(rdx, itableOffsetEntry::offset_offset_in_bytes())); 2996 __ movl(rdx, Address(rdx, itableOffsetEntry::offset_offset_in_bytes()));
2995 2997
2996 __ addq(rdx, r14); // Add offset to klassOop 2998 __ addptr(rdx, r14); // Add offset to klassOop
2997 assert(itableMethodEntry::size() * wordSize == 8, 2999 assert(itableMethodEntry::size() * wordSize == 8,
2998 "adjust the scaling in the code below"); 3000 "adjust the scaling in the code below");
2999 __ movq(rbx, Address(rdx, rbx, Address::times_8)); 3001 __ movptr(rbx, Address(rdx, rbx, Address::times_8));
3000 // rbx: methodOop to call 3002 // rbx: methodOop to call
3001 // rcx: receiver 3003 // rcx: receiver
3002 // Check for abstract method error 3004 // Check for abstract method error
3003 // Note: This should be done more efficiently via a 3005 // Note: This should be done more efficiently via a
3004 // throw_abstract_method_error interpreter entry point and a 3006 // throw_abstract_method_error interpreter entry point and a
3005 // conditional jump to it in case of a null method. 3007 // conditional jump to it in case of a null method.
3006 { 3008 {
3007 Label L; 3009 Label L;
3008 __ testq(rbx, rbx); 3010 __ testptr(rbx, rbx);
3009 __ jcc(Assembler::notZero, L); 3011 __ jcc(Assembler::notZero, L);
3010 // throw exception 3012 // throw exception
3011 // note: must restore interpreter registers to canonical 3013 // note: must restore interpreter registers to canonical
3012 // state for exception handling to work correctly! 3014 // state for exception handling to work correctly!
3013 __ popq(rbx); // pop return address (pushed by prepare_invoke) 3015 __ pop(rbx); // pop return address (pushed by prepare_invoke)
3014 __ restore_bcp(); // r13 must be correct for exception handler 3016 __ restore_bcp(); // r13 must be correct for exception handler
3015 // (was destroyed) 3017 // (was destroyed)
3016 __ restore_locals(); // make sure locals pointer is correct as 3018 __ restore_locals(); // make sure locals pointer is correct as
3017 // well (was destroyed) 3019 // well (was destroyed)
3018 __ call_VM(noreg, 3020 __ call_VM(noreg,
3021 // the call_VM checks for exception, so we should never return here. 3023 // the call_VM checks for exception, so we should never return here.
3022 __ should_not_reach_here(); 3024 __ should_not_reach_here();
3023 __ bind(L); 3025 __ bind(L);
3024 } 3026 }
3025 3027
3026 __ movq(rcx, Address(rbx, methodOopDesc::interpreter_entry_offset())); 3028 __ movptr(rcx, Address(rbx, methodOopDesc::interpreter_entry_offset()));
3027 3029
3028 // do the call 3030 // do the call
3029 // rcx: receiver 3031 // rcx: receiver
3030 // rbx: methodOop 3032 // rbx: methodOop
3031 __ jump_from_interpreted(rbx, rdx); 3033 __ jump_from_interpreted(rbx, rdx);
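Taken together, the invokeinterface sequence walks the receiver klass's itable: scan the offset entries until the target interface is found (a NULL entry means the receiver does not implement it, hence the IncompatibleClassChangeError path), then index that interface's method table. A hedged C++ model with stand-in names; ItableOffsetEntry here is illustrative, not HotSpot's actual type:

  struct Method;                            // stand-in for methodOop
  struct Klass;                             // stand-in for klassOop

  struct ItableOffsetEntry {                // one entry per implemented interface
    Klass* interface_klass;                 // what rax is compared against
    int    offset;                          // byte offset of the method table
  };                                        //   within the receiver's klass

  Method* itable_lookup(char* receiver_klass, ItableOffsetEntry* ioe,
                        Klass* target_interface, int itable_index) {
    while (ioe->interface_klass != target_interface) {  // the 'search' loop
      if (ioe->interface_klass == nullptr)  // end of table: receiver does not
        return nullptr;                     //   implement it (ICCE in real code)
      ++ioe;
    }
    // addptr(rdx, r14) plus movptr(rbx, Address(rdx, rbx, times_8)) above
    Method** methods = (Method**)(receiver_klass + ioe->offset);
    return methods[itable_index];           // NULL -> AbstractMethodError path
  }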
3045 ExternalAddress top((address)Universe::heap()->top_addr()); 3047 ExternalAddress top((address)Universe::heap()->top_addr());
3046 ExternalAddress end((address)Universe::heap()->end_addr()); 3048 ExternalAddress end((address)Universe::heap()->end_addr());
3047 3049
3048 __ get_cpool_and_tags(rsi, rax); 3050 __ get_cpool_and_tags(rsi, rax);
3049 // get instanceKlass 3051 // get instanceKlass
3050 __ movq(rsi, Address(rsi, rdx, 3052 __ movptr(rsi, Address(rsi, rdx,
3051 Address::times_8, sizeof(constantPoolOopDesc))); 3053 Address::times_8, sizeof(constantPoolOopDesc)));
3052 3054
3053 // make sure the class we're about to instantiate has been 3055 // make sure the class we're about to instantiate has been
3054 // resolved. Note: slow_case does a pop of stack, which is why we 3056 // resolved. Note: slow_case does a pop of stack, which is why we
3055 // loaded class/pushed above 3057 // loaded class/pushed above
3056 const int tags_offset = typeArrayOopDesc::header_size(T_BYTE) * wordSize; 3058 const int tags_offset = typeArrayOopDesc::header_size(T_BYTE) * wordSize;
3082 3084
3083 const bool allow_shared_alloc = 3085 const bool allow_shared_alloc =
3084 Universe::heap()->supports_inline_contig_alloc() && !CMSIncrementalMode; 3086 Universe::heap()->supports_inline_contig_alloc() && !CMSIncrementalMode;
3085 3087
3086 if (UseTLAB) { 3088 if (UseTLAB) {
3087 __ movq(rax, Address(r15_thread, in_bytes(JavaThread::tlab_top_offset()))); 3089 __ movptr(rax, Address(r15_thread, in_bytes(JavaThread::tlab_top_offset())));
3088 __ leaq(rbx, Address(rax, rdx, Address::times_1)); 3090 __ lea(rbx, Address(rax, rdx, Address::times_1));
3089 __ cmpq(rbx, Address(r15_thread, in_bytes(JavaThread::tlab_end_offset()))); 3091 __ cmpptr(rbx, Address(r15_thread, in_bytes(JavaThread::tlab_end_offset())));
3090 __ jcc(Assembler::above, allow_shared_alloc ? allocate_shared : slow_case); 3092 __ jcc(Assembler::above, allow_shared_alloc ? allocate_shared : slow_case);
3091 __ movq(Address(r15_thread, in_bytes(JavaThread::tlab_top_offset())), rbx); 3093 __ movptr(Address(r15_thread, in_bytes(JavaThread::tlab_top_offset())), rbx);
3092 if (ZeroTLAB) { 3094 if (ZeroTLAB) {
3093 // the fields have been already cleared 3095 // the fields have been already cleared
3094 __ jmp(initialize_header); 3096 __ jmp(initialize_header);
3095 } else { 3097 } else {
3096 // initialize both the header and fields 3098 // initialize both the header and fields
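The five instructions of the UseTLAB branch are a plain bump-pointer fast path: load top, add the instance size, bail out if the bump passes end, otherwise publish the new top. A TLAB is thread-local, so no atomics are needed. A standalone sketch under assumed names:

  #include <cstddef>

  struct Tlab { char* top; char* end; };    // per-thread, so plain loads/stores

  void* tlab_allocate(Tlab& t, size_t size_in_bytes) {
    char* obj     = t.top;                  // movptr(rax, tlab_top)
    char* new_top = obj + size_in_bytes;    // lea(rbx, Address(rax, rdx, times_1))
    if (new_top > t.end)                    // cmpptr(rbx, tlab_end) + jcc(above)
      return nullptr;                       // allocate_shared or slow_case
    t.top = new_top;                        // movptr(tlab_top, rbx)
    return obj;                             // header/fields initialized next
  }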
3107 const Register RtopAddr = rscratch1; 3109 const Register RtopAddr = rscratch1;
3108 const Register RendAddr = rscratch2; 3110 const Register RendAddr = rscratch2;
3109 3111
3110 __ lea(RtopAddr, top); 3112 __ lea(RtopAddr, top);
3111 __ lea(RendAddr, end); 3113 __ lea(RendAddr, end);
3112 __ movq(rax, Address(RtopAddr, 0)); 3114 __ movptr(rax, Address(RtopAddr, 0));
3113 3115
3114 // For retries rax gets set by cmpxchgq 3116 // For retries rax gets set by cmpxchgq
3115 Label retry; 3117 Label retry;
3116 __ bind(retry); 3118 __ bind(retry);
3117 __ leaq(rbx, Address(rax, rdx, Address::times_1)); 3119 __ lea(rbx, Address(rax, rdx, Address::times_1));
3118 __ cmpq(rbx, Address(RendAddr, 0)); 3120 __ cmpptr(rbx, Address(RendAddr, 0));
3119 __ jcc(Assembler::above, slow_case); 3121 __ jcc(Assembler::above, slow_case);
3120 3122
3121 // Compare rax with the top addr, and if still equal, store the new 3123 // Compare rax with the top addr, and if still equal, store the new
3122 // top addr (in rbx) at the address the top addr pointer points to. Sets 3124 // top addr (in rbx) at the address the top addr pointer points to. Sets
3123 // ZF if it was equal, and clears it otherwise. Use lock prefix for atomicity on MPs. 3125 // ZF if it was equal, and clears it otherwise. Use lock prefix for atomicity on MPs.
3126 // rbx: object end 3128 // rbx: object end
3127 // rdx: instance size in bytes 3129 // rdx: instance size in bytes
3128 if (os::is_MP()) { 3130 if (os::is_MP()) {
3129 __ lock(); 3131 __ lock();
3130 } 3132 }
3131 __ cmpxchgq(rbx, Address(RtopAddr, 0)); 3133 __ cmpxchgptr(rbx, Address(RtopAddr, 0));
3132 3134
3133 // if someone beat us on the allocation, try again, otherwise continue 3135 // if someone beat us on the allocation, try again, otherwise continue
3134 __ jcc(Assembler::notEqual, retry); 3136 __ jcc(Assembler::notEqual, retry);
3135 } 3137 }
3136 3138
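The shared-eden path, by contrast, races other threads for the same top pointer, so the bump is published with lock cmpxchg and retried on failure; on x86 a failed cmpxchg conveniently reloads rax with the current top. A hedged C++11 model using std::atomic, with assumed names:

  #include <atomic>
  #include <cstddef>

  std::atomic<char*> eden_top;              // what RtopAddr points at
  char*              eden_end;              // what RendAddr points at

  void* shared_allocate(size_t size_in_bytes) {
    char* obj = eden_top.load();            // movptr(rax, Address(RtopAddr, 0))
    for (;;) {                              // bind(retry)
      char* new_top = obj + size_in_bytes;  // lea(rbx, Address(rax, rdx, times_1))
      if (new_top > eden_end)               // cmpptr(rbx, Address(RendAddr, 0))
        return nullptr;                     //   jcc(above, slow_case)
      // On failure 'obj' is refreshed with the observed top, mirroring how
      // cmpxchg reloads rax; weak is fine since we loop anyway.
      if (eden_top.compare_exchange_weak(obj, new_top))
        return obj;                         // won the race
    }
  }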
3155 } 3157 }
3156 3158
3157 // initialize object header only. 3159 // initialize object header only.
3158 __ bind(initialize_header); 3160 __ bind(initialize_header);
3159 if (UseBiasedLocking) { 3161 if (UseBiasedLocking) {
3160 __ movq(rscratch1, Address(rsi, Klass::prototype_header_offset_in_bytes() + klassOopDesc::klass_part_offset_in_bytes())); 3162 __ movptr(rscratch1, Address(rsi, Klass::prototype_header_offset_in_bytes() + klassOopDesc::klass_part_offset_in_bytes()));
3161 __ movq(Address(rax, oopDesc::mark_offset_in_bytes()), rscratch1); 3163 __ movptr(Address(rax, oopDesc::mark_offset_in_bytes()), rscratch1);
3162 } else { 3164 } else {
3163 __ movptr(Address(rax, oopDesc::mark_offset_in_bytes()), 3165 __ movptr(Address(rax, oopDesc::mark_offset_in_bytes()),
3164 (intptr_t) markOopDesc::prototype()); // header (address 0x1) 3166 (intptr_t) markOopDesc::prototype()); // header (address 0x1)
3165 } 3167 }
3166 __ xorl(rcx, rcx); // use zero reg to clear memory (shorter code) 3168 __ xorl(rcx, rcx); // use zero reg to clear memory (shorter code)
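Header initialization differs only in where the mark word prototype comes from: with biased locking it is loaded from the klass, so new instances can start out biasable; otherwise every object gets the fixed neutral prototype, literally the value 0x1 per the comment above. A minimal model with assumed names:

  #include <cstdint>

  const uintptr_t kNeutralMark = 0x1;       // markOopDesc::prototype() above

  void init_mark_word(uintptr_t* mark_slot, const uintptr_t* klass_prototype,
                      bool use_biased_locking) {
    *mark_slot = use_biased_locking ? *klass_prototype  // may carry bias bits
                                    : kNeutralMark;
  }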
3213 } 3215 }
3214 3216
3215 void TemplateTable::checkcast() { 3217 void TemplateTable::checkcast() {
3216 transition(atos, atos); 3218 transition(atos, atos);
3217 Label done, is_null, ok_is_subtype, quicked, resolved; 3219 Label done, is_null, ok_is_subtype, quicked, resolved;
3218 __ testq(rax, rax); // object is in rax 3220 __ testptr(rax, rax); // object is in rax
3219 __ jcc(Assembler::zero, is_null); 3221 __ jcc(Assembler::zero, is_null);
3220 3222
3221 // Get cpool & tags index 3223 // Get cpool & tags index
3222 __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array 3224 __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
3223 __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index 3225 __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
3226 Address::times_1, 3228 Address::times_1,
3227 typeArrayOopDesc::header_size(T_BYTE) * wordSize), 3229 typeArrayOopDesc::header_size(T_BYTE) * wordSize),
3228 JVM_CONSTANT_Class); 3230 JVM_CONSTANT_Class);
3229 __ jcc(Assembler::equal, quicked); 3231 __ jcc(Assembler::equal, quicked);
3230 __ push(atos); // save receiver for result, and for GC 3232 __ push(atos); // save receiver for result, and for GC
3231 __ movq(r12, rcx); // save rcx XXX 3233 __ mov(r12, rcx); // save rcx XXX
3232 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc)); 3234 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
3233 __ movq(rcx, r12); // restore rcx XXX 3235 __ movq(rcx, r12); // restore rcx XXX
3234 __ reinit_heapbase(); 3236 __ reinit_heapbase();
3235 __ pop_ptr(rdx); // restore receiver 3237 __ pop_ptr(rdx); // restore receiver
3236 __ jmpb(resolved); 3238 __ jmpb(resolved);
3237 3239
3238 // Get superklass in rax and subklass in rbx 3240 // Get superklass in rax and subklass in rbx
3239 __ bind(quicked); 3241 __ bind(quicked);
3240 __ movq(rdx, rax); // Save object in rdx; rax needed for subtype check 3242 __ mov(rdx, rax); // Save object in rdx; rax needed for subtype check
3241 __ movq(rax, Address(rcx, rbx, 3243 __ movptr(rax, Address(rcx, rbx,
3242 Address::times_8, sizeof(constantPoolOopDesc))); 3244 Address::times_8, sizeof(constantPoolOopDesc)));
3243 3245
3244 __ bind(resolved); 3246 __ bind(resolved);
3245 __ load_klass(rbx, rdx); 3247 __ load_klass(rbx, rdx);
3246 3248
3253 // object is at TOS 3255 // object is at TOS
3254 __ jump(ExternalAddress(Interpreter::_throw_ClassCastException_entry)); 3256 __ jump(ExternalAddress(Interpreter::_throw_ClassCastException_entry));
3255 3257
3256 // Come here on success 3258 // Come here on success
3257 __ bind(ok_is_subtype); 3259 __ bind(ok_is_subtype);
3258 __ movq(rax, rdx); // Restore object in rdx 3260 __ mov(rax, rdx); // Restore object in rdx
3259 3261
3260 // Collect counts on whether this check-cast sees NULLs a lot or not. 3262 // Collect counts on whether this check-cast sees NULLs a lot or not.
3261 if (ProfileInterpreter) { 3263 if (ProfileInterpreter) {
3262 __ jmp(done); 3264 __ jmp(done);
3263 __ bind(is_null); 3265 __ bind(is_null);
3269 } 3271 }
3270 3272
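In bytecode terms the checkcast sequence reduces to: null passes through untouched, otherwise the object's klass must be a subtype of the resolved class or ClassCastException is thrown, and on success the operand is left unchanged. A hedged sketch with stand-in types; the real subtype check also covers interfaces and arrays:

  #include <stdexcept>

  struct Klass {
    const Klass* super;                     // simplified: superclass chain only
    bool is_subtype_of(const Klass* s) const {
      for (const Klass* k = this; k != nullptr; k = k->super)
        if (k == s) return true;
      return false;
    }
  };
  struct Oop { Klass* klass; };

  Oop* checkcast(Oop* obj, const Klass* target) {
    if (obj == nullptr) return obj;         // testptr + jcc(zero, is_null)
    if (!obj->klass->is_subtype_of(target)) // gen_subtype_check (elided above)
      throw std::runtime_error("ClassCastException");  // throw entry above
    return obj;                             // ok_is_subtype: value unchanged
  }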
3271 void TemplateTable::instanceof() { 3273 void TemplateTable::instanceof() {
3272 transition(atos, itos); 3274 transition(atos, itos);
3273 Label done, is_null, ok_is_subtype, quicked, resolved; 3275 Label done, is_null, ok_is_subtype, quicked, resolved;
3274 __ testq(rax, rax); 3276 __ testptr(rax, rax);
3275 __ jcc(Assembler::zero, is_null); 3277 __ jcc(Assembler::zero, is_null);
3276 3278
3277 // Get cpool & tags index 3279 // Get cpool & tags index
3278 __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array 3280 __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
3279 __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index 3281 __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
3283 typeArrayOopDesc::header_size(T_BYTE) * wordSize), 3285 typeArrayOopDesc::header_size(T_BYTE) * wordSize),
3284 JVM_CONSTANT_Class); 3286 JVM_CONSTANT_Class);
3285 __ jcc(Assembler::equal, quicked); 3287 __ jcc(Assembler::equal, quicked);
3286 3288
3287 __ push(atos); // save receiver for result, and for GC 3289 __ push(atos); // save receiver for result, and for GC
3288 __ movq(r12, rcx); // save rcx 3290 __ mov(r12, rcx); // save rcx
3289 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc)); 3291 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
3290 __ movq(rcx, r12); // restore rcx 3292 __ movq(rcx, r12); // restore rcx
3291 __ reinit_heapbase(); 3293 __ reinit_heapbase();
3292 __ pop_ptr(rdx); // restore receiver 3294 __ pop_ptr(rdx); // restore receiver
3293 __ load_klass(rdx, rdx); 3295 __ load_klass(rdx, rdx);
3294 __ jmpb(resolved); 3296 __ jmpb(resolved);
3295 3297
3296 // Get superklass in rax and subklass in rdx 3298 // Get superklass in rax and subklass in rdx
3297 __ bind(quicked); 3299 __ bind(quicked);
3298 __ load_klass(rdx, rax); 3300 __ load_klass(rdx, rax);
3299 __ movq(rax, Address(rcx, rbx, 3301 __ movptr(rax, Address(rcx, rbx,
3300 Address::times_8, sizeof(constantPoolOopDesc))); 3302 Address::times_8, sizeof(constantPoolOopDesc)));
3301 3303
3302 __ bind(resolved); 3304 __ bind(resolved);
3303 3305
3304 // Generate subtype check. Blows rcx, rdi 3306 // Generate subtype check. Blows rcx, rdi
3305 // Superklass in rax. Subklass in rdx. 3307 // Superklass in rax. Subklass in rdx.
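instanceof differs from checkcast only in how the verdict is delivered: null and a failed subtype check both yield 0, success yields 1, and nothing throws. Reusing the stand-in types from the previous sketch:

  int instance_of(const Oop* obj, const Klass* target) {
    if (obj == nullptr) return 0;           // null is never an instance
    return obj->klass->is_subtype_of(target) ? 1 : 0;
  }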
3338 __ get_method(c_rarg1); 3340 __ get_method(c_rarg1);
3339 __ call_VM(noreg, 3341 __ call_VM(noreg,
3340 CAST_FROM_FN_PTR(address, 3342 CAST_FROM_FN_PTR(address,
3341 InterpreterRuntime::get_original_bytecode_at), 3343 InterpreterRuntime::get_original_bytecode_at),
3342 c_rarg1, r13); 3344 c_rarg1, r13);
3343 __ movq(rbx, rax); 3345 __ mov(rbx, rax);
3344 3346
3345 // post the breakpoint event 3347 // post the breakpoint event
3346 __ get_method(c_rarg1); 3348 __ get_method(c_rarg1);
3347 __ call_VM(noreg, 3349 __ call_VM(noreg,
3348 CAST_FROM_FN_PTR(address, InterpreterRuntime::_breakpoint), 3350 CAST_FROM_FN_PTR(address, InterpreterRuntime::_breakpoint),
3396 __ xorl(c_rarg1, c_rarg1); // points to free slot or NULL 3398 __ xorl(c_rarg1, c_rarg1); // points to free slot or NULL
3397 3399
3398 // find a free slot in the monitor block (result in c_rarg1) 3400 // find a free slot in the monitor block (result in c_rarg1)
3399 { 3401 {
3400 Label entry, loop, exit; 3402 Label entry, loop, exit;
3401 __ movq(c_rarg3, monitor_block_top); // points to current entry, 3403 __ movptr(c_rarg3, monitor_block_top); // points to current entry,
3402 // starting with top-most entry 3404 // starting with top-most entry
3403 __ leaq(c_rarg2, monitor_block_bot); // points to word before bottom 3405 __ lea(c_rarg2, monitor_block_bot); // points to word before bottom
3404 // of monitor block 3406 // of monitor block
3405 __ jmpb(entry); 3407 __ jmpb(entry);
3406 3408
3407 __ bind(loop); 3409 __ bind(loop);
3408 // check if current entry is used 3410 // check if current entry is used
3409 __ cmpq(Address(c_rarg3, BasicObjectLock::obj_offset_in_bytes()), (int) NULL); 3411 __ cmpptr(Address(c_rarg3, BasicObjectLock::obj_offset_in_bytes()), (int32_t) NULL_WORD);
3410 // if not used then remember entry in c_rarg1 3412 // if not used then remember entry in c_rarg1
3411 __ cmovq(Assembler::equal, c_rarg1, c_rarg3); 3413 __ cmov(Assembler::equal, c_rarg1, c_rarg3);
3412 // check if current entry is for same object 3414 // check if current entry is for same object
3413 __ cmpq(rax, Address(c_rarg3, BasicObjectLock::obj_offset_in_bytes())); 3415 __ cmpptr(rax, Address(c_rarg3, BasicObjectLock::obj_offset_in_bytes()));
3414 // if same object then stop searching 3416 // if same object then stop searching
3415 __ jccb(Assembler::equal, exit); 3417 __ jccb(Assembler::equal, exit);
3416 // otherwise advance to next entry 3418 // otherwise advance to next entry
3417 __ addq(c_rarg3, entry_size); 3419 __ addptr(c_rarg3, entry_size);
3418 __ bind(entry); 3420 __ bind(entry);
3419 // check if bottom reached 3421 // check if bottom reached
3420 __ cmpq(c_rarg3, c_rarg2); 3422 __ cmpptr(c_rarg3, c_rarg2);
3421 // if not at bottom then check this entry 3423 // if not at bottom then check this entry
3422 __ jcc(Assembler::notEqual, loop); 3424 __ jcc(Assembler::notEqual, loop);
3423 __ bind(exit); 3425 __ bind(exit);
3424 } 3426 }
3425 3427
3426 __ testq(c_rarg1, c_rarg1); // check if a slot has been found 3428 __ testptr(c_rarg1, c_rarg1); // check if a slot has been found
3427 __ jcc(Assembler::notZero, allocated); // if found, continue with that one 3429 __ jcc(Assembler::notZero, allocated); // if found, continue with that one
3428 3430
3429 // allocate one if there's no free slot 3431 // allocate one if there's no free slot
3430 { 3432 {
3431 Label entry, loop; 3433 Label entry, loop;
3432 // 1. compute new pointers // rsp: old expression stack top 3434 // 1. compute new pointers // rsp: old expression stack top
3433 __ movq(c_rarg1, monitor_block_bot); // c_rarg1: old expression stack bottom 3435 __ movptr(c_rarg1, monitor_block_bot); // c_rarg1: old expression stack bottom
3434 __ subq(rsp, entry_size); // move expression stack top 3436 __ subptr(rsp, entry_size); // move expression stack top
3435 __ subq(c_rarg1, entry_size); // move expression stack bottom 3437 __ subptr(c_rarg1, entry_size); // move expression stack bottom
3436 __ movq(c_rarg3, rsp); // set start value for copy loop 3438 __ mov(c_rarg3, rsp); // set start value for copy loop
3437 __ movq(monitor_block_bot, c_rarg1); // set new monitor block bottom 3439 __ movptr(monitor_block_bot, c_rarg1); // set new monitor block bottom
3438 __ jmp(entry); 3440 __ jmp(entry);
3439 // 2. move expression stack contents 3441 // 2. move expression stack contents
3440 __ bind(loop); 3442 __ bind(loop);
3441 __ movq(c_rarg2, Address(c_rarg3, entry_size)); // load expression stack 3443 __ movptr(c_rarg2, Address(c_rarg3, entry_size)); // load expression stack
3442 // word from old location 3444 // word from old location
3443 __ movq(Address(c_rarg3, 0), c_rarg2); // and store it at new location 3445 __ movptr(Address(c_rarg3, 0), c_rarg2); // and store it at new location
3444 __ addq(c_rarg3, wordSize); // advance to next word 3446 __ addptr(c_rarg3, wordSize); // advance to next word
3445 __ bind(entry); 3447 __ bind(entry);
3446 __ cmpq(c_rarg3, c_rarg1); // check if bottom reached 3448 __ cmpptr(c_rarg3, c_rarg1); // check if bottom reached
3447 __ jcc(Assembler::notEqual, loop); // if not at bottom then 3449 __ jcc(Assembler::notEqual, loop); // if not at bottom then
3448 // copy next word 3450 // copy next word
3449 } 3451 }
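monitorenter's two loops, modeled in plain C++: first a linear scan of the frame's monitor block for a reusable free slot (an entry whose obj is NULL), stopping early once an entry for the same object is seen; if none is free, the block grows by one entry and the expression-stack words below it slide down, which is what the copy loop above does. Stand-in types, hedged:

  struct BasicObjectLock {                  // models one monitor block entry
    void* obj;                              // NULL marks an unused slot
  };

  BasicObjectLock* find_free_slot(BasicObjectLock* top,     // monitor_block_top
                                  BasicObjectLock* bottom,  // monitor_block_bot
                                  void* receiver) {
    BasicObjectLock* free_slot = nullptr;   // c_rarg1
    for (BasicObjectLock* e = top; e != bottom; ++e) {
      if (e->obj == nullptr)                // cmpptr against NULL_WORD
        free_slot = e;                      // cmov(equal, c_rarg1, c_rarg3)
      if (e->obj == receiver)               // same object already present:
        break;                              //   stop searching (jccb(equal, exit))
    }
    return free_slot;                       // NULL -> grow the monitor block
  }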
3450 3452
3451 // call run-time routine 3453 // call run-time routine
3454 3456
3455 // Increment bcp to point to the next bytecode, so exception 3457 // Increment bcp to point to the next bytecode, so exception
3456 // handling for asynchronous exceptions works correctly. 3458 // handling for asynchronous exceptions works correctly.
3457 // The object has already been popped from the stack, so the 3459 // The object has already been popped from the stack, so the
3458 // expression stack looks correct. 3460 // expression stack looks correct.
3459 __ incrementq(r13); 3461 __ increment(r13);
3460 3462
3461 // store object 3463 // store object
3462 __ movq(Address(c_rarg1, BasicObjectLock::obj_offset_in_bytes()), rax); 3464 __ movptr(Address(c_rarg1, BasicObjectLock::obj_offset_in_bytes()), rax);
3463 __ lock_object(c_rarg1); 3465 __ lock_object(c_rarg1);
3464 3466
3465 // check to make sure this monitor doesn't cause stack overflow after locking 3467 // check to make sure this monitor doesn't cause stack overflow after locking
3466 __ save_bcp(); // in case of exception 3468 __ save_bcp(); // in case of exception
3467 __ generate_stack_overflow_check(0); 3469 __ generate_stack_overflow_check(0);
3487 Label found; 3489 Label found;
3488 3490
3489 // find matching slot 3491 // find matching slot
3490 { 3492 {
3491 Label entry, loop; 3493 Label entry, loop;
3492 __ movq(c_rarg1, monitor_block_top); // points to current entry, 3494 __ movptr(c_rarg1, monitor_block_top); // points to current entry,
3493 // starting with top-most entry 3495 // starting with top-most entry
3494 __ leaq(c_rarg2, monitor_block_bot); // points to word before bottom 3496 __ lea(c_rarg2, monitor_block_bot); // points to word before bottom
3495 // of monitor block 3497 // of monitor block
3496 __ jmpb(entry); 3498 __ jmpb(entry);
3497 3499
3498 __ bind(loop); 3500 __ bind(loop);
3499 // check if current entry is for same object 3501 // check if current entry is for same object
3500 __ cmpq(rax, Address(c_rarg1, BasicObjectLock::obj_offset_in_bytes())); 3502 __ cmpptr(rax, Address(c_rarg1, BasicObjectLock::obj_offset_in_bytes()));
3501 // if same object then stop searching 3503 // if same object then stop searching
3502 __ jcc(Assembler::equal, found); 3504 __ jcc(Assembler::equal, found);
3503 // otherwise advance to next entry 3505 // otherwise advance to next entry
3504 __ addq(c_rarg1, entry_size); 3506 __ addptr(c_rarg1, entry_size);
3505 __ bind(entry); 3507 __ bind(entry);
3506 // check if bottom reached 3508 // check if bottom reached
3507 __ cmpq(c_rarg1, c_rarg2); 3509 __ cmpptr(c_rarg1, c_rarg2);
3508 // if not at bottom then check this entry 3510 // if not at bottom then check this entry
3509 __ jcc(Assembler::notEqual, loop); 3511 __ jcc(Assembler::notEqual, loop);
3510 } 3512 }
3511 3513
3512 // error handling. Unlocking was not block-structured 3514 // error handling. Unlocking was not block-structured
3539 transition(vtos, atos); 3541 transition(vtos, atos);
3540 __ load_unsigned_byte(rax, at_bcp(3)); // get number of dimensions 3542 __ load_unsigned_byte(rax, at_bcp(3)); // get number of dimensions
3541 // last dim is on top of stack; we want address of first one: 3543 // last dim is on top of stack; we want address of first one:
3542 // first_addr = last_addr + (ndims - 1) * wordSize 3544 // first_addr = last_addr + (ndims - 1) * wordSize
3543 if (TaggedStackInterpreter) __ shll(rax, 1); // index*2 3545 if (TaggedStackInterpreter) __ shll(rax, 1); // index*2
3544 __ leaq(c_rarg1, Address(rsp, rax, Address::times_8, -wordSize)); 3546 __ lea(c_rarg1, Address(rsp, rax, Address::times_8, -wordSize));
3545 call_VM(rax, 3547 call_VM(rax,
3546 CAST_FROM_FN_PTR(address, InterpreterRuntime::multianewarray), 3548 CAST_FROM_FN_PTR(address, InterpreterRuntime::multianewarray),
3547 c_rarg1); 3549 c_rarg1);
3548 __ load_unsigned_byte(rbx, at_bcp(3)); 3550 __ load_unsigned_byte(rbx, at_bcp(3));
3549 if (TaggedStackInterpreter) __ shll(rbx, 1); // index*2 3551 if (TaggedStackInterpreter) __ shll(rbx, 1); // index*2
3550 __ leaq(rsp, Address(rsp, rbx, Address::times_8)); 3552 __ lea(rsp, Address(rsp, rbx, Address::times_8));
3551 } 3553 }
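The multianewarray stack math, as a standalone model: slots are one word (two when tagged, hence the shll by 1), rsp points at the last dimension, and the first dimension, pushed earliest, sits ndims - 1 slots higher; afterwards all ndims operands are popped by bumping rsp past them. Assumed 8-byte untagged slots:

  #include <cstdint>

  intptr_t* first_dimension_addr(intptr_t* sp, int ndims) {
    // lea(c_rarg1, Address(rsp, rax, times_8, -wordSize))
    return sp + (ndims - 1);
  }

  intptr_t* pop_dimensions(intptr_t* sp, int ndims) {
    // lea(rsp, Address(rsp, rbx, times_8))
    return sp + ndims;
  }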
3554 #endif // !CC_INTERP