comparison src/cpu/x86/vm/stubGenerator_x86_64.cpp @ 4137:04b9a2566eec

Merge with hsx23/hotspot.
author Thomas Wuerthinger <thomas.wuerthinger@oracle.com>
date Sat, 17 Dec 2011 21:40:27 +0100
parents 59bc0d4d9ea3
children 82af018d61db
comparing 3737:9dc19b7d89a3 (old) with 4137:04b9a2566eec (new)

--- src/cpu/x86/vm/stubGenerator_x86_64.cpp @ 3737:9dc19b7d89a3
+++ src/cpu/x86/vm/stubGenerator_x86_64.cpp @ 4137:04b9a2566eec
45 # include "thread_solaris.inline.hpp" 45 # include "thread_solaris.inline.hpp"
46 #endif 46 #endif
47 #ifdef TARGET_OS_FAMILY_windows 47 #ifdef TARGET_OS_FAMILY_windows
48 # include "thread_windows.inline.hpp" 48 # include "thread_windows.inline.hpp"
49 #endif 49 #endif
50 #ifdef TARGET_OS_FAMILY_bsd
51 # include "thread_bsd.inline.hpp"
52 #endif
50 #ifdef COMPILER2 53 #ifdef COMPILER2
51 #include "opto/runtime.hpp" 54 #include "opto/runtime.hpp"
52 #endif 55 #endif
53 56
54 // Declaration and definition of StubGenerator (no .hpp file). 57 // Declaration and definition of StubGenerator (no .hpp file).
@@ -90,10 +93,11 @@
 
 #ifdef PRODUCT
 #define inc_counter_np(counter) (0)
 #else
   void inc_counter_np_(int& counter) {
+    // This can destroy rscratch1 if counter is far from the code cache
     __ incrementl(ExternalAddress((address)&counter));
   }
 #define inc_counter_np(counter) \
   BLOCK_COMMENT("inc_counter " #counter); \
   inc_counter_np_(counter);
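The new comment above and the recurring "Update counter after rscratch1 is free" change in the copy-stub hunks below share one cause: incrementl(ExternalAddress((address)&counter)) can be encoded with a plain 32-bit displacement only when the counter lies close enough to the generated code; otherwise the macro assembler must first materialize the 64-bit address in rscratch1, and per the comments rscratch1 is not free to clobber until restore_arg_regs() has run. A minimal standalone sketch of that reachability test follows; the helper names are illustrative, not HotSpot APIs.

#include <cstdint>

// Does a 64-bit displacement survive sign-extension from 32 bits?
static bool fits_simm32(intptr_t value) {
  return value == static_cast<intptr_t>(static_cast<int32_t>(value));
}

// Hypothetical check mirroring "counter is far from the code cache": if the
// distance from the code to the counter does not fit in simm32, the assembler
// has to load the full 64-bit address into a scratch register (rscratch1) first.
static bool increment_clobbers_scratch(const void* code_pos, const int* counter) {
  intptr_t distance = reinterpret_cast<intptr_t>(counter) -
                      reinterpret_cast<intptr_t>(code_pos);
  return !fits_simm32(distance);
}

That is why the stubs below now call restore_arg_regs() first and only then bump the counter.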
@@ -933,10 +937,12 @@
 
     __ push(0);                       // hole for return address-to-be
     __ pusha();                       // push registers
     Address next_pc(rsp, RegisterImpl::number_of_registers * BytesPerWord);
 
+    // FIXME: this probably needs alignment logic
+
     __ subptr(rsp, frame::arg_reg_save_area_bytes);
     BLOCK_COMMENT("call handle_unsafe_access");
     __ call(RuntimeAddress(CAST_FROM_FN_PTR(address, handle_unsafe_access)));
     __ addptr(rsp, frame::arg_reg_save_area_bytes);
 
@@ -1261,11 +1267,11 @@
     __ addptr(end, BytesPerHeapOop);
     __ shrptr(end, CardTableModRefBS::card_shift);
     __ subptr(end, start); // number of bytes to copy
 
     intptr_t disp = (intptr_t) ct->byte_map_base;
-    if (__ is_simm32(disp)) {
+    if (Assembler::is_simm32(disp)) {
       Address cardtable(noreg, noreg, Address::no_scale, disp);
       __ lea(scratch, cardtable);
     } else {
       ExternalAddress cardtable((address)disp);
       __ lea(scratch, cardtable);
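The hunk above sits in the card-table post-barrier used by the array-copy stubs: the copied address range is turned into a span of card bytes relative to ct->byte_map_base, and the only change here is that the displacement check is now called as the static Assembler::is_simm32 rather than through the macro-assembler instance. For orientation, a rough standalone sketch of what the barrier computes, assuming HotSpot's usual 512-byte cards (card_shift == 9) and 0 as the dirty value; the helper is illustrative, not the real gen_write_ref_array_post_barrier.

#include <cstdint>

static const int kCardShift = 9;        // assumption: 512-byte cards
static const uint8_t kDirtyCard = 0;    // assumption: dirty cards are marked with 0

// Dirty every card covering the heap oops in [start, end) after a copy,
// indexing the (biased) byte map by address >> card_shift.
static void dirty_cards(uint8_t* byte_map_base, uintptr_t start, uintptr_t end) {
  uintptr_t first = start >> kCardShift;
  uintptr_t last  = (end - 1) >> kCardShift;   // end is exclusive
  for (uintptr_t card = first; card <= last; ++card) {
    byte_map_base[card] = kDirtyCard;
  }
}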
@@ -1459,12 +1465,12 @@
     __ jccb(Assembler::zero, L_exit);
     __ movb(rax, Address(end_from, 8));
     __ movb(Address(end_to, 8), rax);
 
   __ BIND(L_exit);
-    inc_counter_np(SharedRuntime::_jbyte_array_copy_ctr);
     restore_arg_regs();
+    inc_counter_np(SharedRuntime::_jbyte_array_copy_ctr); // Update counter after rscratch1 is free
     __ xorptr(rax, rax); // return 0
     __ leave(); // required for proper stackwalking of RuntimeStub frame
     __ ret(0);
 
     // Copy in 32-bytes chunks
@@ -1548,21 +1554,21 @@
     __ movq(rax, Address(from, qword_count, Address::times_8, -8));
     __ movq(Address(to, qword_count, Address::times_8, -8), rax);
     __ decrement(qword_count);
     __ jcc(Assembler::notZero, L_copy_8_bytes);
 
-    inc_counter_np(SharedRuntime::_jbyte_array_copy_ctr);
     restore_arg_regs();
+    inc_counter_np(SharedRuntime::_jbyte_array_copy_ctr); // Update counter after rscratch1 is free
     __ xorptr(rax, rax); // return 0
     __ leave(); // required for proper stackwalking of RuntimeStub frame
     __ ret(0);
 
     // Copy in 32-bytes chunks
     copy_32_bytes_backward(from, to, qword_count, rax, L_copy_32_bytes, L_copy_8_bytes);
 
-    inc_counter_np(SharedRuntime::_jbyte_array_copy_ctr);
     restore_arg_regs();
+    inc_counter_np(SharedRuntime::_jbyte_array_copy_ctr); // Update counter after rscratch1 is free
     __ xorptr(rax, rax); // return 0
     __ leave(); // required for proper stackwalking of RuntimeStub frame
     __ ret(0);
 
     return start;
@@ -1651,12 +1657,12 @@
     __ jccb(Assembler::zero, L_exit);
     __ movw(rax, Address(end_from, 8));
     __ movw(Address(end_to, 8), rax);
 
   __ BIND(L_exit);
-    inc_counter_np(SharedRuntime::_jshort_array_copy_ctr);
     restore_arg_regs();
+    inc_counter_np(SharedRuntime::_jshort_array_copy_ctr); // Update counter after rscratch1 is free
     __ xorptr(rax, rax); // return 0
     __ leave(); // required for proper stackwalking of RuntimeStub frame
     __ ret(0);
 
     // Copy in 32-bytes chunks
@@ -1752,21 +1758,21 @@
     __ movq(rax, Address(from, qword_count, Address::times_8, -8));
     __ movq(Address(to, qword_count, Address::times_8, -8), rax);
     __ decrement(qword_count);
     __ jcc(Assembler::notZero, L_copy_8_bytes);
 
-    inc_counter_np(SharedRuntime::_jshort_array_copy_ctr);
     restore_arg_regs();
+    inc_counter_np(SharedRuntime::_jshort_array_copy_ctr); // Update counter after rscratch1 is free
     __ xorptr(rax, rax); // return 0
     __ leave(); // required for proper stackwalking of RuntimeStub frame
     __ ret(0);
 
     // Copy in 32-bytes chunks
     copy_32_bytes_backward(from, to, qword_count, rax, L_copy_32_bytes, L_copy_8_bytes);
 
-    inc_counter_np(SharedRuntime::_jshort_array_copy_ctr);
     restore_arg_regs();
+    inc_counter_np(SharedRuntime::_jshort_array_copy_ctr); // Update counter after rscratch1 is free
     __ xorptr(rax, rax); // return 0
     __ leave(); // required for proper stackwalking of RuntimeStub frame
     __ ret(0);
 
     return start;
@@ -1852,12 +1858,12 @@
   __ BIND(L_exit);
     if (is_oop) {
       __ leaq(end_to, Address(saved_to, dword_count, Address::times_4, -4));
       gen_write_ref_array_post_barrier(saved_to, end_to, rax);
     }
-    inc_counter_np(SharedRuntime::_jint_array_copy_ctr);
     restore_arg_regs();
+    inc_counter_np(SharedRuntime::_jint_array_copy_ctr); // Update counter after rscratch1 is free
     __ xorptr(rax, rax); // return 0
     __ leave(); // required for proper stackwalking of RuntimeStub frame
     __ ret(0);
 
     // Copy 32-bytes chunks
@@ -1933,30 +1939,30 @@
     __ movq(rax, Address(from, qword_count, Address::times_8, -8));
     __ movq(Address(to, qword_count, Address::times_8, -8), rax);
     __ decrement(qword_count);
     __ jcc(Assembler::notZero, L_copy_8_bytes);
 
-    inc_counter_np(SharedRuntime::_jint_array_copy_ctr);
     if (is_oop) {
       __ jmp(L_exit);
     }
     restore_arg_regs();
+    inc_counter_np(SharedRuntime::_jint_array_copy_ctr); // Update counter after rscratch1 is free
     __ xorptr(rax, rax); // return 0
     __ leave(); // required for proper stackwalking of RuntimeStub frame
     __ ret(0);
 
     // Copy in 32-bytes chunks
     copy_32_bytes_backward(from, to, qword_count, rax, L_copy_32_bytes, L_copy_8_bytes);
 
-    inc_counter_np(SharedRuntime::_jint_array_copy_ctr);
   __ bind(L_exit);
     if (is_oop) {
       Register end_to = rdx;
       __ leaq(end_to, Address(to, dword_count, Address::times_4, -4));
       gen_write_ref_array_post_barrier(to, end_to, rax);
     }
     restore_arg_regs();
+    inc_counter_np(SharedRuntime::_jint_array_copy_ctr); // Update counter after rscratch1 is free
     __ xorptr(rax, rax); // return 0
     __ leave(); // required for proper stackwalking of RuntimeStub frame
     __ ret(0);
 
     return start;
@@ -2025,12 +2031,12 @@
     __ jcc(Assembler::notZero, L_copy_8_bytes);
 
     if (is_oop) {
       __ jmp(L_exit);
     } else {
-      inc_counter_np(SharedRuntime::_jlong_array_copy_ctr);
       restore_arg_regs();
+      inc_counter_np(SharedRuntime::_jlong_array_copy_ctr); // Update counter after rscratch1 is free
       __ xorptr(rax, rax); // return 0
       __ leave(); // required for proper stackwalking of RuntimeStub frame
       __ ret(0);
     }
 
@@ -2038,15 +2044,17 @@
     copy_32_bytes_forward(end_from, end_to, qword_count, rax, L_copy_32_bytes, L_copy_8_bytes);
 
     if (is_oop) {
     __ BIND(L_exit);
       gen_write_ref_array_post_barrier(saved_to, end_to, rax);
-      inc_counter_np(SharedRuntime::_oop_array_copy_ctr);
+    }
+    restore_arg_regs();
+    if (is_oop) {
+      inc_counter_np(SharedRuntime::_oop_array_copy_ctr); // Update counter after rscratch1 is free
     } else {
-      inc_counter_np(SharedRuntime::_jlong_array_copy_ctr);
+      inc_counter_np(SharedRuntime::_jlong_array_copy_ctr); // Update counter after rscratch1 is free
     }
-    restore_arg_regs();
     __ xorptr(rax, rax); // return 0
     __ leave(); // required for proper stackwalking of RuntimeStub frame
     __ ret(0);
 
     return start;
@@ -2106,12 +2114,12 @@
     __ jcc(Assembler::notZero, L_copy_8_bytes);
 
     if (is_oop) {
       __ jmp(L_exit);
     } else {
-      inc_counter_np(SharedRuntime::_jlong_array_copy_ctr);
       restore_arg_regs();
+      inc_counter_np(SharedRuntime::_jlong_array_copy_ctr); // Update counter after rscratch1 is free
       __ xorptr(rax, rax); // return 0
       __ leave(); // required for proper stackwalking of RuntimeStub frame
       __ ret(0);
     }
 
@@ -2120,15 +2128,17 @@
 
     if (is_oop) {
     __ BIND(L_exit);
       __ lea(rcx, Address(to, saved_count, Address::times_8, -8));
       gen_write_ref_array_post_barrier(to, rcx, rax);
-      inc_counter_np(SharedRuntime::_oop_array_copy_ctr);
+    }
+    restore_arg_regs();
+    if (is_oop) {
+      inc_counter_np(SharedRuntime::_oop_array_copy_ctr); // Update counter after rscratch1 is free
     } else {
-      inc_counter_np(SharedRuntime::_jlong_array_copy_ctr);
+      inc_counter_np(SharedRuntime::_jlong_array_copy_ctr); // Update counter after rscratch1 is free
     }
-    restore_arg_regs();
     __ xorptr(rax, rax); // return 0
     __ leave(); // required for proper stackwalking of RuntimeStub frame
     __ ret(0);
 
     return start;
@@ -2324,12 +2334,12 @@
 
     // Common exit point (success or failure).
   __ BIND(L_done);
     __ movptr(r13, Address(rsp, saved_r13_offset * wordSize));
     __ movptr(r14, Address(rsp, saved_r14_offset * wordSize));
-    inc_counter_np(SharedRuntime::_checkcast_array_copy_ctr);
     restore_arg_regs();
+    inc_counter_np(SharedRuntime::_checkcast_array_copy_ctr); // Update counter after rscratch1 is free
     __ leave(); // required for proper stackwalking of RuntimeStub frame
     __ ret(0);
 
     return start;
   }
@@ -2960,16 +2970,10 @@
 
     // This is an inlined and slightly modified version of call_VM
     // which has the ability to fetch the return PC out of
    // thread-local storage and also sets up last_Java_sp slightly
     // differently than the real call_VM
-    if (restore_saved_exception_pc) {
-      __ movptr(rax,
-                Address(r15_thread,
-                        in_bytes(JavaThread::saved_exception_pc_offset())));
-      __ push(rax);
-    }
 
     __ enter(); // required for proper stackwalking of RuntimeStub frame
 
     assert(is_even(framesize/2), "sp not 16-byte aligned");
 
@@ -3079,47 +3083,29 @@
     // fabricate a RuntimeStub internally.
     StubRoutines::_throw_AbstractMethodError_entry =
       generate_throw_exception("AbstractMethodError throw_exception",
                                CAST_FROM_FN_PTR(address,
                                                 SharedRuntime::
-                                                throw_AbstractMethodError),
-                               false);
+                                                throw_AbstractMethodError));
 
     StubRoutines::_throw_IncompatibleClassChangeError_entry =
       generate_throw_exception("IncompatibleClassChangeError throw_exception",
                                CAST_FROM_FN_PTR(address,
                                                 SharedRuntime::
-                                                throw_IncompatibleClassChangeError),
-                               false);
+                                                throw_IncompatibleClassChangeError));
-
-    StubRoutines::_throw_ArithmeticException_entry =
-      generate_throw_exception("ArithmeticException throw_exception",
-                               CAST_FROM_FN_PTR(address,
-                                                SharedRuntime::
-                                                throw_ArithmeticException),
-                               true);
-
-    StubRoutines::_throw_NullPointerException_entry =
-      generate_throw_exception("NullPointerException throw_exception",
-                               CAST_FROM_FN_PTR(address,
-                                                SharedRuntime::
-                                                throw_NullPointerException),
-                               true);
 
     StubRoutines::_throw_NullPointerException_at_call_entry =
       generate_throw_exception("NullPointerException at call throw_exception",
                                CAST_FROM_FN_PTR(address,
                                                 SharedRuntime::
-                                                throw_NullPointerException_at_call),
-                               false);
+                                                throw_NullPointerException_at_call));
 
     StubRoutines::_throw_StackOverflowError_entry =
       generate_throw_exception("StackOverflowError throw_exception",
                                CAST_FROM_FN_PTR(address,
                                                 SharedRuntime::
-                                                throw_StackOverflowError),
-                               false);
+                                                throw_StackOverflowError));
 
     // entry points that are platform specific
     StubRoutines::x86::_f2i_fixup = generate_f2i_fixup();
     StubRoutines::x86::_f2l_fixup = generate_f2l_fixup();
     StubRoutines::x86::_d2i_fixup = generate_d2i_fixup();