comparison src/cpu/ppc/vm/macroAssembler_ppc.cpp @ 17803:31e80afe3fed

8035647: PPC64: Support for the ELFv2 ABI
Summary: Add support for the ELFv2 ABI used by little-endian PowerPC64 Linux.
Reviewed-by: kvn
Contributed-by: asmundak@google.com
author goetz
date Thu, 06 Mar 2014 10:55:28 -0800
parents c668f307a4c0
children 58cf34613a72
comparing 17802:7c462558a08a with 17803:31e80afe3fed
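Background for the change (editorial sketch, not part of the changeset): under the ELFv1 ABI a C function symbol names a function descriptor, a small data record, while under ELFv2 it names the entry point itself and the callee rebuilds its TOC pointer (r2) from r12. The type and helper below are hypothetical and only illustrate that difference.

// Hypothetical illustration; this is not the port's real FunctionDescriptor class.
struct ELFv1FunctionDescriptor {
  void* entry;  // address of the function's first instruction
  void* toc;    // TOC (r2) value the callee expects
  void* env;    // environment pointer, unused by C
};

// What an indirect C call conceptually resolves to under each ABI.
inline void* resolve_call_target(void* fn_symbol) {
#if defined(ABI_ELFv2)
  return fn_symbol;  // the symbol is the code address; the caller passes it in r12
#else
  return static_cast<ELFv1FunctionDescriptor*>(fn_symbol)->entry;  // the symbol is a descriptor
#endif
}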
@@ -592,20 +592,27 @@
     // Assert that we can identify the emitted call/jump.
     assert(is_bxx64_patchable_variant2_at((address)start_pc, link),
            "can't identify emitted call");
   } else {
     // variant 1:
-
+#if defined(ABI_ELFv2)
+    nop();
+    calculate_address_from_global_toc(R12, dest, true, true, false);
+    mtctr(R12);
+    nop();
+    nop();
+#else
     mr(R0, R11);  // spill R11 -> R0.
 
     // Load the destination address into CTR,
     // calculate destination relative to global toc.
     calculate_address_from_global_toc(R11, dest, true, true, false);
 
     mtctr(R11);
     mr(R11, R0);  // spill R11 <- R0.
     nop();
+#endif
 
     // do the call/jump
     if (link) {
       bctrl();
     } else{
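A note on the new ELFv2 arm above (editorial, hedged): the target address is staged in R12 rather than R11 because an ELFv2 callee's global entry point recomputes its TOC pointer (r2) from r12, and the extra nop()s appear to pad the sequence so both arms of the patchable call keep the same size. A minimal sketch of the same emission pattern, assuming the surrounding MacroAssembler context (the wrapper name is made up):

// Sketch only; it reuses the calls that appear in the hunk above.
static void emit_indirect_call_elfv2_sketch(MacroAssembler* masm, address dest) {
  masm->calculate_address_from_global_toc(R12, dest, true, true, false);  // R12 := dest
  masm->mtctr(R12);  // CTR := R12
  masm->bctrl();     // call through CTR; the callee derives its TOC (r2) from R12
}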
@@ -910,27 +917,63 @@
     load_const(tmp, -offset);
     stdux(R1_SP, R1_SP, tmp);
   }
 }
 
-// Push a frame of size `bytes' plus abi112 on top.
-void MacroAssembler::push_frame_abi112(unsigned int bytes, Register tmp) {
-  push_frame(bytes + frame::abi_112_size, tmp);
+// Push a frame of size `bytes' plus abi_reg_args on top.
+void MacroAssembler::push_frame_reg_args(unsigned int bytes, Register tmp) {
+  push_frame(bytes + frame::abi_reg_args_size, tmp);
 }
 
 // Setup up a new C frame with a spill area for non-volatile GPRs and
 // additional space for local variables.
-void MacroAssembler::push_frame_abi112_nonvolatiles(unsigned int bytes,
+void MacroAssembler::push_frame_reg_args_nonvolatiles(unsigned int bytes,
                                                     Register tmp) {
-  push_frame(bytes + frame::abi_112_size + frame::spill_nonvolatiles_size, tmp);
+  push_frame(bytes + frame::abi_reg_args_size + frame::spill_nonvolatiles_size, tmp);
 }
 
 // Pop current C frame.
 void MacroAssembler::pop_frame() {
   ld(R1_SP, _abi(callers_sp), R1_SP);
 }
 
+#if defined(ABI_ELFv2)
+address MacroAssembler::branch_to(Register r_function_entry, bool and_link) {
+  // TODO(asmundak): make sure the caller uses R12 as function descriptor
+  // most of the times.
+  if (R12 != r_function_entry) {
+    mr(R12, r_function_entry);
+  }
+  mtctr(R12);
+  // Do a call or a branch.
+  if (and_link) {
+    bctrl();
+  } else {
+    bctr();
+  }
+  _last_calls_return_pc = pc();
+
+  return _last_calls_return_pc;
+}
+
+// Call a C function via a function descriptor and use full C
+// calling conventions. Updates and returns _last_calls_return_pc.
+address MacroAssembler::call_c(Register r_function_entry) {
+  return branch_to(r_function_entry, /*and_link=*/true);
+}
+
+// For tail calls: only branch, don't link, so callee returns to caller of this function.
+address MacroAssembler::call_c_and_return_to_caller(Register r_function_entry) {
+  return branch_to(r_function_entry, /*and_link=*/false);
+}
+
+address MacroAssembler::call_c(address function_entry, relocInfo::relocType rt) {
+  load_const(R12, function_entry, R0);
+  return branch_to(R12, /*and_link=*/true);
+}
+
+#else
 // Generic version of a call to C function via a function descriptor
 // with variable support for C calling conventions (TOC, ENV, etc.).
 // Updates and returns _last_calls_return_pc.
 address MacroAssembler::branch_to(Register function_descriptor, bool and_link, bool save_toc_before_call,
                                   bool restore_toc_after_call, bool load_toc_of_callee, bool load_env_of_callee) {
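Usage note (editorial sketch): the renamed frame helpers are typically used in the pattern below, which mirrors the G1 barrier call site later in this change; the wrapper function and the choice of temporary registers are illustrative, not part of the changeset.

static void call_leaf_with_frame_sketch(MacroAssembler* masm, address entry,
                                        Register tmp1, Register tmp2) {
  masm->save_LR_CR(tmp1);              // save LR and CR into the caller's frame
  masm->push_frame_reg_args(0, tmp2);  // push a frame with just the abi_reg_args save area
  masm->call_VM_leaf(entry);           // leaf call into C / the VM runtime
  masm->pop_frame();                   // drop the frame again
  masm->restore_LR_CR(tmp1);           // restore LR and CR
}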
@@ -1075,10 +1118,11 @@
     bl64_patchable(fd->entry(), rt);
     _last_calls_return_pc = pc();
   }
   return _last_calls_return_pc;
 }
+#endif
 
 void MacroAssembler::call_VM_base(Register oop_result,
                                   Register last_java_sp,
                                   address entry_point,
                                   bool check_exceptions) {
@@ -1089,12 +1133,15 @@
   }
   set_top_ijava_frame_at_SP_as_last_Java_frame(last_java_sp, R11_scratch1);
 
   // ARG1 must hold thread address.
   mr(R3_ARG1, R16_thread);
-
+#if defined(ABI_ELFv2)
+  address return_pc = call_c(entry_point, relocInfo::none);
+#else
   address return_pc = call_c((FunctionDescriptor*)entry_point, relocInfo::none);
+#endif
 
   reset_last_Java_frame();
 
   // Check for pending exceptions.
   if (check_exceptions) {
@@ -1111,11 +1158,15 @@
   BLOCK_COMMENT("} call_VM");
 }
 
 void MacroAssembler::call_VM_leaf_base(address entry_point) {
   BLOCK_COMMENT("call_VM_leaf {");
+#if defined(ABI_ELFv2)
+  call_c(entry_point, relocInfo::none);
+#else
   call_c(CAST_FROM_FN_PTR(FunctionDescriptor*, entry_point), relocInfo::none);
+#endif
   BLOCK_COMMENT("} call_VM_leaf");
 }
 
 void MacroAssembler::call_VM(Register oop_result, address entry_point, bool check_exceptions) {
   call_VM_base(oop_result, noreg, entry_point, check_exceptions);
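call_VM_base and call_VM_leaf_base now repeat the same #if to pick a call_c overload. A hypothetical helper (not part of this change) could express that choice once; both overloads it dispatches to exist in this file after the change.

static address call_c_abi_agnostic(MacroAssembler* masm, address entry_point,
                                   relocInfo::relocType rt) {
#if defined(ABI_ELFv2)
  return masm->call_c(entry_point, rt);                       // plain entry address
#else
  return masm->call_c((FunctionDescriptor*)entry_point, rt);  // entry point names a descriptor
#endif
}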
@@ -2225,11 +2276,11 @@
   bind(runtime);
 
   // VM call need frame to access(write) O register.
   if (needs_frame) {
     save_LR_CR(Rtmp1);
-    push_frame_abi112(0, Rtmp2);
+    push_frame_reg_args(0, Rtmp2);
   }
 
   if (Rpre_val->is_volatile() && Robj == noreg) mr(R31, Rpre_val); // Save pre_val across C call if it was preloaded.
   call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::g1_wb_pre), Rpre_val, R16_thread);
   if (Rpre_val->is_volatile() && Robj == noreg) mr(Rpre_val, R31); // restore
@@ -3004,17 +3055,17 @@
   address/* FunctionDescriptor** */fd = StubRoutines::verify_oop_subroutine_entry_address();
   // save tmp
   mr(R0, tmp);
   // kill tmp
   save_LR_CR(tmp);
-  push_frame_abi112(nbytes_save, tmp);
+  push_frame_reg_args(nbytes_save, tmp);
   // restore tmp
   mr(tmp, R0);
   save_volatile_gprs(R1_SP, 112); // except R0
-  // load FunctionDescriptor**
+  // load FunctionDescriptor** / entry_address *
   load_const(tmp, fd);
-  // load FunctionDescriptor*
+  // load FunctionDescriptor* / entry_address
   ld(tmp, 0, tmp);
   mr(R4_ARG2, oop);
   load_const(R3_ARG1, (address)msg);
   // call destination for its side effect
   call_c(tmp);
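The comment updates in this last hunk record why verify_oop needs no #if of its own: the slot returned by StubRoutines::verify_oop_subroutine_entry_address() holds a FunctionDescriptor* under ELFv1 and a plain entry address under ELFv2, and either way a single load produces the value call_c() expects. A sketch of that indirection, assuming the surrounding MacroAssembler context (the wrapper name is illustrative):

static void load_and_call_verify_oop_sketch(MacroAssembler* masm, Register tmp) {
  address slot = StubRoutines::verify_oop_subroutine_entry_address();
  masm->load_const(tmp, slot);  // tmp := address of the slot
  masm->ld(tmp, 0, tmp);        // tmp := *slot (descriptor pointer or code address)
  masm->call_c(tmp);            // call_c() handles the rest per ABI
}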