comparison src/cpu/ppc/vm/methodHandles_ppc.cpp @ 14408:ec28f9c041ff

8019972: PPC64 (part 9): platform files for interpreter only VM.
Summary: With this change the HotSpot core build works on Linux/PPC64. The VM successfully executes simple test programs.
Reviewed-by: kvn
author goetz
date Fri, 02 Aug 2013 16:46:45 +0200
parents
children 67fa91961822
comparison: 14407:94c202aa2646 vs. 14408:ec28f9c041ff
1 /*
2 * Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
3 * Copyright 2012, 2013 SAP AG. All rights reserved.
4 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
5 *
6 * This code is free software; you can redistribute it and/or modify it
7 * under the terms of the GNU General Public License version 2 only, as
8 * published by the Free Software Foundation.
9 *
10 * This code is distributed in the hope that it will be useful, but WITHOUT
11 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
12 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
13 * version 2 for more details (a copy is included in the LICENSE file that
14 * accompanied this code).
15 *
16 * You should have received a copy of the GNU General Public License version
17 * 2 along with this work; if not, write to the Free Software Foundation,
18 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
19 *
20 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
21 * or visit www.oracle.com if you need additional information or have any
22 * questions.
23 *
24 */
25
26 #include "precompiled.hpp"
27 #include "asm/macroAssembler.inline.hpp"
28 #include "interpreter/interpreter.hpp"
29 #include "memory/allocation.inline.hpp"
30 #include "prims/methodHandles.hpp"
31
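// The macros below follow the usual HotSpot stub-generator conventions: "__"
// expands to "_masm->", so "__ ld(...)" emits code through the current
// MacroAssembler; BLOCK_COMMENT (and the comment half of BIND) annotates the
// generated code in debug builds and disappears in PRODUCT builds.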
32 #define __ _masm->
33
34 #ifdef PRODUCT
35 #define BLOCK_COMMENT(str) // nothing
36 #define STOP(error) stop(error)
37 #else
38 #define BLOCK_COMMENT(str) __ block_comment(str)
39 #define STOP(error) block_comment(error); __ stop(error)
40 #endif
41
42 #define BIND(label) bind(label); BLOCK_COMMENT(#label ":")
43
44 // Workaround for C++ overloading nastiness on '0' for RegisterOrConstant.
45 inline static RegisterOrConstant constant(int value) {
46 return RegisterOrConstant(value);
47 }
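// Illustrative note: RegisterOrConstant can be constructed from either a Register
// or an integer, and the literal '0' is also a valid null Register, so passing a
// bare 0 to a parameter of type RegisterOrConstant can be rejected as ambiguous.
// Writing constant(0) (or an explicit RegisterOrConstant((intptr_t)8), as done
// further down in this file) pins the integer interpretation.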
48
49 void MethodHandles::load_klass_from_Class(MacroAssembler* _masm, Register klass_reg, Register temp_reg, Register temp2_reg) {
50 if (VerifyMethodHandles)
51 verify_klass(_masm, klass_reg, SystemDictionary::WK_KLASS_ENUM_NAME(java_lang_Class), temp_reg, temp2_reg,
52 "MH argument is a Class");
53 __ ld(klass_reg, java_lang_Class::klass_offset_in_bytes(), klass_reg);
54 }
55
56 #ifdef ASSERT
57 static int check_nonzero(const char* xname, int x) {
58 assert(x != 0, err_msg("%s should be nonzero", xname));
59 return x;
60 }
61 #define NONZERO(x) check_nonzero(#x, x)
62 #else //ASSERT
63 #define NONZERO(x) (x)
64 #endif //ASSERT
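// The java.lang.invoke field offsets used throughout this file are computed at
// runtime once the MethodHandle classes have been loaded, so an offset of 0
// almost certainly means "not initialized yet" rather than a real field at
// offset 0; NONZERO() turns that case into a debug-build assert instead of a
// load from a bogus displacement.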
65
66 #ifdef ASSERT
67 void MethodHandles::verify_klass(MacroAssembler* _masm,
68 Register obj_reg, SystemDictionary::WKID klass_id,
69 Register temp_reg, Register temp2_reg,
70 const char* error_message) {
71 Klass** klass_addr = SystemDictionary::well_known_klass_addr(klass_id);
72 KlassHandle klass = SystemDictionary::well_known_klass(klass_id);
73 Label L_ok, L_bad;
74 BLOCK_COMMENT("verify_klass {");
75 __ verify_oop(obj_reg);
76 __ cmpdi(CCR0, obj_reg, 0);
77 __ beq(CCR0, L_bad);
78 __ load_klass(temp_reg, obj_reg);
79 __ load_const_optimized(temp2_reg, (address) klass_addr);
80 __ ld(temp2_reg, 0, temp2_reg);
81 __ cmpd(CCR0, temp_reg, temp2_reg);
82 __ beq(CCR0, L_ok);
83 __ ld(temp_reg, klass->super_check_offset(), temp_reg);
84 __ cmpd(CCR0, temp_reg, temp2_reg);
85 __ beq(CCR0, L_ok);
86 __ BIND(L_bad);
87 __ stop(error_message);
88 __ BIND(L_ok);
89 BLOCK_COMMENT("} verify_klass");
90 }
91
92 void MethodHandles::verify_ref_kind(MacroAssembler* _masm, int ref_kind, Register member_reg, Register temp) {
93 Label L;
94 BLOCK_COMMENT("verify_ref_kind {");
95 __ load_sized_value(temp, NONZERO(java_lang_invoke_MemberName::flags_offset_in_bytes()), member_reg,
96 sizeof(u4), /*is_signed*/ false);
97 // assert(sizeof(u4) == sizeof(java.lang.invoke.MemberName.flags), "");
98 __ srwi( temp, temp, java_lang_invoke_MemberName::MN_REFERENCE_KIND_SHIFT);
99 __ andi(temp, temp, java_lang_invoke_MemberName::MN_REFERENCE_KIND_MASK);
100 __ cmpwi(CCR1, temp, ref_kind);
101 __ beq(CCR1, L);
102 { char* buf = NEW_C_HEAP_ARRAY(char, 100, mtInternal);
103 jio_snprintf(buf, 100, "verify_ref_kind expected %x", ref_kind);
104 if (ref_kind == JVM_REF_invokeVirtual ||
105 ref_kind == JVM_REF_invokeSpecial)
106 // could do this for all ref_kinds, but would explode assembly code size
107 trace_method_handle(_masm, buf);
108 __ stop(buf);
109 }
110 BLOCK_COMMENT("} verify_ref_kind");
111 __ BIND(L);
112 }
113
114 #endif // ASSERT
115
116 void MethodHandles::jump_from_method_handle(MacroAssembler* _masm, Register method, Register target, Register temp,
117 bool for_compiler_entry) {
118 assert(method == R19_method, "interpreter calling convention");
119 assert_different_registers(method, target, temp);
120
121 if (!for_compiler_entry && JvmtiExport::can_post_interpreter_events()) {
122 Label run_compiled_code;
123 // JVMTI events, such as single-stepping, are implemented partly by avoiding running
124 // compiled code in threads for which the event is enabled. Check here for
125 // interp_only_mode if these events CAN be enabled.
126 __ verify_thread();
127 __ lwz(temp, in_bytes(JavaThread::interp_only_mode_offset()), R16_thread);
128 __ cmplwi(CCR0, temp, 0);
129 __ beq(CCR0, run_compiled_code);
130 __ ld(target, in_bytes(Method::interpreter_entry_offset()), R19_method);
131 __ mtctr(target);
132 __ bctr();
133 __ BIND(run_compiled_code);
134 }
135
136 const ByteSize entry_offset = for_compiler_entry ? Method::from_compiled_offset() :
137 Method::from_interpreted_offset();
138 __ ld(target, in_bytes(entry_offset), R19_method);
139 __ mtctr(target);
140 __ bctr();
141 }
142
143
144 void MethodHandles::jump_to_lambda_form(MacroAssembler* _masm,
145 Register recv, Register method_temp,
146 Register temp2, Register temp3,
147 bool for_compiler_entry) {
148 BLOCK_COMMENT("jump_to_lambda_form {");
149 // This is the initial entry point of a lazy method handle.
150 // After type checking, it picks up the invoker from the LambdaForm.
151 assert_different_registers(recv, method_temp, temp2); // temp3 is only passed on
152 assert(method_temp == R19_method, "required register for loading method");
153
154 // Load the invoker, as MH -> MH.form -> LF.vmentry
155 __ verify_oop(recv);
156 __ load_heap_oop_not_null(method_temp, NONZERO(java_lang_invoke_MethodHandle::form_offset_in_bytes()), recv);
157 __ verify_oop(method_temp);
158 __ load_heap_oop_not_null(method_temp, NONZERO(java_lang_invoke_LambdaForm::vmentry_offset_in_bytes()), method_temp);
159 __ verify_oop(method_temp);
160 // the following assumes that a Method* is normally compressed in the vmtarget field:
161 __ ld(method_temp, NONZERO(java_lang_invoke_MemberName::vmtarget_offset_in_bytes()), method_temp);
162
163 if (VerifyMethodHandles && !for_compiler_entry) {
164 // make sure recv is already on stack
165 __ ld(temp2, in_bytes(Method::const_offset()), method_temp);
166 __ load_sized_value(temp2, in_bytes(ConstMethod::size_of_parameters_offset()), temp2,
167 sizeof(u2), /*is_signed*/ false);
168 // assert(sizeof(u2) == sizeof(ConstMethod::_size_of_parameters), "");
169 Label L;
170 __ ld(temp2, __ argument_offset(temp2, temp2, 0), R17_tos);
171 __ cmpd(CCR1, temp2, recv);
172 __ beq(CCR1, L);
173 __ stop("receiver not on stack");
174 __ BIND(L);
175 }
176
177 jump_from_method_handle(_masm, method_temp, temp2, temp3, for_compiler_entry);
178 BLOCK_COMMENT("} jump_to_lambda_form");
179 }
180
181
182
183 // Code generation
184 address MethodHandles::generate_method_handle_interpreter_entry(MacroAssembler* _masm,
185 vmIntrinsics::ID iid) {
186 const bool not_for_compiler_entry = false; // this is the interpreter entry
187 assert(is_signature_polymorphic(iid), "expected invoke iid");
188 if (iid == vmIntrinsics::_invokeGeneric ||
189 iid == vmIntrinsics::_compiledLambdaForm) {
190 // Perhaps surprisingly, the symbolic references visible to Java are not directly used.
191 // They are linked to Java-generated adapters via MethodHandleNatives.linkMethod.
192 // They all allow an appendix argument.
193 __ stop("Should not reach here"); // empty stubs make SG sick
194 return NULL;
195 }
196
197 Register argbase = R17_tos; // parameter (preserved)
198 Register argslot = R3;
199 Register temp1 = R6;
200 Register param_size = R7;
201
202 // here's where control starts out:
203 __ align(CodeEntryAlignment);
204 address entry_point = __ pc();
205
206 if (VerifyMethodHandles) {
207 Label L;
208 BLOCK_COMMENT("verify_intrinsic_id {");
209 __ load_sized_value(temp1, Method::intrinsic_id_offset_in_bytes(), R19_method,
210 sizeof(u1), /*is_signed*/ false);
211 // assert(sizeof(u1) == sizeof(Method::_intrinsic_id), "");
212 __ cmpwi(CCR1, temp1, (int) iid);
213 __ beq(CCR1, L);
214 if (iid == vmIntrinsics::_linkToVirtual ||
215 iid == vmIntrinsics::_linkToSpecial) {
216 // could do this for all kinds, but would explode assembly code size
217 trace_method_handle(_masm, "bad Method*:intrinsic_id");
218 }
219 __ stop("bad Method*::intrinsic_id");
220 __ BIND(L);
221 BLOCK_COMMENT("} verify_intrinsic_id");
222 }
223
224 // First task: Find out how big the argument list is.
225 int ref_kind = signature_polymorphic_intrinsic_ref_kind(iid);
226 assert(ref_kind != 0 || iid == vmIntrinsics::_invokeBasic, "must be _invokeBasic or a linkTo intrinsic");
227 if (ref_kind == 0 || MethodHandles::ref_kind_has_receiver(ref_kind)) {
228 __ ld(param_size, in_bytes(Method::const_offset()), R19_method);
229 __ load_sized_value(param_size, in_bytes(ConstMethod::size_of_parameters_offset()), param_size,
230 sizeof(u2), /*is_signed*/ false);
231 // assert(sizeof(u2) == sizeof(ConstMethod::_size_of_parameters), "");
232 } else {
233 DEBUG_ONLY(param_size = noreg);
234 }
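// param_size is only needed when there is an argument to dig out of the
// interpreter stack: argument 0 (the MethodHandle for _invokeBasic, or the
// receiver for the linkTo* kinds that have one) is located relative to argbase
// via argument_offset() below, which needs the total number of parameter slots.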
235
236 Register tmp_mh = noreg;
237 if (!is_signature_polymorphic_static(iid)) {
238 __ ld(tmp_mh = temp1, __ argument_offset(param_size, param_size, 0), argbase);
239 DEBUG_ONLY(param_size = noreg);
240 }
241
242 if (TraceMethodHandles) {
243 if (tmp_mh != noreg)
244 __ mr(R23_method_handle, tmp_mh); // make stub happy
245 trace_method_handle_interpreter_entry(_masm, iid);
246 }
247
248 if (iid == vmIntrinsics::_invokeBasic) {
249 generate_method_handle_dispatch(_masm, iid, tmp_mh, noreg, not_for_compiler_entry);
250
251 } else {
252 // Adjust argument list by popping the trailing MemberName argument.
253 Register tmp_recv = noreg;
254 if (MethodHandles::ref_kind_has_receiver(ref_kind)) {
255 // Load the receiver (not the MH; the actual MemberName's receiver) up from the interpreter stack.
256 __ ld(tmp_recv = temp1, __ argument_offset(param_size, param_size, 0), argbase);
257 DEBUG_ONLY(param_size = noreg);
258 }
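// The trailing MemberName is the argument nearest the interpreter's top of
// stack: it is loaded from one stack element (8 bytes on PPC64) above argbase
// and then popped by advancing argbase by one stackElementSize.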
259 Register R19_member = R19_method; // MemberName ptr; incoming method ptr is dead now
260 __ ld(R19_member, RegisterOrConstant((intptr_t)8), argbase);
261 __ add(argbase, Interpreter::stackElementSize, argbase);
262 generate_method_handle_dispatch(_masm, iid, tmp_recv, R19_member, not_for_compiler_entry);
263 }
264
265 return entry_point;
266 }
267
268 void MethodHandles::generate_method_handle_dispatch(MacroAssembler* _masm,
269 vmIntrinsics::ID iid,
270 Register receiver_reg,
271 Register member_reg,
272 bool for_compiler_entry) {
273 assert(is_signature_polymorphic(iid), "expected invoke iid");
274 Register temp1 = (for_compiler_entry ? R21_tmp1 : R7);
275 Register temp2 = (for_compiler_entry ? R22_tmp2 : R8);
276 Register temp3 = (for_compiler_entry ? R23_tmp3 : R9);
277 Register temp4 = (for_compiler_entry ? R24_tmp4 : R10);
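// For the compiler entry the temps are taken from R21..R24 so as not to clobber
// the volatile argument registers R7..R10, which may still carry live
// compiled-code arguments at this point (per the PPC64 argument-register
// convention).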
278 if (receiver_reg != noreg) assert_different_registers(temp1, temp2, temp3, temp4, receiver_reg);
279 if (member_reg != noreg) assert_different_registers(temp1, temp2, temp3, temp4, member_reg);
280
281 if (iid == vmIntrinsics::_invokeBasic) {
282 // indirect through MH.form.vmentry.vmtarget
283 jump_to_lambda_form(_masm, receiver_reg, R19_method, temp1, temp2, for_compiler_entry);
284 } else {
285 // The method is a member invoker used by direct method handles.
286 if (VerifyMethodHandles) {
287 // make sure the trailing argument really is a MemberName (caller responsibility)
288 verify_klass(_masm, member_reg, SystemDictionary::WK_KLASS_ENUM_NAME(MemberName_klass),
289 temp1, temp2,
290 "MemberName required for invokeVirtual etc.");
291 }
292
293 Register temp1_recv_klass = temp1;
294 if (iid != vmIntrinsics::_linkToStatic) {
295 __ verify_oop(receiver_reg);
296 if (iid == vmIntrinsics::_linkToSpecial) {
297 // Don't actually load the klass; just null-check the receiver.
298 __ null_check_throw(receiver_reg, 0, temp1, StubRoutines::throw_NullPointerException_at_call_entry());
299 } else {
300 // load receiver klass itself
301 __ null_check_throw(receiver_reg, oopDesc::klass_offset_in_bytes(),
302 temp1, StubRoutines::throw_NullPointerException_at_call_entry());
303 __ load_klass(temp1_recv_klass, receiver_reg);
304 __ verify_klass_ptr(temp1_recv_klass);
305 }
306 BLOCK_COMMENT("check_receiver {");
307 // The receiver for the MemberName must be in receiver_reg.
308 // Check the receiver against the MemberName.clazz
309 if (VerifyMethodHandles && iid == vmIntrinsics::_linkToSpecial) {
310 // Did not load it above...
311 __ load_klass(temp1_recv_klass, receiver_reg);
312 __ verify_klass_ptr(temp1_recv_klass);
313 }
314 if (VerifyMethodHandles && iid != vmIntrinsics::_linkToInterface) {
315 Label L_ok;
316 Register temp2_defc = temp2;
317 __ load_heap_oop_not_null(temp2_defc, NONZERO(java_lang_invoke_MemberName::clazz_offset_in_bytes()), member_reg);
318 load_klass_from_Class(_masm, temp2_defc, temp3, temp4);
319 __ verify_klass_ptr(temp2_defc);
320 __ check_klass_subtype(temp1_recv_klass, temp2_defc, temp3, temp4, L_ok);
321 // If we get here, the type check failed!
322 __ stop("receiver class disagrees with MemberName.clazz");
323 __ BIND(L_ok);
324 }
325 BLOCK_COMMENT("} check_receiver");
326 }
327 if (iid == vmIntrinsics::_linkToSpecial ||
328 iid == vmIntrinsics::_linkToStatic) {
329 DEBUG_ONLY(temp1_recv_klass = noreg); // these guys didn't load the recv_klass
330 }
331
332 // Live registers at this point:
333 // member_reg - MemberName that was the trailing argument
334 // temp1_recv_klass - klass of stacked receiver, if needed
335 // O5_savedSP - interpreter linkage (if interpreted)
336 // O0..O5 - compiler arguments (if compiled)
337
338 Label L_incompatible_class_change_error;
339 switch (iid) {
340 case vmIntrinsics::_linkToSpecial:
341 if (VerifyMethodHandles) {
342 verify_ref_kind(_masm, JVM_REF_invokeSpecial, member_reg, temp2);
343 }
344 __ ld(R19_method, NONZERO(java_lang_invoke_MemberName::vmtarget_offset_in_bytes()), member_reg);
345 break;
346
347 case vmIntrinsics::_linkToStatic:
348 if (VerifyMethodHandles) {
349 verify_ref_kind(_masm, JVM_REF_invokeStatic, member_reg, temp2);
350 }
351 __ ld(R19_method, NONZERO(java_lang_invoke_MemberName::vmtarget_offset_in_bytes()), member_reg);
352 break;
353
354 case vmIntrinsics::_linkToVirtual:
355 {
356 // same as TemplateTable::invokevirtual,
357 // minus the CP setup and profiling:
358
359 if (VerifyMethodHandles) {
360 verify_ref_kind(_masm, JVM_REF_invokeVirtual, member_reg, temp2);
361 }
362
363 // pick out the vtable index from the MemberName, and then we can discard it:
364 Register temp2_index = temp2;
365 __ ld(temp2_index, NONZERO(java_lang_invoke_MemberName::vmindex_offset_in_bytes()), member_reg);
366
367 if (VerifyMethodHandles) {
368 Label L_index_ok;
369 __ cmpdi(CCR1, temp2_index, 0);
370 __ bge(CCR1, L_index_ok);
371 __ stop("no virtual index");
372 __ BIND(L_index_ok);
373 }
374
375 // Note: The verifier invariants allow us to ignore MemberName.clazz and vmtarget
376 // at this point. And VerifyMethodHandles has already checked clazz, if needed.
377
378 // get target Method* & entry point
379 __ lookup_virtual_method(temp1_recv_klass, temp2_index, R19_method);
380 break;
381 }
382
383 case vmIntrinsics::_linkToInterface:
384 {
385 // same as TemplateTable::invokeinterface
386 // (minus the CP setup and profiling, with different argument motion)
387 if (VerifyMethodHandles) {
388 verify_ref_kind(_masm, JVM_REF_invokeInterface, member_reg, temp2);
389 }
390
391 Register temp2_intf = temp2;
392 __ load_heap_oop_not_null(temp2_intf, NONZERO(java_lang_invoke_MemberName::clazz_offset_in_bytes()), member_reg);
393 load_klass_from_Class(_masm, temp2_intf, temp3, temp4);
394 __ verify_klass_ptr(temp2_intf);
395
396 Register vtable_index = R19_method;
397 __ ld(vtable_index, NONZERO(java_lang_invoke_MemberName::vmindex_offset_in_bytes()), member_reg);
398 if (VerifyMethodHandles) {
399 Label L_index_ok;
400 __ cmpdi(CCR1, vtable_index, 0);
401 __ bge(CCR1, L_index_ok);
402 __ stop("invalid vtable index for MH.invokeInterface");
403 __ BIND(L_index_ok);
404 }
405
406 // given intf, index, and recv klass, dispatch to the implementation method
407 __ lookup_interface_method(temp1_recv_klass, temp2_intf,
408 // note: next two args must be the same:
409 vtable_index, R19_method,
410 temp3, temp4,
411 L_incompatible_class_change_error);
412 break;
413 }
414
415 default:
416 fatal(err_msg_res("unexpected intrinsic %d: %s", iid, vmIntrinsics::name_at(iid)));
417 break;
418 }
419
420 // Live at this point:
421 // R19_method
422 // O5_savedSP (if interpreted)
423
424 // After figuring out which concrete method to call, jump into it.
425 // Note that this works in the interpreter with no data motion.
426 // But the compiled version will require that rcx_recv be shifted out.
427 __ verify_method_ptr(R19_method);
428 jump_from_method_handle(_masm, R19_method, temp1, temp2, for_compiler_entry);
429
430 if (iid == vmIntrinsics::_linkToInterface) {
431 __ BIND(L_incompatible_class_change_error);
432 __ load_const_optimized(temp1, StubRoutines::throw_IncompatibleClassChangeError_entry());
433 __ mtctr(temp1);
434 __ bctr();
435 }
436 }
437 }
438
439 #ifndef PRODUCT
440 void trace_method_handle_stub(const char* adaptername,
441 oopDesc* mh,
442 intptr_t* entry_sp,
443 intptr_t* saved_regs) {
444
445 bool has_mh = (strstr(adaptername, "/static") == NULL &&
446 strstr(adaptername, "linkTo") == NULL); // static linkers don't have MH
447 const char* mh_reg_name = has_mh ? "R23_method_handle" : "G23";
448 tty->print_cr("MH %s %s="INTPTR_FORMAT " sp=" INTPTR_FORMAT,
449 adaptername, mh_reg_name, (intptr_t) mh, entry_sp);
450
451 if (Verbose) {
452 tty->print_cr("Registers:");
453 const int abi_offset = frame::abi_112_size / 8;
454 for (int i = R3->encoding(); i <= R13->encoding(); i++) {
455 Register r = as_Register(i);
456 int count = i - R3->encoding();
457 // The registers are stored in reverse order on the stack (by save_volatile_gprs(R1_SP, abi_112_size)).
458 tty->print("%3s=" PTR_FORMAT, r->name(), saved_regs[abi_offset + count]);
459 if ((count + 1) % 4 == 0) {
460 tty->cr();
461 } else {
462 tty->print(", ");
463 }
464 }
465 tty->cr();
466
467 {
468 // dumping last frame with frame::describe
469
470 JavaThread* p = JavaThread::active();
471
472 ResourceMark rm;
473 PRESERVE_EXCEPTION_MARK; // may not be needed, but safer and inexpensive here
474 FrameValues values;
475
476 // Note: We want to allow trace_method_handle from any call site.
477 // While trace_method_handle creates a frame, it may be entered
478 // without a PC on the stack top (e.g. not just after a call).
479 // Walking that frame could lead to failures due to that invalid PC.
480 // => carefully detect that frame when doing the stack walking
481
482 // Current C frame
483 frame cur_frame = os::current_frame();
484
485 // Robust search of trace_calling_frame (independent of inlining).
486 // Assumes saved_regs comes from a pusha in the trace_calling_frame.
487 assert(cur_frame.sp() < saved_regs, "registers not saved on stack ?");
488 frame trace_calling_frame = os::get_sender_for_C_frame(&cur_frame);
489 while (trace_calling_frame.fp() < saved_regs) {
490 trace_calling_frame = os::get_sender_for_C_frame(&trace_calling_frame);
491 }
492
493 // safely create a frame and call frame::describe
494 intptr_t *dump_sp = trace_calling_frame.sender_sp();
495
496 frame dump_frame = frame(dump_sp);
497 dump_frame.describe(values, 1);
498
499 values.describe(-1, saved_regs, "raw top of stack");
500
501 tty->print_cr("Stack layout:");
502 values.print(p);
503 }
504
505 if (has_mh && mh->is_oop()) {
506 mh->print();
507 if (java_lang_invoke_MethodHandle::is_instance(mh)) {
508 if (java_lang_invoke_MethodHandle::form_offset_in_bytes() != 0)
509 java_lang_invoke_MethodHandle::form(mh)->print();
510 }
511 }
512 }
513 }
514
515 void MethodHandles::trace_method_handle(MacroAssembler* _masm, const char* adaptername) {
516 if (!TraceMethodHandles) return;
517
518 BLOCK_COMMENT("trace_method_handle {");
519
520 int nbytes_save = 10 * 8; // 10 volatile gprs
521 __ save_LR_CR(R0);
522 __ mr(R0, R1_SP); // saved_sp
523 assert(Assembler::is_simm(-nbytes_save, 16), "Overwriting R0");
524 // push_frame_abi112 only uses R0 if nbytes_save is wider than 16 bits.
525 __ push_frame_abi112(nbytes_save, R0);
526 __ save_volatile_gprs(R1_SP, frame::abi_112_size); // Except R0.
527
528 __ load_const(R3_ARG1, (address)adaptername);
529 __ mr(R4_ARG2, R23_method_handle);
530 __ mr(R5_ARG3, R0); // saved_sp
531 __ mr(R6_ARG4, R1_SP);
532 __ call_VM_leaf(CAST_FROM_FN_PTR(address, trace_method_handle_stub));
533
534 __ restore_volatile_gprs(R1_SP, 112); // except R0
535 __ pop_frame();
536 __ restore_LR_CR(R0);
537
538 BLOCK_COMMENT("} trace_method_handle");
539 }
540 #endif // PRODUCT