comparison src/cpu/x86/vm/templateInterpreter_x86_64.cpp @ 6725:da91efe96a93

6964458: Reimplement class meta-data storage to use native memory Summary: Remove PermGen, allocate meta-data in metaspace linked to class loaders, rewrite GC walking, rewrite and rename metadata to be C++ classes Reviewed-by: jmasa, stefank, never, coleenp, kvn, brutisso, mgerdin, dholmes, jrose, twisti, roland Contributed-by: jmasa <jon.masamitsu@oracle.com>, stefank <stefan.karlsson@oracle.com>, mgerdin <mikael.gerdin@oracle.com>, never <tom.rodriguez@oracle.com>
author coleenp
date Sat, 01 Sep 2012 13:25:18 -0400
parents 1d7922586cf6
children e522a00b91aa cd3d6a6b95d9
comparison
equal deleted inserted replaced
6724:36d1d483d5d6 6725:da91efe96a93
28 #include "interpreter/interpreter.hpp" 28 #include "interpreter/interpreter.hpp"
29 #include "interpreter/interpreterGenerator.hpp" 29 #include "interpreter/interpreterGenerator.hpp"
30 #include "interpreter/interpreterRuntime.hpp" 30 #include "interpreter/interpreterRuntime.hpp"
31 #include "interpreter/templateTable.hpp" 31 #include "interpreter/templateTable.hpp"
32 #include "oops/arrayOop.hpp" 32 #include "oops/arrayOop.hpp"
33 #include "oops/methodDataOop.hpp" 33 #include "oops/methodData.hpp"
34 #include "oops/methodOop.hpp" 34 #include "oops/method.hpp"
35 #include "oops/oop.inline.hpp" 35 #include "oops/oop.inline.hpp"
36 #include "prims/jvmtiExport.hpp" 36 #include "prims/jvmtiExport.hpp"
37 #include "prims/jvmtiThreadState.hpp" 37 #include "prims/jvmtiThreadState.hpp"
38 #include "runtime/arguments.hpp" 38 #include "runtime/arguments.hpp"
39 #include "runtime/deoptimization.hpp" 39 #include "runtime/deoptimization.hpp"
183 } 183 }
184 __ get_cache_and_index_at_bcp(rbx, rcx, 1, sizeof(u2)); 184 __ get_cache_and_index_at_bcp(rbx, rcx, 1, sizeof(u2));
185 __ bind(L_got_cache); 185 __ bind(L_got_cache);
186 __ movl(rbx, Address(rbx, rcx, 186 __ movl(rbx, Address(rbx, rcx,
187 Address::times_ptr, 187 Address::times_ptr,
188 in_bytes(constantPoolCacheOopDesc::base_offset()) + 188 in_bytes(ConstantPoolCache::base_offset()) +
189 3 * wordSize)); 189 3 * wordSize));
190 __ andl(rbx, 0xFF); 190 __ andl(rbx, 0xFF);
191 __ lea(rsp, Address(rsp, rbx, Address::times_8)); 191 __ lea(rsp, Address(rsp, rbx, Address::times_8));
192 __ dispatch_next(state, step); 192 __ dispatch_next(state, step);
193 193
297 // 297 //
298 void InterpreterGenerator::generate_counter_incr( 298 void InterpreterGenerator::generate_counter_incr(
299 Label* overflow, 299 Label* overflow,
300 Label* profile_method, 300 Label* profile_method,
301 Label* profile_method_continue) { 301 Label* profile_method_continue) {
302 const Address invocation_counter(rbx, in_bytes(methodOopDesc::invocation_counter_offset()) + 302 const Address invocation_counter(rbx, in_bytes(Method::invocation_counter_offset()) +
303 in_bytes(InvocationCounter::counter_offset())); 303 in_bytes(InvocationCounter::counter_offset()));
304 // Note: In tiered we increment either counters in methodOop or in MDO depending if we're profiling or not. 304 // Note: In tiered we increment either counters in Method* or in MDO depending if we're profiling or not.
305 if (TieredCompilation) { 305 if (TieredCompilation) {
306 int increment = InvocationCounter::count_increment; 306 int increment = InvocationCounter::count_increment;
307 int mask = ((1 << Tier0InvokeNotifyFreqLog) - 1) << InvocationCounter::count_shift; 307 int mask = ((1 << Tier0InvokeNotifyFreqLog) - 1) << InvocationCounter::count_shift;
308 Label no_mdo, done; 308 Label no_mdo, done;
309 if (ProfileInterpreter) { 309 if (ProfileInterpreter) {
310 // Are we profiling? 310 // Are we profiling?
311 __ movptr(rax, Address(rbx, methodOopDesc::method_data_offset())); 311 __ movptr(rax, Address(rbx, Method::method_data_offset()));
312 __ testptr(rax, rax); 312 __ testptr(rax, rax);
313 __ jccb(Assembler::zero, no_mdo); 313 __ jccb(Assembler::zero, no_mdo);
314 // Increment counter in the MDO 314 // Increment counter in the MDO
315 const Address mdo_invocation_counter(rax, in_bytes(methodDataOopDesc::invocation_counter_offset()) + 315 const Address mdo_invocation_counter(rax, in_bytes(MethodData::invocation_counter_offset()) +
316 in_bytes(InvocationCounter::counter_offset())); 316 in_bytes(InvocationCounter::counter_offset()));
317 __ increment_mask_and_jump(mdo_invocation_counter, increment, mask, rcx, false, Assembler::zero, overflow); 317 __ increment_mask_and_jump(mdo_invocation_counter, increment, mask, rcx, false, Assembler::zero, overflow);
318 __ jmpb(done); 318 __ jmpb(done);
319 } 319 }
320 __ bind(no_mdo); 320 __ bind(no_mdo);
321 // Increment counter in methodOop (we don't need to load it, it's in ecx). 321 // Increment counter in Method* (we don't need to load it, it's in ecx).
322 __ increment_mask_and_jump(invocation_counter, increment, mask, rcx, true, Assembler::zero, overflow); 322 __ increment_mask_and_jump(invocation_counter, increment, mask, rcx, true, Assembler::zero, overflow);
323 __ bind(done); 323 __ bind(done);
324 } else { 324 } else {
325 const Address backedge_counter(rbx, 325 const Address backedge_counter(rbx,
326 methodOopDesc::backedge_counter_offset() + 326 Method::backedge_counter_offset() +
327 InvocationCounter::counter_offset()); 327 InvocationCounter::counter_offset());
328 328
329 if (ProfileInterpreter) { // %%% Merge this into methodDataOop 329 if (ProfileInterpreter) { // %%% Merge this into MethodData*
330 __ incrementl(Address(rbx, 330 __ incrementl(Address(rbx,
331 methodOopDesc::interpreter_invocation_counter_offset())); 331 Method::interpreter_invocation_counter_offset()));
332 } 332 }
333 // Update standard invocation counters 333 // Update standard invocation counters
334 __ movl(rax, backedge_counter); // load backedge counter 334 __ movl(rax, backedge_counter); // load backedge counter
335 335
336 __ incrementl(rcx, InvocationCounter::count_increment); 336 __ incrementl(rcx, InvocationCounter::count_increment);
368 // On return (i.e. jump to entry_point) [ back to invocation of interpreter ] 368 // On return (i.e. jump to entry_point) [ back to invocation of interpreter ]
369 // Everything as it was on entry 369 // Everything as it was on entry
370 // rdx is not restored. Doesn't appear to really be set. 370 // rdx is not restored. Doesn't appear to really be set.
371 371
372 const Address size_of_parameters(rbx, 372 const Address size_of_parameters(rbx,
373 methodOopDesc::size_of_parameters_offset()); 373 Method::size_of_parameters_offset());
374 374
375 // InterpreterRuntime::frequency_counter_overflow takes two 375 // InterpreterRuntime::frequency_counter_overflow takes two
376 // arguments, the first (thread) is passed by call_VM, the second 376 // arguments, the first (thread) is passed by call_VM, the second
377 // indicates if the counter overflow occurs at a backwards branch 377 // indicates if the counter overflow occurs at a backwards branch
378 // (NULL bcp). We pass zero for it. The call returns the address 378 // (NULL bcp). We pass zero for it. The call returns the address
383 __ call_VM(noreg, 383 __ call_VM(noreg,
384 CAST_FROM_FN_PTR(address, 384 CAST_FROM_FN_PTR(address,
385 InterpreterRuntime::frequency_counter_overflow), 385 InterpreterRuntime::frequency_counter_overflow),
386 c_rarg1); 386 c_rarg1);
387 387
388 __ movptr(rbx, Address(rbp, method_offset)); // restore methodOop 388 __ movptr(rbx, Address(rbp, method_offset)); // restore Method*
389 // Preserve invariant that r13/r14 contain bcp/locals of sender frame 389 // Preserve invariant that r13/r14 contain bcp/locals of sender frame
390 // and jump to the interpreted entry. 390 // and jump to the interpreted entry.
391 __ jmp(*do_continue, relocInfo::none); 391 __ jmp(*do_continue, relocInfo::none);
392 } 392 }
393 393
399 // obvious in generate_method_entry) so the guard should work for them 399 // obvious in generate_method_entry) so the guard should work for them
400 // too. 400 // too.
401 // 401 //
402 // Args: 402 // Args:
403 // rdx: number of additional locals this frame needs (what we must check) 403 // rdx: number of additional locals this frame needs (what we must check)
404 // rbx: methodOop 404 // rbx: Method*
405 // 405 //
406 // Kills: 406 // Kills:
407 // rax 407 // rax
408 void InterpreterGenerator::generate_stack_overflow_check(void) { 408 void InterpreterGenerator::generate_stack_overflow_check(void) {
409 409
485 } 485 }
486 486
487 // Allocate monitor and lock method (asm interpreter) 487 // Allocate monitor and lock method (asm interpreter)
488 // 488 //
489 // Args: 489 // Args:
490 // rbx: methodOop 490 // rbx: Method*
491 // r14: locals 491 // r14: locals
492 // 492 //
493 // Kills: 493 // Kills:
494 // rax 494 // rax
495 // c_rarg0, c_rarg1, c_rarg2, c_rarg3, ...(param regs) 495 // c_rarg0, c_rarg1, c_rarg2, c_rarg3, ...(param regs)
496 // rscratch1, rscratch2 (scratch regs) 496 // rscratch1, rscratch2 (scratch regs)
497 void InterpreterGenerator::lock_method(void) { 497 void InterpreterGenerator::lock_method(void) {
498 // synchronize method 498 // synchronize method
499 const Address access_flags(rbx, methodOopDesc::access_flags_offset()); 499 const Address access_flags(rbx, Method::access_flags_offset());
500 const Address monitor_block_top( 500 const Address monitor_block_top(
501 rbp, 501 rbp,
502 frame::interpreter_frame_monitor_block_top_offset * wordSize); 502 frame::interpreter_frame_monitor_block_top_offset * wordSize);
503 const int entry_size = frame::interpreter_frame_monitor_size() * wordSize; 503 const int entry_size = frame::interpreter_frame_monitor_size() * wordSize;
504 504
520 __ movl(rax, access_flags); 520 __ movl(rax, access_flags);
521 __ testl(rax, JVM_ACC_STATIC); 521 __ testl(rax, JVM_ACC_STATIC);
522 // get receiver (assume this is frequent case) 522 // get receiver (assume this is frequent case)
523 __ movptr(rax, Address(r14, Interpreter::local_offset_in_bytes(0))); 523 __ movptr(rax, Address(r14, Interpreter::local_offset_in_bytes(0)));
524 __ jcc(Assembler::zero, done); 524 __ jcc(Assembler::zero, done);
525 __ movptr(rax, Address(rbx, methodOopDesc::const_offset())); 525 __ movptr(rax, Address(rbx, Method::const_offset()));
526 __ movptr(rax, Address(rax, constMethodOopDesc::constants_offset())); 526 __ movptr(rax, Address(rax, ConstMethod::constants_offset()));
527 __ movptr(rax, Address(rax, 527 __ movptr(rax, Address(rax,
528 constantPoolOopDesc::pool_holder_offset_in_bytes())); 528 ConstantPool::pool_holder_offset_in_bytes()));
529 __ movptr(rax, Address(rax, mirror_offset)); 529 __ movptr(rax, Address(rax, mirror_offset));
530 530
531 #ifdef ASSERT 531 #ifdef ASSERT
532 { 532 {
533 Label L; 533 Label L;
553 // Generate a fixed interpreter frame. This is identical setup for 553 // Generate a fixed interpreter frame. This is identical setup for
554 // interpreted methods and for native methods hence the shared code. 554 // interpreted methods and for native methods hence the shared code.
555 // 555 //
556 // Args: 556 // Args:
557 // rax: return address 557 // rax: return address
558 // rbx: methodOop 558 // rbx: Method*
559 // r14: pointer to locals 559 // r14: pointer to locals
560 // r13: sender sp 560 // r13: sender sp
561 // rdx: cp cache 561 // rdx: cp cache
562 void TemplateInterpreterGenerator::generate_fixed_frame(bool native_call) { 562 void TemplateInterpreterGenerator::generate_fixed_frame(bool native_call) {
563 // initialize fixed part of activation frame 563 // initialize fixed part of activation frame
564 __ push(rax); // save return address 564 __ push(rax); // save return address
565 __ enter(); // save old & set new rbp 565 __ enter(); // save old & set new rbp
566 __ push(r13); // set sender sp 566 __ push(r13); // set sender sp
567 __ push((int)NULL_WORD); // leave last_sp as null 567 __ push((int)NULL_WORD); // leave last_sp as null
568 __ movptr(r13, Address(rbx, methodOopDesc::const_offset())); // get constMethodOop 568 __ movptr(r13, Address(rbx, Method::const_offset())); // get ConstMethod*
569 __ lea(r13, Address(r13, constMethodOopDesc::codes_offset())); // get codebase 569 __ lea(r13, Address(r13, ConstMethod::codes_offset())); // get codebase
570 __ push(rbx); // save methodOop 570 __ push(rbx); // save Method*
571 if (ProfileInterpreter) { 571 if (ProfileInterpreter) {
572 Label method_data_continue; 572 Label method_data_continue;
573 __ movptr(rdx, Address(rbx, in_bytes(methodOopDesc::method_data_offset()))); 573 __ movptr(rdx, Address(rbx, in_bytes(Method::method_data_offset())));
574 __ testptr(rdx, rdx); 574 __ testptr(rdx, rdx);
575 __ jcc(Assembler::zero, method_data_continue); 575 __ jcc(Assembler::zero, method_data_continue);
576 __ addptr(rdx, in_bytes(methodDataOopDesc::data_offset())); 576 __ addptr(rdx, in_bytes(MethodData::data_offset()));
577 __ bind(method_data_continue); 577 __ bind(method_data_continue);
578 __ push(rdx); // set the mdp (method data pointer) 578 __ push(rdx); // set the mdp (method data pointer)
579 } else { 579 } else {
580 __ push(0); 580 __ push(0);
581 } 581 }
582 582
583 __ movptr(rdx, Address(rbx, methodOopDesc::const_offset())); 583 __ movptr(rdx, Address(rbx, Method::const_offset()));
584 __ movptr(rdx, Address(rdx, constMethodOopDesc::constants_offset())); 584 __ movptr(rdx, Address(rdx, ConstMethod::constants_offset()));
585 __ movptr(rdx, Address(rdx, constantPoolOopDesc::cache_offset_in_bytes())); 585 __ movptr(rdx, Address(rdx, ConstantPool::cache_offset_in_bytes()));
586 __ push(rdx); // set constant pool cache 586 __ push(rdx); // set constant pool cache
587 __ push(r14); // set locals pointer 587 __ push(r14); // set locals pointer
588 if (native_call) { 588 if (native_call) {
589 __ push(0); // no bcp 589 __ push(0); // no bcp
590 } else { 590 } else {
602 // 602 //
603 603
604 // Call an accessor method (assuming it is resolved, otherwise drop 604 // Call an accessor method (assuming it is resolved, otherwise drop
605 // into vanilla (slow path) entry 605 // into vanilla (slow path) entry
606 address InterpreterGenerator::generate_accessor_entry(void) { 606 address InterpreterGenerator::generate_accessor_entry(void) {
607 // rbx: methodOop 607 // rbx: Method*
608 608
609 // r13: senderSP must preserve for slow path, set SP to it on fast path 609 // r13: senderSP must preserve for slow path, set SP to it on fast path
610 610
611 address entry_point = __ pc(); 611 address entry_point = __ pc();
612 Label xreturn_path; 612 Label xreturn_path;
630 // check if local 0 != NULL and read field 630 // check if local 0 != NULL and read field
631 __ testptr(rax, rax); 631 __ testptr(rax, rax);
632 __ jcc(Assembler::zero, slow_path); 632 __ jcc(Assembler::zero, slow_path);
633 633
634 // read first instruction word and extract bytecode @ 1 and index @ 2 634 // read first instruction word and extract bytecode @ 1 and index @ 2
635 __ movptr(rdx, Address(rbx, methodOopDesc::const_offset())); 635 __ movptr(rdx, Address(rbx, Method::const_offset()));
636 __ movptr(rdi, Address(rdx, constMethodOopDesc::constants_offset())); 636 __ movptr(rdi, Address(rdx, ConstMethod::constants_offset()));
637 __ movl(rdx, Address(rdx, constMethodOopDesc::codes_offset())); 637 __ movl(rdx, Address(rdx, ConstMethod::codes_offset()));
638 // Shift codes right to get the index on the right. 638 // Shift codes right to get the index on the right.
639 // The bytecode fetched looks like <index><0xb4><0x2a> 639 // The bytecode fetched looks like <index><0xb4><0x2a>
640 __ shrl(rdx, 2 * BitsPerByte); 640 __ shrl(rdx, 2 * BitsPerByte);
641 __ shll(rdx, exact_log2(in_words(ConstantPoolCacheEntry::size()))); 641 __ shll(rdx, exact_log2(in_words(ConstantPoolCacheEntry::size())));
642 __ movptr(rdi, Address(rdi, constantPoolOopDesc::cache_offset_in_bytes())); 642 __ movptr(rdi, Address(rdi, ConstantPool::cache_offset_in_bytes()));
643 643
644 // rax: local 0 644 // rax: local 0
645 // rbx: method 645 // rbx: method
646 // rdx: constant pool cache index 646 // rdx: constant pool cache index
647 // rdi: constant pool cache 647 // rdi: constant pool cache
653 "adjust shift below"); 653 "adjust shift below");
654 __ movl(rcx, 654 __ movl(rcx,
655 Address(rdi, 655 Address(rdi,
656 rdx, 656 rdx,
657 Address::times_8, 657 Address::times_8,
658 constantPoolCacheOopDesc::base_offset() + 658 ConstantPoolCache::base_offset() +
659 ConstantPoolCacheEntry::indices_offset())); 659 ConstantPoolCacheEntry::indices_offset()));
660 __ shrl(rcx, 2 * BitsPerByte); 660 __ shrl(rcx, 2 * BitsPerByte);
661 __ andl(rcx, 0xFF); 661 __ andl(rcx, 0xFF);
662 __ cmpl(rcx, Bytecodes::_getfield); 662 __ cmpl(rcx, Bytecodes::_getfield);
663 __ jcc(Assembler::notEqual, slow_path); 663 __ jcc(Assembler::notEqual, slow_path);
665 // Note: constant pool entry is not valid before bytecode is resolved 665 // Note: constant pool entry is not valid before bytecode is resolved
666 __ movptr(rcx, 666 __ movptr(rcx,
667 Address(rdi, 667 Address(rdi,
668 rdx, 668 rdx,
669 Address::times_8, 669 Address::times_8,
670 constantPoolCacheOopDesc::base_offset() + 670 ConstantPoolCache::base_offset() +
671 ConstantPoolCacheEntry::f2_offset())); 671 ConstantPoolCacheEntry::f2_offset()));
672 // edx: flags 672 // edx: flags
673 __ movl(rdx, 673 __ movl(rdx,
674 Address(rdi, 674 Address(rdi,
675 rdx, 675 rdx,
676 Address::times_8, 676 Address::times_8,
677 constantPoolCacheOopDesc::base_offset() + 677 ConstantPoolCache::base_offset() +
678 ConstantPoolCacheEntry::flags_offset())); 678 ConstantPoolCacheEntry::flags_offset()));
679 679
680 Label notObj, notInt, notByte, notShort; 680 Label notObj, notInt, notByte, notShort;
681 const Address field_address(rax, rcx, Address::times_1); 681 const Address field_address(rax, rcx, Address::times_1);
682 682
769 // and so we don't need to call the G1 pre-barrier. Thus we can use the 769 // and so we don't need to call the G1 pre-barrier. Thus we can use the
770 // regular method entry code to generate the NPE. 770 // regular method entry code to generate the NPE.
771 // 771 //
772 // This code is based on generate_accessor_entry. 772 // This code is based on generate_accessor_entry.
773 // 773 //
774 // rbx: methodOop 774 // rbx: Method*
775 775
776 // r13: senderSP must preserve for slow path, set SP to it on fast path 776 // r13: senderSP must preserve for slow path, set SP to it on fast path
777 777
778 address entry = __ pc(); 778 address entry = __ pc();
779 779
837 // native method than the typical interpreter frame setup. 837 // native method than the typical interpreter frame setup.
838 address InterpreterGenerator::generate_native_entry(bool synchronized) { 838 address InterpreterGenerator::generate_native_entry(bool synchronized) {
839 // determine code generation flags 839 // determine code generation flags
840 bool inc_counter = UseCompiler || CountCompiledCalls; 840 bool inc_counter = UseCompiler || CountCompiledCalls;
841 841
842 // rbx: methodOop 842 // rbx: Method*
843 // r13: sender sp 843 // r13: sender sp
844 844
845 address entry_point = __ pc(); 845 address entry_point = __ pc();
846 846
847 const Address size_of_parameters(rbx, methodOopDesc:: 847 const Address size_of_parameters(rbx, Method::
848 size_of_parameters_offset()); 848 size_of_parameters_offset());
849 const Address invocation_counter(rbx, methodOopDesc:: 849 const Address invocation_counter(rbx, Method::
850 invocation_counter_offset() + 850 invocation_counter_offset() +
851 InvocationCounter::counter_offset()); 851 InvocationCounter::counter_offset());
852 const Address access_flags (rbx, methodOopDesc::access_flags_offset()); 852 const Address access_flags (rbx, Method::access_flags_offset());
853 853
854 // get parameter size (always needed) 854 // get parameter size (always needed)
855 __ load_unsigned_short(rcx, size_of_parameters); 855 __ load_unsigned_short(rcx, size_of_parameters);
856 856
857 // native calls don't need the stack size check since they have no 857 // native calls don't need the stack size check since they have no
858 // expression stack and the arguments are already on the stack and 858 // expression stack and the arguments are already on the stack and
859 // we only add a handful of words to the stack 859 // we only add a handful of words to the stack
860 860
861 // rbx: methodOop 861 // rbx: Method*
862 // rcx: size of parameters 862 // rcx: size of parameters
863 // r13: sender sp 863 // r13: sender sp
864 __ pop(rax); // get return address 864 __ pop(rax); // get return address
865 865
866 // for natives the size of locals is zero 866 // for natives the size of locals is zero
965 const Register method = rbx; 965 const Register method = rbx;
966 const Register t = r11; 966 const Register t = r11;
967 967
968 // allocate space for parameters 968 // allocate space for parameters
969 __ get_method(method); 969 __ get_method(method);
970 __ verify_oop(method);
971 __ load_unsigned_short(t, 970 __ load_unsigned_short(t,
972 Address(method, 971 Address(method,
973 methodOopDesc::size_of_parameters_offset())); 972 Method::size_of_parameters_offset()));
974 __ shll(t, Interpreter::logStackElementSize); 973 __ shll(t, Interpreter::logStackElementSize);
975 974
976 __ subptr(rsp, t); 975 __ subptr(rsp, t);
977 __ subptr(rsp, frame::arg_reg_save_area_bytes); // windows 976 __ subptr(rsp, frame::arg_reg_save_area_bytes); // windows
978 __ andptr(rsp, -16); // must be 16 byte boundary (see amd64 ABI) 977 __ andptr(rsp, -16); // must be 16 byte boundary (see amd64 ABI)
979 978
980 // get signature handler 979 // get signature handler
981 { 980 {
982 Label L; 981 Label L;
983 __ movptr(t, Address(method, methodOopDesc::signature_handler_offset())); 982 __ movptr(t, Address(method, Method::signature_handler_offset()));
984 __ testptr(t, t); 983 __ testptr(t, t);
985 __ jcc(Assembler::notZero, L); 984 __ jcc(Assembler::notZero, L);
986 __ call_VM(noreg, 985 __ call_VM(noreg,
987 CAST_FROM_FN_PTR(address, 986 CAST_FROM_FN_PTR(address,
988 InterpreterRuntime::prepare_native_call), 987 InterpreterRuntime::prepare_native_call),
989 method); 988 method);
990 __ get_method(method); 989 __ get_method(method);
991 __ movptr(t, Address(method, methodOopDesc::signature_handler_offset())); 990 __ movptr(t, Address(method, Method::signature_handler_offset()));
992 __ bind(L); 991 __ bind(L);
993 } 992 }
994 993
995 // call signature handler 994 // call signature handler
996 assert(InterpreterRuntime::SignatureHandlerGenerator::from() == r14, 995 assert(InterpreterRuntime::SignatureHandlerGenerator::from() == r14,
1016 1015
1017 // pass mirror handle if static call 1016 // pass mirror handle if static call
1018 { 1017 {
1019 Label L; 1018 Label L;
1020 const int mirror_offset = in_bytes(Klass::java_mirror_offset()); 1019 const int mirror_offset = in_bytes(Klass::java_mirror_offset());
1021 __ movl(t, Address(method, methodOopDesc::access_flags_offset())); 1020 __ movl(t, Address(method, Method::access_flags_offset()));
1022 __ testl(t, JVM_ACC_STATIC); 1021 __ testl(t, JVM_ACC_STATIC);
1023 __ jcc(Assembler::zero, L); 1022 __ jcc(Assembler::zero, L);
1024 // get mirror 1023 // get mirror
1025 __ movptr(t, Address(method, methodOopDesc::const_offset())); 1024 __ movptr(t, Address(method, Method::const_offset()));
1026 __ movptr(t, Address(t, constMethodOopDesc::constants_offset())); 1025 __ movptr(t, Address(t, ConstMethod::constants_offset()));
1027 __ movptr(t, Address(t, constantPoolOopDesc::pool_holder_offset_in_bytes())); 1026 __ movptr(t, Address(t, ConstantPool::pool_holder_offset_in_bytes()));
1028 __ movptr(t, Address(t, mirror_offset)); 1027 __ movptr(t, Address(t, mirror_offset));
1029 // copy mirror into activation frame 1028 // copy mirror into activation frame
1030 __ movptr(Address(rbp, frame::interpreter_frame_oop_temp_offset * wordSize), 1029 __ movptr(Address(rbp, frame::interpreter_frame_oop_temp_offset * wordSize),
1031 t); 1030 t);
1032 // pass handle to mirror 1031 // pass handle to mirror
1036 } 1035 }
1037 1036
1038 // get native function entry point 1037 // get native function entry point
1039 { 1038 {
1040 Label L; 1039 Label L;
1041 __ movptr(rax, Address(method, methodOopDesc::native_function_offset())); 1040 __ movptr(rax, Address(method, Method::native_function_offset()));
1042 ExternalAddress unsatisfied(SharedRuntime::native_method_throw_unsatisfied_link_error_entry()); 1041 ExternalAddress unsatisfied(SharedRuntime::native_method_throw_unsatisfied_link_error_entry());
1043 __ movptr(rscratch2, unsatisfied.addr()); 1042 __ movptr(rscratch2, unsatisfied.addr());
1044 __ cmpptr(rax, rscratch2); 1043 __ cmpptr(rax, rscratch2);
1045 __ jcc(Assembler::notEqual, L); 1044 __ jcc(Assembler::notEqual, L);
1046 __ call_VM(noreg, 1045 __ call_VM(noreg,
1047 CAST_FROM_FN_PTR(address, 1046 CAST_FROM_FN_PTR(address,
1048 InterpreterRuntime::prepare_native_call), 1047 InterpreterRuntime::prepare_native_call),
1049 method); 1048 method);
1050 __ get_method(method); 1049 __ get_method(method);
1051 __ verify_oop(method); 1050 __ movptr(rax, Address(method, Method::native_function_offset()));
1052 __ movptr(rax, Address(method, methodOopDesc::native_function_offset()));
1053 __ bind(L); 1051 __ bind(L);
1054 } 1052 }
1055 1053
1056 // pass JNIEnv 1054 // pass JNIEnv
1057 __ lea(c_rarg0, Address(r15_thread, JavaThread::jni_environment_offset())); 1055 __ lea(c_rarg0, Address(r15_thread, JavaThread::jni_environment_offset()));
1199 1197
1200 // The method register is junk from after the thread_in_native transition 1198 // The method register is junk from after the thread_in_native transition
1201 // until here. Also can't call_VM until the bcp has been 1199 // until here. Also can't call_VM until the bcp has been
1202 // restored. Need bcp for throwing exception below so get it now. 1200 // restored. Need bcp for throwing exception below so get it now.
1203 __ get_method(method); 1201 __ get_method(method);
1204 __ verify_oop(method);
1205 1202
1206 // restore r13 to have legal interpreter frame, i.e., bci == 0 <=> 1203 // restore r13 to have legal interpreter frame, i.e., bci == 0 <=>
1207 // r13 == code_base() 1204 // r13 == code_base()
1208 __ movptr(r13, Address(method, methodOopDesc::const_offset())); // get constMethodOop 1205 __ movptr(r13, Address(method, Method::const_offset())); // get ConstMethod*
1209 __ lea(r13, Address(r13, constMethodOopDesc::codes_offset())); // get codebase 1206 __ lea(r13, Address(r13, ConstMethod::codes_offset())); // get codebase
1210 // handle exceptions (exception handling will handle unlocking!) 1207 // handle exceptions (exception handling will handle unlocking!)
1211 { 1208 {
1212 Label L; 1209 Label L;
1213 __ cmpptr(Address(r15_thread, Thread::pending_exception_offset()), (int32_t) NULL_WORD); 1210 __ cmpptr(Address(r15_thread, Thread::pending_exception_offset()), (int32_t) NULL_WORD);
1214 __ jcc(Assembler::zero, L); 1211 __ jcc(Assembler::zero, L);
1224 } 1221 }
1225 1222
1226 // do unlocking if necessary 1223 // do unlocking if necessary
1227 { 1224 {
1228 Label L; 1225 Label L;
1229 __ movl(t, Address(method, methodOopDesc::access_flags_offset())); 1226 __ movl(t, Address(method, Method::access_flags_offset()));
1230 __ testl(t, JVM_ACC_SYNCHRONIZED); 1227 __ testl(t, JVM_ACC_SYNCHRONIZED);
1231 __ jcc(Assembler::zero, L); 1228 __ jcc(Assembler::zero, L);
1232 // the code below should be shared with interpreter macro 1229 // the code below should be shared with interpreter macro
1233 // assembler implementation 1230 // assembler implementation
1234 { 1231 {
1299 // 1296 //
1300 address InterpreterGenerator::generate_normal_entry(bool synchronized) { 1297 address InterpreterGenerator::generate_normal_entry(bool synchronized) {
1301 // determine code generation flags 1298 // determine code generation flags
1302 bool inc_counter = UseCompiler || CountCompiledCalls; 1299 bool inc_counter = UseCompiler || CountCompiledCalls;
1303 1300
1304 // ebx: methodOop 1301 // ebx: Method*
1305 // r13: sender sp 1302 // r13: sender sp
1306 address entry_point = __ pc(); 1303 address entry_point = __ pc();
1307 1304
1308 const Address size_of_parameters(rbx, 1305 const Address size_of_parameters(rbx,
1309 methodOopDesc::size_of_parameters_offset()); 1306 Method::size_of_parameters_offset());
1310 const Address size_of_locals(rbx, methodOopDesc::size_of_locals_offset()); 1307 const Address size_of_locals(rbx, Method::size_of_locals_offset());
1311 const Address invocation_counter(rbx, 1308 const Address invocation_counter(rbx,
1312 methodOopDesc::invocation_counter_offset() + 1309 Method::invocation_counter_offset() +
1313 InvocationCounter::counter_offset()); 1310 InvocationCounter::counter_offset());
1314 const Address access_flags(rbx, methodOopDesc::access_flags_offset()); 1311 const Address access_flags(rbx, Method::access_flags_offset());
1315 1312
1316 // get parameter size (always needed) 1313 // get parameter size (always needed)
1317 __ load_unsigned_short(rcx, size_of_parameters); 1314 __ load_unsigned_short(rcx, size_of_parameters);
1318 1315
1319 // rbx: methodOop 1316 // rbx: Method*
1320 // rcx: size of parameters 1317 // rcx: size of parameters
1321 // r13: sender_sp (could differ from sp+wordSize if we were called via c2i ) 1318 // r13: sender_sp (could differ from sp+wordSize if we were called via c2i )
1322 1319
1323 __ load_unsigned_short(rdx, size_of_locals); // get size of locals in words 1320 __ load_unsigned_short(rdx, size_of_locals); // get size of locals in words
1324 __ subl(rdx, rcx); // rdx = no. of additional locals 1321 __ subl(rdx, rcx); // rdx = no. of additional locals
1478 // When control flow reaches any of the entry types for the interpreter 1475 // When control flow reaches any of the entry types for the interpreter
1479 // the following holds -> 1476 // the following holds ->
1480 // 1477 //
1481 // Arguments: 1478 // Arguments:
1482 // 1479 //
1483 // rbx: methodOop 1480 // rbx: Method*
1484 // 1481 //
1485 // Stack layout immediately at entry 1482 // Stack layout immediately at entry
1486 // 1483 //
1487 // [ return address ] <--- rsp 1484 // [ return address ] <--- rsp
1488 // [ parameter n ] 1485 // [ parameter n ]
1503 // ... 1500 // ...
1504 // [ monitor entry ] 1501 // [ monitor entry ]
1505 // [ expr. stack bottom ] 1502 // [ expr. stack bottom ]
1506 // [ saved r13 ] 1503 // [ saved r13 ]
1507 // [ current r14 ] 1504 // [ current r14 ]
1508 // [ methodOop ] 1505 // [ Method* ]
1509 // [ saved ebp ] <--- rbp 1506 // [ saved ebp ] <--- rbp
1510 // [ return address ] 1507 // [ return address ]
1511 // [ local variable m ] 1508 // [ local variable m ]
1512 // ... 1509 // ...
1513 // [ local variable 1 ] 1510 // [ local variable 1 ]
1572 return true; 1569 return true;
1573 } 1570 }
1574 } 1571 }
1575 1572
1576 // How much stack a method activation needs in words. 1573 // How much stack a method activation needs in words.
1577 int AbstractInterpreter::size_top_interpreter_activation(methodOop method) { 1574 int AbstractInterpreter::size_top_interpreter_activation(Method* method) {
1578 const int entry_size = frame::interpreter_frame_monitor_size(); 1575 const int entry_size = frame::interpreter_frame_monitor_size();
1579 1576
1580 // total overhead size: entry_size + (saved rbp thru expr stack 1577 // total overhead size: entry_size + (saved rbp thru expr stack
1581 // bottom). be sure to change this if you add/subtract anything 1578 // bottom). be sure to change this if you add/subtract anything
1582 // to/from the overhead area 1579 // to/from the overhead area
1583 const int overhead_size = 1580 const int overhead_size =
1584 -(frame::interpreter_frame_initial_sp_offset) + entry_size; 1581 -(frame::interpreter_frame_initial_sp_offset) + entry_size;
1585 1582
1586 const int stub_code = frame::entry_frame_after_call_words; 1583 const int stub_code = frame::entry_frame_after_call_words;
1587 const int extra_stack = methodOopDesc::extra_stack_entries(); 1584 const int extra_stack = Method::extra_stack_entries();
1588 const int method_stack = (method->max_locals() + method->max_stack() + extra_stack) * 1585 const int method_stack = (method->max_locals() + method->max_stack() + extra_stack) *
1589 Interpreter::stackElementWords; 1586 Interpreter::stackElementWords;
1590 return (overhead_size + method_stack + stub_code); 1587 return (overhead_size + method_stack + stub_code);
1591 } 1588 }
1592 1589
1593 int AbstractInterpreter::layout_activation(methodOop method, 1590 int AbstractInterpreter::layout_activation(Method* method,
1594 int tempcount, 1591 int tempcount,
1595 int popframe_extra_args, 1592 int popframe_extra_args,
1596 int moncount, 1593 int moncount,
1597 int caller_actual_parameters, 1594 int caller_actual_parameters,
1598 int callee_param_count, 1595 int callee_param_count,
1753 __ jcc(Assembler::notZero, caller_not_deoptimized); 1750 __ jcc(Assembler::notZero, caller_not_deoptimized);
1754 1751
1755 // Compute size of arguments for saving when returning to 1752 // Compute size of arguments for saving when returning to
1756 // deoptimized caller 1753 // deoptimized caller
1757 __ get_method(rax); 1754 __ get_method(rax);
1758 __ load_unsigned_short(rax, Address(rax, in_bytes(methodOopDesc:: 1755 __ load_unsigned_short(rax, Address(rax, in_bytes(Method::
1759 size_of_parameters_offset()))); 1756 size_of_parameters_offset())));
1760 __ shll(rax, Interpreter::logStackElementSize); 1757 __ shll(rax, Interpreter::logStackElementSize);
1761 __ restore_locals(); // XXX do we need this? 1758 __ restore_locals(); // XXX do we need this?
1762 __ subptr(r14, rax); 1759 __ subptr(r14, rax);
1763 __ addptr(r14, wordSize); 1760 __ addptr(r14, wordSize);
1830 __ pop_ptr(rax); 1827 __ pop_ptr(rax);
1831 __ movptr(Address(r15_thread, JavaThread::vm_result_offset()), rax); 1828 __ movptr(Address(r15_thread, JavaThread::vm_result_offset()), rax);
1832 // remove the activation (without doing throws on illegalMonitorExceptions) 1829 // remove the activation (without doing throws on illegalMonitorExceptions)
1833 __ remove_activation(vtos, rdx, false, true, false); 1830 __ remove_activation(vtos, rdx, false, true, false);
1834 // restore exception 1831 // restore exception
1835 __ movptr(rax, Address(r15_thread, JavaThread::vm_result_offset())); 1832 __ get_vm_result(rax, r15_thread);
1836 __ movptr(Address(r15_thread, JavaThread::vm_result_offset()), (int32_t)NULL_WORD);
1837 __ verify_oop(rax);
1838 1833
1839 // In between activations - previous activation type unknown yet 1834 // In between activations - previous activation type unknown yet
1840 // compute continuation point - the continuation point expects the 1835 // compute continuation point - the continuation point expects the
1841 // following registers set up: 1836 // following registers set up:
1842 // 1837 //