comparison src/cpu/ppc/vm/interpreter_ppc.cpp @ 14726:92aa6797d639

Backed out merge changeset: b51e29501f30. Backed out merge revision to its first parent (8f483e200405).
author Doug Simon <doug.simon@oracle.com>
date Mon, 24 Mar 2014 21:30:43 +0100
parents 58cf34613a72
children
Columns: 14719:0bdd0d157040 (left) vs. 14726:92aa6797d639 (right)
1 /* 1 /*
2 * Copyright (c) 1997, 2014, Oracle and/or its affiliates. All rights reserved. 2 * Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
3 * Copyright 2012, 2014 SAP AG. All rights reserved. 3 * Copyright 2012, 2013 SAP AG. All rights reserved.
4 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
5 * 5 *
6 * This code is free software; you can redistribute it and/or modify it 6 * This code is free software; you can redistribute it and/or modify it
7 * under the terms of the GNU General Public License version 2 only, as 7 * under the terms of the GNU General Public License version 2 only, as
8 * published by the Free Software Foundation. 8 * published by the Free Software Foundation.
49 #include "utilities/debug.hpp" 49 #include "utilities/debug.hpp"
50 #ifdef COMPILER1 50 #ifdef COMPILER1
51 #include "c1/c1_Runtime1.hpp" 51 #include "c1/c1_Runtime1.hpp"
52 #endif 52 #endif
53 53
54 #ifndef CC_INTERP
55 #error "CC_INTERP must be defined on PPC"
56 #endif
57
54 #define __ _masm-> 58 #define __ _masm->
55 59
56 #ifdef PRODUCT 60 #ifdef PRODUCT
57 #define BLOCK_COMMENT(str) // nothing 61 #define BLOCK_COMMENT(str) // nothing
58 #else 62 #else
122 const Register argcnt = R26_tmp6; 126 const Register argcnt = R26_tmp6;
123 const Register intSlot = R27_tmp7; 127 const Register intSlot = R27_tmp7;
124 const Register target_sp = R28_tmp8; 128 const Register target_sp = R28_tmp8;
125 const FloatRegister floatSlot = F0; 129 const FloatRegister floatSlot = F0;
126 130
127 address entry = __ function_entry(); 131 address entry = __ emit_fd();
128 132
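Aside: both spellings of the entry emitter (function_entry() on the left, emit_fd() on the right) open the stub the same way on 64-bit PPC under the ELFv1 ABI, where a function begins with a function descriptor rather than with code. A minimal sketch of that descriptor, with illustrative field names (an assumption, not HotSpot's actual declaration):

    // ELFv1 PPC64 function descriptor: three doublewords a caller uses
    // to perform an indirect or cross-module call.
    struct FunctionDescriptor {
      void* entry_point; // address of the first real instruction
      void* toc;         // TOC base the callee expects in R2
      void* env;         // environment pointer; unused for C/C++
    };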
129 __ save_LR_CR(R0); 133 __ save_LR_CR(R0);
130 __ save_nonvolatile_gprs(R1_SP, _spill_nonvolatiles_neg(r14)); 134 __ save_nonvolatile_gprs(R1_SP, _spill_nonvolatiles_neg(r14));
131 // We use target_sp for storing arguments in the C frame. 135 // We use target_sp for storing arguments in the C frame.
132 __ mr(target_sp, R1_SP); 136 __ mr(target_sp, R1_SP);
133 __ push_frame_reg_args_nonvolatiles(0, R11_scratch1); 137 __ push_frame_abi112_nonvolatiles(0, R11_scratch1);
134 138
135 __ mr(arg_java, R3_ARG1); 139 __ mr(arg_java, R3_ARG1);
136 140
137 __ call_VM_leaf(CAST_FROM_FN_PTR(address, InterpreterRuntime::get_signature), R16_thread, R19_method); 141 __ call_VM_leaf(CAST_FROM_FN_PTR(address, InterpreterRuntime::get_signature), R16_thread, R19_method);
138 142
141 145
142 // Reload method, it may have moved. 146 // Reload method, it may have moved.
143 #ifdef CC_INTERP 147 #ifdef CC_INTERP
144 __ ld(R19_method, state_(_method)); 148 __ ld(R19_method, state_(_method));
145 #else 149 #else
146 __ ld(R19_method, 0, target_sp); 150 __ unimplemented("slow signature handler 1");
147 __ ld(R19_method, _ijava_state_neg(method), R19_method);
148 #endif 151 #endif
149 152
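Why the reload: the VM call above (get_signature) may reach a safepoint, and in this code's era a methodOop could be moved by GC, so the Method* cached in R19_method is refetched from a GC-updated slot instead of being reused. A minimal model of the pattern (assumption: the type and names here are illustrative only, not HotSpot's):

    struct InterpreterState { /* ... */ void* _method; };
    // Never trust a cached Method* across a VM call; refetch it, which is
    // what ld(R19_method, state_(_method)) does on the CC interpreter side.
    static void* reload_method(const InterpreterState* istate) {
      return istate->_method;
    }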
150 // Get the result handler. 153 // Get the result handler.
151 __ call_VM_leaf(CAST_FROM_FN_PTR(address, InterpreterRuntime::get_result_handler), R16_thread, R19_method); 154 __ call_VM_leaf(CAST_FROM_FN_PTR(address, InterpreterRuntime::get_result_handler), R16_thread, R19_method);
152 155
153 // Reload method, it may have moved. 156 // Reload method, it may have moved.
154 #ifdef CC_INTERP 157 #ifdef CC_INTERP
155 __ ld(R19_method, state_(_method)); 158 __ ld(R19_method, state_(_method));
156 #else 159 #else
157 __ ld(R19_method, 0, target_sp); 160 __ unimplemented("slow signature handler 2");
158 __ ld(R19_method, _ijava_state_neg(method), R19_method);
159 #endif 161 #endif
160 162
161 { 163 {
162 Label L; 164 Label L;
163 // test if static 165 // test if static
449 address entry = __ pc(); 451 address entry = __ pc();
450 452
451 // 453 //
452 // Registers alive 454 // Registers alive
453 // R16_thread - JavaThread* 455 // R16_thread - JavaThread*
454 // R19_method - callee's method (method to be invoked) 456 // R19_method - callee's methodOop (method to be invoked)
455 // R1_SP - SP prepared such that caller's outgoing args are near top 457 // R1_SP - SP prepared such that caller's outgoing args are near top
456 // LR - return address to caller 458 // LR - return address to caller
457 // 459 //
458 // Stack layout at this point: 460 // Stack layout at this point:
459 // 461 //
470 // our caller set up the JavaFrameAnchor. 472 // our caller set up the JavaFrameAnchor.
471 __ set_top_ijava_frame_at_SP_as_last_Java_frame(R1_SP, R12_scratch2/*tmp*/); 473 __ set_top_ijava_frame_at_SP_as_last_Java_frame(R1_SP, R12_scratch2/*tmp*/);
472 474
473 // Push a new C frame and save LR. 475 // Push a new C frame and save LR.
474 __ save_LR_CR(R0); 476 __ save_LR_CR(R0);
475 __ push_frame_reg_args(0, R11_scratch1); 477 __ push_frame_abi112(0, R11_scratch1);
476 478
477 // This is not a leaf but we have a JavaFrameAnchor now and we will 479 // This is not a leaf but we have a JavaFrameAnchor now and we will
478 // check (create) exceptions afterward so this is ok. 480 // check (create) exceptions afterward so this is ok.
479 __ call_VM_leaf(CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_AbstractMethodError)); 481 __ call_VM_leaf(CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_AbstractMethodError));
480 482
487 489
488 #ifdef CC_INTERP 490 #ifdef CC_INTERP
489 // Return to frame manager, it will handle the pending exception. 491 // Return to frame manager, it will handle the pending exception.
490 __ blr(); 492 __ blr();
491 #else 493 #else
492 // We don't know our caller, so jump to the general forward exception stub, 494 Unimplemented();
493 // which will also pop our full frame off. Satisfy the interface of
494 // SharedRuntime::generate_forward_exception()
495 __ load_const_optimized(R11_scratch1, StubRoutines::forward_exception_entry(), R0);
496 __ mtctr(R11_scratch1);
497 __ bctr();
498 #endif 495 #endif
499 496
500 return entry; 497 return entry;
501 } 498 }
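The two revisions differ in how the abstract-method entry unwinds: the CC interpreter can simply blr() back to the frame manager, while the template-interpreter side does not know its caller and therefore tail-jumps to the shared forward-exception stub. The load_const_optimized / mtctr / bctr triple is the standard PPC64 idiom for a jump to an arbitrary 64-bit address; a hedged sketch in the file's own macro style (the helper name is hypothetical):

    // Sketch: PPC64 indirect tail-jump idiom, as used above.
    static void jump_to(MacroAssembler* _masm, address target) {
      __ load_const_optimized(R11_scratch1, target, R0); // materialize the address
      __ mtctr(R11_scratch1);                            // CTR = target
      __ bctr();                                         // branch through CTR, no link
    }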
502 499
503 // Call an accessor method (assuming it is resolved, otherwise drop into 500 // Call an accessor method (assuming it is resolved, otherwise drop into
504 // vanilla (slow path) entry). 501 // vanilla (slow path) entry).
505 address InterpreterGenerator::generate_accessor_entry(void) { 502 address InterpreterGenerator::generate_accessor_entry(void) {
506 if (!UseFastAccessorMethods && (!FLAG_IS_ERGO(UseFastAccessorMethods))) { 503 if(!UseFastAccessorMethods && (!FLAG_IS_ERGO(UseFastAccessorMethods)))
507 return NULL; 504 return NULL;
508 }
509 505
510 Label Lslow_path, Lacquire; 506 Label Lslow_path, Lacquire;
511 507
512 const Register 508 const Register
513 Rclass_or_obj = R3_ARG1, 509 Rclass_or_obj = R3_ARG1,
588 __ mr(R1_SP, R21_sender_SP); // Cut the stack back to where the caller started. 584 __ mr(R1_SP, R21_sender_SP); // Cut the stack back to where the caller started.
589 585
590 // Load from branch table and dispatch (volatile case: one instruction ahead) 586 // Load from branch table and dispatch (volatile case: one instruction ahead)
591 __ sldi(Rflags, Rflags, LogBytesPerWord); 587 __ sldi(Rflags, Rflags, LogBytesPerWord);
592 __ cmpwi(CCR6, Rscratch, 1); // volatile? 588 __ cmpwi(CCR6, Rscratch, 1); // volatile?
593 if (support_IRIW_for_not_multiple_copy_atomic_cpu) { 589 __ sldi(Rscratch, Rscratch, exact_log2(BytesPerInstWord)); // volatile ? size of 1 instruction : 0
594 __ sldi(Rscratch, Rscratch, exact_log2(BytesPerInstWord)); // volatile ? size of 1 instruction : 0
595 }
596 __ ldx(Rbtable, Rbtable, Rflags); 590 __ ldx(Rbtable, Rbtable, Rflags);
597 591
598 if (support_IRIW_for_not_multiple_copy_atomic_cpu) { 592 __ subf(Rbtable, Rscratch, Rbtable); // point to volatile/non-volatile entry point
599 __ subf(Rbtable, Rscratch, Rbtable); // point to volatile/non-volatile entry point
600 }
601 __ mtctr(Rbtable); 593 __ mtctr(Rbtable);
602 __ bctr(); 594 __ bctr();
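Dispatch works by indexing the branch table with the type state (Rflags, scaled to word size) and then, for a volatile field, backing up by exactly one instruction so execution enters at the fence/sync that sits immediately before the non-volatile entry point. On the left-hand side this adjustment is emitted only when support_IRIW_for_not_multiple_copy_atomic_cpu is set. A free-standing model of the address arithmetic (assumption: names are illustrative; BytesPerInstWord is 4 on PPC):

    // What sldi/ldx/subf compute before the mtctr/bctr dispatch.
    typedef unsigned char* address;
    static address select_entry(address* branch_table, int tos_state, bool is_volatile) {
      address entry = branch_table[tos_state];          // ldx(Rbtable, Rbtable, Rflags)
      if (is_volatile) entry -= 4 /*BytesPerInstWord*/; // subf by Rscratch (0 or 4)
      return entry;                                     // mtctr(entry); bctr();
    }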
603 595
604 #ifdef ASSERT 596 #ifdef ASSERT
605 __ bind(LFlagInvalid); 597 __ bind(LFlagInvalid);
611 all_uninitialized = all_uninitialized && (branch_table[i] == NULL); 603 all_uninitialized = all_uninitialized && (branch_table[i] == NULL);
612 all_initialized = all_initialized && (branch_table[i] != NULL); 604 all_initialized = all_initialized && (branch_table[i] != NULL);
613 } 605 }
614 assert(all_uninitialized != all_initialized, "consistency"); // either or 606 assert(all_uninitialized != all_initialized, "consistency"); // either or
615 607
616 __ fence(); // volatile entry point (one instruction before non-volatile_entry point) 608 __ sync(); // volatile entry point (one instruction before non-volatile_entry point)
617 if (branch_table[vtos] == 0) branch_table[vtos] = __ pc(); // non-volatile_entry point 609 if (branch_table[vtos] == 0) branch_table[vtos] = __ pc(); // non-volatile_entry point
618 if (branch_table[dtos] == 0) branch_table[dtos] = __ pc(); // non-volatile_entry point 610 if (branch_table[dtos] == 0) branch_table[dtos] = __ pc(); // non-volatile_entry point
619 if (branch_table[ftos] == 0) branch_table[ftos] = __ pc(); // non-volatile_entry point 611 if (branch_table[ftos] == 0) branch_table[ftos] = __ pc(); // non-volatile_entry point
620 __ stop("unexpected type", 0x6551); 612 __ stop("unexpected type", 0x6551);
621 #endif 613 #endif
622 614
623 if (branch_table[itos] == 0) { // generate only once 615 if (branch_table[itos] == 0) { // generate only once
624 __ align(32, 28, 28); // align load 616 __ align(32, 28, 28); // align load
625 __ fence(); // volatile entry point (one instruction before non-volatile_entry point) 617 __ sync(); // volatile entry point (one instruction before non-volatile_entry point)
626 branch_table[itos] = __ pc(); // non-volatile_entry point 618 branch_table[itos] = __ pc(); // non-volatile_entry point
627 __ lwax(R3_RET, Rclass_or_obj, Roffset); 619 __ lwax(R3_RET, Rclass_or_obj, Roffset);
628 __ beq(CCR6, Lacquire); 620 __ beq(CCR6, Lacquire);
629 __ blr(); 621 __ blr();
630 } 622 }
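Each per-type stub is laid out so the volatile entry point is literally the one instruction before the non-volatile one: align(32, 28, 28) parks the fence (left) or raw sync (right) at offset 28 of a 32-byte block, the load follows at the block boundary, and a volatile access additionally branches to Lacquire after the load. A free-standing C++ model of the two paths (assumption: illustrative only, with standard fences standing in for sync/isync):

    #include <atomic>
    // Volatile path: fence; load; acquire.  Plain path: just the load.
    static int load_int_field(const char* obj, long offset, bool is_volatile) {
      if (is_volatile) std::atomic_thread_fence(std::memory_order_seq_cst); // sync
      int v = *reinterpret_cast<const int*>(obj + offset);                  // lwax
      if (is_volatile) std::atomic_thread_fence(std::memory_order_acquire); // Lacquire
      return v;
    }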
631 623
632 if (branch_table[ltos] == 0) { // generate only once 624 if (branch_table[ltos] == 0) { // generate only once
633 __ align(32, 28, 28); // align load 625 __ align(32, 28, 28); // align load
634 __ fence(); // volatile entry point (one instruction before non-volatile_entry point) 626 __ sync(); // volatile entry point (one instruction before non-volatile_entry point)
635 branch_table[ltos] = __ pc(); // non-volatile_entry point 627 branch_table[ltos] = __ pc(); // non-volatile_entry point
636 __ ldx(R3_RET, Rclass_or_obj, Roffset); 628 __ ldx(R3_RET, Rclass_or_obj, Roffset);
637 __ beq(CCR6, Lacquire); 629 __ beq(CCR6, Lacquire);
638 __ blr(); 630 __ blr();
639 } 631 }
640 632
641 if (branch_table[btos] == 0) { // generate only once 633 if (branch_table[btos] == 0) { // generate only once
642 __ align(32, 28, 28); // align load 634 __ align(32, 28, 28); // align load
643 __ fence(); // volatile entry point (one instruction before non-volatile_entry point) 635 __ sync(); // volatile entry point (one instruction before non-volatile_entry point)
644 branch_table[btos] = __ pc(); // non-volatile_entry point 636 branch_table[btos] = __ pc(); // non-volatile_entry point
645 __ lbzx(R3_RET, Rclass_or_obj, Roffset); 637 __ lbzx(R3_RET, Rclass_or_obj, Roffset);
646 __ extsb(R3_RET, R3_RET); 638 __ extsb(R3_RET, R3_RET);
647 __ beq(CCR6, Lacquire); 639 __ beq(CCR6, Lacquire);
648 __ blr(); 640 __ blr();
649 } 641 }
650 642
651 if (branch_table[ctos] == 0) { // generate only once 643 if (branch_table[ctos] == 0) { // generate only once
652 __ align(32, 28, 28); // align load 644 __ align(32, 28, 28); // align load
653 __ fence(); // volatile entry point (one instruction before non-volatile_entry point) 645 __ sync(); // volatile entry point (one instruction before non-volatile_entry point)
654 branch_table[ctos] = __ pc(); // non-volatile_entry point 646 branch_table[ctos] = __ pc(); // non-volatile_entry point
655 __ lhzx(R3_RET, Rclass_or_obj, Roffset); 647 __ lhzx(R3_RET, Rclass_or_obj, Roffset);
656 __ beq(CCR6, Lacquire); 648 __ beq(CCR6, Lacquire);
657 __ blr(); 649 __ blr();
658 } 650 }
659 651
660 if (branch_table[stos] == 0) { // generate only once 652 if (branch_table[stos] == 0) { // generate only once
661 __ align(32, 28, 28); // align load 653 __ align(32, 28, 28); // align load
662 __ fence(); // volatile entry point (one instruction before non-volatile_entry point) 654 __ sync(); // volatile entry point (one instruction before non-volatile_entry point)
663 branch_table[stos] = __ pc(); // non-volatile_entry point 655 branch_table[stos] = __ pc(); // non-volatile_entry point
664 __ lhax(R3_RET, Rclass_or_obj, Roffset); 656 __ lhax(R3_RET, Rclass_or_obj, Roffset);
665 __ beq(CCR6, Lacquire); 657 __ beq(CCR6, Lacquire);
666 __ blr(); 658 __ blr();
667 } 659 }
668 660
669 if (branch_table[atos] == 0) { // generate only once 661 if (branch_table[atos] == 0) { // generate only once
670 __ align(32, 28, 28); // align load 662 __ align(32, 28, 28); // align load
671 __ fence(); // volatile entry point (one instruction before non-volatile_entry point) 663 __ sync(); // volatile entry point (one instruction before non-volatile_entry point)
672 branch_table[atos] = __ pc(); // non-volatile_entry point 664 branch_table[atos] = __ pc(); // non-volatile_entry point
673 __ load_heap_oop(R3_RET, (RegisterOrConstant)Roffset, Rclass_or_obj); 665 __ load_heap_oop(R3_RET, (RegisterOrConstant)Roffset, Rclass_or_obj);
674 __ verify_oop(R3_RET); 666 __ verify_oop(R3_RET);
675 //__ dcbt(R3_RET); // prefetch 667 //__ dcbt(R3_RET); // prefetch
676 __ beq(CCR6, Lacquire); 668 __ beq(CCR6, Lacquire);
689 //tty->print_cr("accessor_entry: branch_table[%d] = 0x%llx (opcode 0x%llx)", i, branch_table[i], *((unsigned int*)branch_table[i])); 681 //tty->print_cr("accessor_entry: branch_table[%d] = 0x%llx (opcode 0x%llx)", i, branch_table[i], *((unsigned int*)branch_table[i]));
690 } 682 }
691 #endif 683 #endif
692 684
693 __ bind(Lslow_path); 685 __ bind(Lslow_path);
694 __ branch_to_entry(Interpreter::entry_for_kind(Interpreter::zerolocals), Rscratch); 686 assert(Interpreter::entry_for_kind(Interpreter::zerolocals), "Normal entry must have been generated by now");
687 __ load_const_optimized(Rscratch, Interpreter::entry_for_kind(Interpreter::zerolocals), R0);
688 __ mtctr(Rscratch);
689 __ bctr();
695 __ flush(); 690 __ flush();
696 691
697 return entry; 692 return entry;
698 } 693 }
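The slow path ends the same way on both sides: the left packages it as branch_to_entry(entry, Rscratch), while the right spells out the assert plus the load_const_optimized / mtctr / bctr sequence sketched after the abstract entry above. Assuming branch_to_entry simply wraps that inline sequence (an assumption; the real helper may differ in detail), the two are equivalent:

    // Sketch of the presumed helper, in the file's macro style.
    static void branch_to_entry_sketch(MacroAssembler* _masm, address entry, Register tmp) {
      assert(entry != NULL, "Normal entry must have been generated by now");
      __ load_const_optimized(tmp, entry, R0); // materialize the entry address
      __ mtctr(tmp);                           // CTR = entry
      __ bctr();                               // tail-jump into the normal entry
    }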
699 694
776 771
777 __ blr(); 772 __ blr();
778 773
779 // Generate regular method entry. 774 // Generate regular method entry.
780 __ bind(slow_path); 775 __ bind(slow_path);
781 __ branch_to_entry(Interpreter::entry_for_kind(Interpreter::zerolocals), R11_scratch1); 776 assert(Interpreter::entry_for_kind(Interpreter::zerolocals), "Normal entry must have been generated by now");
777 __ load_const_optimized(R11_scratch1, Interpreter::entry_for_kind(Interpreter::zerolocals), R0);
778 __ mtctr(R11_scratch1);
779 __ bctr();
782 __ flush(); 780 __ flush();
783 781
784 return entry; 782 return entry;
785 } else { 783 } else {
786 return generate_accessor_entry(); 784 return generate_accessor_entry();