comparison src/cpu/sparc/vm/stubGenerator_sparc.cpp @ 10997:46c544b8fbfc

8008407: remove SPARC V8 support
Summary: Removed most of the SPARC V8 instructions
Reviewed-by: kvn, twisti
author morris
date Fri, 07 Jun 2013 16:46:37 -0700
parents d2f8c38e543d
children 980532a806a5
comparing 10996:ea60d1de6735 with 10997:46c544b8fbfc
@@ -564,14 +564,14 @@
 
   address generate_flush_callers_register_windows() {
     StubCodeMark mark(this, "StubRoutines", "flush_callers_register_windows");
     address start = __ pc();
 
-    __ flush_windows();
+    __ flushw();
     __ retl(false);
     __ delayed()->add( FP, STACK_BIAS, O0 );
     // The returned value must be a stack pointer whose register save area
     // is flushed, and will stay flushed while the caller executes.
 
     return start;
   }
 
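The only functional change in this stub is the swap from the V8-compatible flush_windows() wrapper to the bare V9 flushw instruction. For context, a minimal sketch of what the removed wrapper plausibly did (the flush_windows_trap() helper name is an assumption, not the verbatim MacroAssembler source; on V8 the flush had to go through a software trap):

    // Sketch only: V8 had no flushw, so flushing the register windows
    // meant trapping into the kernel; V9 does it in a single instruction.
    void MacroAssembler::flush_windows() {
      if (VM_Version::v9_instructions_work()) {
        flushw();               // V9: flush all active register windows
      } else {
        flush_windows_trap();   // V8: software trap (assumed helper name)
      }
    }

With the V8 arm gone, the stub can emit flushw() directly.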
@@ -578,58 +577,0 @@
-  // Helper functions for v8 atomic operations.
-  //
-  void get_v8_oop_lock_ptr(Register lock_ptr_reg, Register mark_oop_reg, Register scratch_reg) {
-    if (mark_oop_reg == noreg) {
-      address lock_ptr = (address)StubRoutines::Sparc::atomic_memory_operation_lock_addr();
-      __ set((intptr_t)lock_ptr, lock_ptr_reg);
-    } else {
-      assert(scratch_reg != noreg, "just checking");
-      address lock_ptr = (address)StubRoutines::Sparc::_v8_oop_lock_cache;
-      __ set((intptr_t)lock_ptr, lock_ptr_reg);
-      __ and3(mark_oop_reg, StubRoutines::Sparc::v8_oop_lock_mask_in_place, scratch_reg);
-      __ add(lock_ptr_reg, scratch_reg, lock_ptr_reg);
-    }
-  }
-
-  void generate_v8_lock_prologue(Register lock_reg, Register lock_ptr_reg, Register yield_reg, Label& retry, Label& dontyield, Register mark_oop_reg = noreg, Register scratch_reg = noreg) {
-
-    get_v8_oop_lock_ptr(lock_ptr_reg, mark_oop_reg, scratch_reg);
-    __ set(StubRoutines::Sparc::locked, lock_reg);
-    // Initialize yield counter
-    __ mov(G0,yield_reg);
-
-    __ BIND(retry);
-    __ cmp_and_br_short(yield_reg, V8AtomicOperationUnderLockSpinCount, Assembler::less, Assembler::pt, dontyield);
-
-    // This code can only be called from inside the VM, this
-    // stub is only invoked from Atomic::add(). We do not
-    // want to use call_VM, because _last_java_sp and such
-    // must already be set.
-    //
-    // Save the regs and make space for a C call
-    __ save(SP, -96, SP);
-    __ save_all_globals_into_locals();
-    BLOCK_COMMENT("call os::naked_sleep");
-    __ call(CAST_FROM_FN_PTR(address, os::naked_sleep));
-    __ delayed()->nop();
-    __ restore_globals_from_locals();
-    __ restore();
-    // reset the counter
-    __ mov(G0,yield_reg);
-
-    __ BIND(dontyield);
-
-    // try to get lock
-    __ swap(lock_ptr_reg, 0, lock_reg);
-
-    // did we get the lock?
-    __ cmp(lock_reg, StubRoutines::Sparc::unlocked);
-    __ br(Assembler::notEqual, true, Assembler::pn, retry);
-    __ delayed()->add(yield_reg,1,yield_reg);
-
-    // yes, got lock. do the operation here.
-  }
-
-  void generate_v8_lock_epilogue(Register lock_reg, Register lock_ptr_reg, Register yield_reg, Label& retry, Label& dontyield, Register mark_oop_reg = noreg, Register scratch_reg = noreg) {
-    __ st(lock_reg, lock_ptr_reg, 0); // unlock
-  }
-
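The helpers deleted above were the whole V8 atomics emulation: V8 has no compare-and-swap, so every atomic read-modify-write was serialized under a lock word acquired with the V8 swap instruction, spinning with a bounded yield loop. A self-contained C++ model of that protocol, behavioral sketch only, with stand-in names (SPIN_COUNT for V8AtomicOperationUnderLockSpinCount, std::this_thread::yield() for os::naked_sleep()):

    #include <atomic>
    #include <thread>

    static const int SPIN_COUNT = 50;       // stand-in for V8AtomicOperationUnderLockSpinCount
    static std::atomic<int> lock_word(0);   // 0 == unlocked, 1 == locked

    // Model of generate_v8_lock_prologue/epilogue: acquire the lock by
    // atomically swapping in 'locked' (the V8 swap instruction), yielding
    // after a bounded number of failed spins, then run the operation and
    // store the unlocked value back.
    template <typename Op>
    int under_v8_lock(Op op) {
      int spins = 0;
      while (lock_word.exchange(1) != 0) {  // swap; retry if already held
        if (++spins >= SPIN_COUNT) {
          std::this_thread::yield();        // the prologue's os::naked_sleep() call
          spins = 0;                        // reset the counter
        }
      }
      int result = op();                    // "yes, got lock. do the operation here."
      lock_word.store(0);                   // epilogue: st(lock_reg, lock_ptr_reg, 0)
      return result;
    }

get_v8_oop_lock_ptr() additionally hashed bits of an oop's mark address into a small cache of such lock words, so unrelated objects did not all contend on one global lock.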
@@ -636,8 +578,8 @@
   // Support for jint Atomic::xchg(jint exchange_value, volatile jint* dest).
   //
-  // Arguments :
+  // Arguments:
   //
   // exchange_value: O0
   // dest: O1
   //
   // Results:
@@ -654,37 +596,18 @@
       Label retry;
       __ BIND(retry);
       __ mov(O0, O3); // scratch copy of exchange value
       __ ld(O1, 0, O2); // observe the previous value
       // try to replace O2 with O3
-      __ cas_under_lock(O1, O2, O3,
-        (address)StubRoutines::Sparc::atomic_memory_operation_lock_addr(),false);
+      __ cas(O1, O2, O3);
       __ cmp_and_br_short(O2, O3, Assembler::notEqual, Assembler::pn, retry);
 
       __ retl(false);
       __ delayed()->mov(O2, O0); // report previous value to caller
-
     } else {
-      if (VM_Version::v9_instructions_work()) {
-        __ retl(false);
-        __ delayed()->swap(O1, 0, O0);
-      } else {
-        const Register& lock_reg = O2;
-        const Register& lock_ptr_reg = O3;
-        const Register& yield_reg = O4;
-
-        Label retry;
-        Label dontyield;
-
-        generate_v8_lock_prologue(lock_reg, lock_ptr_reg, yield_reg, retry, dontyield);
-        // got the lock, do the swap
-        __ swap(O1, 0, O0);
-
-        generate_v8_lock_epilogue(lock_reg, lock_ptr_reg, yield_reg, retry, dontyield);
-        __ retl(false);
-        __ delayed()->nop();
-      }
+      __ retl(false);
+      __ delayed()->swap(O1, 0, O0);
     }
 
     return start;
   }
 
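Both surviving arms of generate_atomic_xchg() presume V9: a bare swap, or, under the (elided) UseCASForSwap guard, a load plus compare-and-swap retry loop. A C++ model of the CAS arm, sketch only, with std::atomic standing in for the cas instruction:

    #include <atomic>

    // Model of the CAS-based arm: keep trying to replace the observed
    // value until no other thread has raced in between.
    int atomic_xchg_via_cas(int exchange_value, std::atomic<int>* dest) {
      int observed = dest->load();          // __ ld(O1, 0, O2): observe previous value
      // cas succeeds only if *dest still equals 'observed'; on failure
      // compare_exchange_weak refreshes 'observed' and we retry, matching
      // cmp_and_br_short(O2, O3, notEqual, pn, retry) above.
      while (!dest->compare_exchange_weak(observed, exchange_value)) {
      }
      return observed;                      // __ delayed()->mov(O2, O0)
    }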
@@ -691,28 +614,25 @@
 
   // Support for jint Atomic::cmpxchg(jint exchange_value, volatile jint* dest, jint compare_value)
   //
-  // Arguments :
+  // Arguments:
   //
   // exchange_value: O0
   // dest: O1
   // compare_value: O2
   //
   // Results:
   //
   // O0: the value previously stored in dest
   //
-  // Overwrites (v8): O3,O4,O5
-  //
   address generate_atomic_cmpxchg() {
     StubCodeMark mark(this, "StubRoutines", "atomic_cmpxchg");
     address start = __ pc();
 
     // cmpxchg(dest, compare_value, exchange_value)
-    __ cas_under_lock(O1, O2, O0,
-      (address)StubRoutines::Sparc::atomic_memory_operation_lock_addr(),false);
+    __ cas(O1, O2, O0);
     __ retl(false);
     __ delayed()->nop();
 
     return start;
   }
 
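With V9 assumed, generate_atomic_cmpxchg() collapses to a single cas, which atomically compares O2 against memory and always leaves the previous memory value in O0. An equivalent C++ model (sketch only):

    #include <atomic>

    // Model of the stub: one compare-and-swap, returning the prior value.
    int atomic_cmpxchg(int exchange_value, std::atomic<int>* dest, int compare_value) {
      // On failure, compare_exchange_strong writes the observed value into
      // 'compare_value'; on success it is unchanged and already equals the
      // old contents, so either way it is the value previously stored.
      dest->compare_exchange_strong(compare_value, exchange_value);
      return compare_value;
    }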
@@ -719,28 +639,23 @@
   // Support for jlong Atomic::cmpxchg(jlong exchange_value, volatile jlong *dest, jlong compare_value)
   //
-  // Arguments :
+  // Arguments:
   //
   // exchange_value: O1:O0
   // dest: O2
   // compare_value: O4:O3
   //
   // Results:
   //
   // O1:O0: the value previously stored in dest
   //
-  // This only works on V9, on V8 we don't generate any
-  // code and just return NULL.
-  //
   // Overwrites: G1,G2,G3
   //
   address generate_atomic_cmpxchg_long() {
     StubCodeMark mark(this, "StubRoutines", "atomic_cmpxchg_long");
     address start = __ pc();
 
-    if (!VM_Version::supports_cx8())
-      return NULL;;
     __ sllx(O0, 32, O0);
     __ srl(O1, 0, O1);
     __ or3(O0,O1,O0); // O0 holds 64-bit value from compare_value
     __ sllx(O3, 32, O3);
     __ srl(O4, 0, O4);
@@ -754,2 +669,2 @@
   }
 
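The sllx/srl/or3 prologue reassembles each 64-bit operand from its two 32-bit argument registers before the (elided) casx. Note that srl by 0 is not a no-op on V9: it zero-extends the low word, clearing any stale upper 32 bits before the halves are OR-ed together. In C++ terms, roughly (assuming the high word arrives in O0/O3 and the low word in O1/O4):

    #include <cstdint>

    // Model of: sllx(hi, 32, hi); srl(lo, 0, lo); or3(hi, lo, hi);
    int64_t pack_from_halves(uint32_t hi, uint32_t lo) {
      // The cast of 'lo' plays the role of the srl-by-0 zero-extension.
      return static_cast<int64_t>((static_cast<uint64_t>(hi) << 32)
                                  | static_cast<uint64_t>(lo));
    }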
@@ -756,58 +671,33 @@
 
   // Support for jint Atomic::add(jint add_value, volatile jint* dest).
   //
-  // Arguments :
+  // Arguments:
   //
   // add_value: O0 (e.g., +1 or -1)
   // dest: O1
   //
   // Results:
   //
   // O0: the new value stored in dest
   //
-  // Overwrites (v9): O3
-  // Overwrites (v8): O3,O4,O5
+  // Overwrites: O3
   //
   address generate_atomic_add() {
     StubCodeMark mark(this, "StubRoutines", "atomic_add");
     address start = __ pc();
     __ BIND(_atomic_add_stub);
 
-    if (VM_Version::v9_instructions_work()) {
-      Label(retry);
-      __ BIND(retry);
-
-      __ lduw(O1, 0, O2);
-      __ add(O0, O2, O3);
-      __ cas(O1, O2, O3);
-      __ cmp_and_br_short(O2, O3, Assembler::notEqual, Assembler::pn, retry);
-      __ retl(false);
-      __ delayed()->add(O0, O2, O0); // note that cas made O2==O3
-    } else {
-      const Register& lock_reg = O2;
-      const Register& lock_ptr_reg = O3;
-      const Register& value_reg = O4;
-      const Register& yield_reg = O5;
-
-      Label(retry);
-      Label(dontyield);
-
-      generate_v8_lock_prologue(lock_reg, lock_ptr_reg, yield_reg, retry, dontyield);
-      // got lock, do the increment
-      __ ld(O1, 0, value_reg);
-      __ add(O0, value_reg, value_reg);
-      __ st(value_reg, O1, 0);
-
-      // %%% only for RMO and PSO
-      __ membar(Assembler::StoreStore);
-
-      generate_v8_lock_epilogue(lock_reg, lock_ptr_reg, yield_reg, retry, dontyield);
-
-      __ retl(false);
-      __ delayed()->mov(value_reg, O0);
-    }
+    Label(retry);
+    __ BIND(retry);
+
+    __ lduw(O1, 0, O2);
+    __ add(O0, O2, O3);
+    __ cas(O1, O2, O3);
+    __ cmp_and_br_short(O2, O3, Assembler::notEqual, Assembler::pn, retry);
+    __ retl(false);
+    __ delayed()->add(O0, O2, O0); // note that cas made O2==O3
 
     return start;
   }
   Label _atomic_add_stub; // called from other stubs
 
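generate_atomic_add() keeps only the V9 path: a classic fetch-and-add built from a load and a CAS retry loop, with the delay slot recomputing O0 + O2 as the return value (a successful cas leaves O2 equal to the value that was in memory). A C++ model of the same loop:

    #include <atomic>

    // Model of the stub: load, compute the sum, cas it in; if another
    // thread changed *dest in between, reload and retry.
    int atomic_add_via_cas(int add_value, std::atomic<int>* dest) {
      int observed = dest->load();                  // __ lduw(O1, 0, O2)
      while (!dest->compare_exchange_weak(observed, observed + add_value)) {
        // 'observed' was refreshed with the current value; retry
      }
      return observed + add_value;                  // delay slot: add(O0, O2, O0)
    }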
@@ -839,11 +729,11 @@
     __ mov(G1, L1);
     __ mov(G2, L2);
     __ mov(G3, L3);
     __ mov(G4, L4);
     __ mov(G5, L5);
-    for (i = 0; i < (VM_Version::v9_instructions_work() ? 64 : 32); i += 2) {
+    for (i = 0; i < 64; i += 2) {
       __ stf(FloatRegisterImpl::D, as_FloatRegister(i), preserve_addr, i * wordSize);
     }
 
     address entry_point = CAST_FROM_FN_PTR(address, handle_unsafe_access);
     BLOCK_COMMENT("call handle_unsafe_access");
@@ -853,11 +743,11 @@
     __ mov(L1, G1);
     __ mov(L2, G2);
     __ mov(L3, G3);
     __ mov(L4, G4);
     __ mov(L5, G5);
-    for (i = 0; i < (VM_Version::v9_instructions_work() ? 64 : 32); i += 2) {
+    for (i = 0; i < 64; i += 2) {
       __ ldf(FloatRegisterImpl::D, preserve_addr, as_FloatRegister(i), i * wordSize);
     }
 
     __ verify_thread();
 
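The only change in this unsafe-access handler is the loop bound: with V8 gone, all 32 double-precision registers f0, f2, ..., f62 are saved and restored unconditionally (V9 addresses doubles at even register numbers; V8 exposed only f0-f31, hence the old ternary bound of 32). A small sketch of the save-area offsets the stf/ldf loops imply; the 8-byte wordSize is a 64-bit-build assumption (a 32-bit build, with 4-byte wordSize, packs the 8-byte doubles back to back):

    #include <cstdio>

    // Each even-numbered double register d(i) is stored at byte offset
    // i * wordSize, mirroring the save/restore loops above.
    int main() {
      const int wordSize = 8;  // assumption: 64-bit build
      for (int i = 0; i < 64; i += 2)
        std::printf("f%-2d -> save-area offset %3d\n", i, i * wordSize);
      return 0;
    }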