comparison: src/share/vm/opto/memnode.cpp @ 17:ff5961f4c095

6395208: Elide autoboxing for calls to HashMap.get(int) and HashMap.get(long)
Reviewed-by: kvn, rasbold
author: never
date: Wed, 05 Dec 2007 09:01:00 -0800
parents: a61af66fc99e
children: d5fc211aea19
comparing 16:f8236e79048a with 17:ff5961f4c095
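For context, a minimal Java program (names illustrative, not from this changeset) showing the pattern being optimized: the int key is autoboxed through Integer.valueOf on its way into HashMap.get, and with -XX:+EliminateAutoBox the JIT can see through the box and use the primitive value directly.

import java.util.HashMap;

public class AutoboxGetDemo {
    public static void main(String[] args) {
        HashMap<Integer, String> map = new HashMap<Integer, String>();
        map.put(42, "answer");

        int key = 42;
        // map.get(key) compiles to map.get(Integer.valueOf(key)):
        // valueOf either returns a cached box or allocates a fresh one,
        // and hashCode()/equals() then reload the boxed value field.
        // The changes below let C2 elide that round trip.
        System.out.println(map.get(key));   // prints "answer"
    }
}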
// LoadXNode::Identity will fold things back to the equivalence-class model
// of aliasing.
Node* MemNode::can_see_stored_value(Node* st, PhaseTransform* phase) const {
  Node* ld_adr = in(MemNode::Address);

  const TypeInstPtr* tp = phase->type(ld_adr)->isa_instptr();
  Compile::AliasType* atp = tp != NULL ? phase->C->alias_type(tp) : NULL;
  if (EliminateAutoBox && atp != NULL && atp->index() >= Compile::AliasIdxRaw &&
      atp->field() != NULL && !atp->field()->is_volatile()) {
    uint alias_idx = atp->index();
    bool final = atp->field()->is_final();
    Node* result = NULL;
    Node* current = st;
    // Skip through chains of MemBarNodes checking the MergeMems for
    // new states for the slice of this load.  Stop once any other
    // kind of node is encountered.  Loads from final memory can skip
    // through any kind of MemBar, but normal loads shouldn't skip
    // through MemBarAcquire since that could allow them to move out of
    // a synchronized region.
    while (current->is_Proj()) {
      int opc = current->in(0)->Opcode();
      if ((final && opc == Op_MemBarAcquire) ||
          opc == Op_MemBarRelease || opc == Op_MemBarCPUOrder) {
        Node* mem = current->in(0)->in(TypeFunc::Memory);
        if (mem->is_MergeMem()) {
          MergeMemNode* merge = mem->as_MergeMem();
          Node* new_st = merge->memory_at(alias_idx);
          if (new_st == merge->base_memory()) {
            // Keep searching
            current = merge->base_memory();
            continue;
          }
          // Save the new memory state for the slice and fall through
          // to exit.
          result = new_st;
        }
      }
      break;
    }
    if (result != NULL) {
      st = result;
    }
  }

  // Loop around twice in the case Load -> Initialize -> Store.
  // (See PhaseIterGVN::add_users_to_worklist, which knows about this case.)
  for (int trip = 0; trip <= 1; trip++) {

    if (st->is_Store()) {
...

    return value;
  }
  return this;
}

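To illustrate the final-versus-ordinary distinction drawn in the MemBar-skipping loop above, a hedged Java sketch (class and field names invented): a load of a final field may float past the acquire barrier emitted at monitorenter, while an ordinary load must stay inside the synchronized region.

class MemBarSketch {
    static final class Box {
        final int value;            // final: immutable after construction
        Box(int v) { value = v; }
    }

    int plain;                      // ordinary mutable field

    int demo(Box b) {
        int f, p;
        synchronized (this) {       // monitorenter emits a MemBarAcquire
            f = b.value;            // final load: safe to hoist above the barrier
            p = plain;              // ordinary load: must not move out, or it
                                    // could observe a stale value from outside
                                    // the critical section
        }                           // monitorexit emits a MemBarRelease
        return f + p;
    }
}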
// Returns true if the AliasType refers to the field that holds the
// cached box array.  Currently only handles the IntegerCache case.
static bool is_autobox_cache(Compile::AliasType* atp) {
  if (atp != NULL && atp->field() != NULL) {
    ciField* field = atp->field();
    ciSymbol* klass = field->holder()->name();
    if (field->name() == ciSymbol::cache_field_name() &&
        field->holder()->uses_default_loader() &&
        klass == ciSymbol::java_lang_Integer_IntegerCache()) {
      return true;
    }
  }
  return false;
}
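The field matched here is java.lang.Integer$IntegerCache.cache. A simplified, self-contained model of that class (not the actual JDK source, which differs in detail):

public class IntegerCacheSketch {
    // Rough model of java.lang.Integer$IntegerCache in the JDK 6 era:
    // a static final array pre-boxing every value in [-128, 127].
    static final Integer[] cache = new Integer[256];
    static {
        for (int i = 0; i < cache.length; i++)
            cache[i] = new Integer(i - 128);   // cache[0] boxes -128
    }

    public static void main(String[] args) {
        System.out.println(cache[0]);          // prints -128
    }
}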

// Fetch the base value in the autobox array
static bool fetch_autobox_base(Compile::AliasType* atp, int& cache_offset) {
  if (atp != NULL && atp->field() != NULL) {
    ciField* field = atp->field();
    ciSymbol* klass = field->holder()->name();
    if (field->name() == ciSymbol::cache_field_name() &&
        field->holder()->uses_default_loader() &&
        klass == ciSymbol::java_lang_Integer_IntegerCache()) {
      assert(field->is_constant(), "what?");
      ciObjArray* array = field->constant_value().as_object()->as_obj_array();
      // Fetch the box object at the base of the array and get its value
      ciInstance* box = array->obj_at(0)->as_instance();
      ciInstanceKlass* ik = box->klass()->as_instance_klass();
      if (ik->nof_nonstatic_fields() == 1) {
        // This should always be true: nonstatic_field_at requires that
        // nof_nonstatic_fields has been called first, so the check is
        // essentially free anyway.
        ciConstant c = box->field_value(ik->nonstatic_field_at(0));
        cache_offset = c.as_int();
      }
      return true;
    }
  }
  return false;
}
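So cache_offset ends up holding the value boxed at index 0 of the array, which for the Integer cache is the low bound -128. A small demonstration from the Java side (relying only on the specified caching of valueOf in [-128, 127]):

public class CacheBaseDemo {
    public static void main(String[] args) {
        // valueOf returns the cached box for values in [-128, 127], so
        // both calls below yield the same object: the box at the base
        // of the cache array, whose value field is what
        // fetch_autobox_base reads through the ci* interfaces.
        Integer a = Integer.valueOf(-128);
        Integer b = Integer.valueOf(-128);
        System.out.println(a == b);          // true: same cached box
        System.out.println(a.intValue());    // -128
    }
}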

// Returns true if the AliasType refers to the value field of an
// autobox object.  Currently only handles Integer.
static bool is_autobox_object(Compile::AliasType* atp) {
  if (atp != NULL && atp->field() != NULL) {
    ciField* field = atp->field();
    ciSymbol* klass = field->holder()->name();
    if (field->name() == ciSymbol::value_name() &&
        field->holder()->uses_default_loader() &&
        klass == ciSymbol::java_lang_Integer()) {
      return true;
    }
  }
  return false;
}
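eliminate_autobox, defined below, pattern-matches the IR shape produced by Integer.valueOf, which in this era's JDK reads roughly as follows (a self-contained paraphrase, not the exact source):

class ValueOfShape {
    static final Integer[] cache = new Integer[256];
    static { for (int i = 0; i < 256; i++) cache[i] = new Integer(i - 128); }

    // Roughly the shape of java.lang.Integer.valueOf: the return value
    // becomes a phi merging a load from the cache array with a fresh
    // allocation, which is exactly what the code below looks for.
    static Integer valueOf(int i) {
        if (i >= -128 && i <= 127)
            return cache[i + 128];   // phi input 1: Load from the cache
        return new Integer(i);       // phi input 2: Allocate
    }
}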


// We're loading from an object which has autobox behaviour.
// If this object is the result of a valueOf call we'll have a phi
// merging a newly allocated object and a load from the cache.
// We want to replace this load with the original incoming
// argument to the valueOf call.
Node* LoadNode::eliminate_autobox(PhaseGVN* phase) {
  Node* base = in(Address)->in(AddPNode::Base);
  if (base->is_Phi() && base->req() == 3) {
    AllocateNode* allocation = NULL;
    int allocation_index = -1;
    int load_index = -1;
    for (uint i = 1; i < base->req(); i++) {
      allocation = AllocateNode::Ideal_allocation(base->in(i), phase);
      if (allocation != NULL) {
        allocation_index = i;
        load_index = 3 - allocation_index;
        break;
      }
    }
    LoadNode* load = NULL;
    if (allocation != NULL && base->in(load_index)->is_Load()) {
      load = base->in(load_index)->as_Load();
    }
    if (load != NULL && in(Memory)->is_Phi() && in(Memory)->in(0) == base->in(0)) {
      // Push the loads from the phi that comes from valueOf up
      // through it to allow elimination of the loads and the recovery
      // of the original value.
      Node* mem_phi = in(Memory);
      Node* offset = in(Address)->in(AddPNode::Offset);

      Node* in1 = clone();
      Node* in1_addr = in1->in(Address)->clone();
      in1_addr->set_req(AddPNode::Base, base->in(allocation_index));
      in1_addr->set_req(AddPNode::Address, base->in(allocation_index));
      in1_addr->set_req(AddPNode::Offset, offset);
      in1->set_req(0, base->in(allocation_index));
      in1->set_req(Address, in1_addr);
      in1->set_req(Memory, mem_phi->in(allocation_index));

      Node* in2 = clone();
      Node* in2_addr = in2->in(Address)->clone();
      in2_addr->set_req(AddPNode::Base, base->in(load_index));
      in2_addr->set_req(AddPNode::Address, base->in(load_index));
      in2_addr->set_req(AddPNode::Offset, offset);
      in2->set_req(0, base->in(load_index));
      in2->set_req(Address, in2_addr);
      in2->set_req(Memory, mem_phi->in(load_index));

      in1_addr = phase->transform(in1_addr);
      in1 = phase->transform(in1);
      in2_addr = phase->transform(in2_addr);
      in2 = phase->transform(in2);

      PhiNode* result = PhiNode::make_blank(base->in(0), this);
      result->set_req(allocation_index, in1);
      result->set_req(load_index, in2);
      return result;
    }
  } else if (base->is_Load()) {
    // Eliminate the load of Integer.value for integers from the cache
    // array by deriving the value from the index into the array.
    // Capture the offset of the load and then reverse the computation.
    Node* load_base = base->in(Address)->in(AddPNode::Base);
    if (load_base != NULL) {
      Compile::AliasType* atp = phase->C->alias_type(load_base->adr_type());
      intptr_t cache_offset;
      int shift = -1;
      Node* cache = NULL;
      if (is_autobox_cache(atp)) {
        shift = exact_log2(type2aelembytes[T_OBJECT]);
        cache = AddPNode::Ideal_base_and_offset(load_base->in(Address), phase, cache_offset);
      }
      if (cache != NULL && base->in(Address)->is_AddP()) {
        Node* elements[4];
        int count = base->in(Address)->as_AddP()->unpack_offsets(elements, ARRAY_SIZE(elements));
        int cache_low;
        if (count > 0 && fetch_autobox_base(atp, cache_low)) {
          int offset = arrayOopDesc::base_offset_in_bytes(memory_type()) - (cache_low << shift);
          // Add up all the offsets making up the address of the load
          Node* result = elements[0];
          for (int i = 1; i < count; i++) {
            result = phase->transform(new (phase->C, 3) AddXNode(result, elements[i]));
          }
          // Remove the constant offset from the address and then
          // remove the scaling of the offset to recover the original index.
          result = phase->transform(new (phase->C, 3) AddXNode(result, phase->MakeConX(-offset)));
          if (result->Opcode() == Op_LShiftX && result->in(2) == phase->intcon(shift)) {
            // Peel the shift off directly but wrap it in a dummy node
            // since Ideal can't return existing nodes
            result = new (phase->C, 3) RShiftXNode(result->in(1), phase->intcon(0));
          } else {
            result = new (phase->C, 3) RShiftXNode(result, phase->intcon(shift));
          }
#ifdef _LP64
          result = new (phase->C, 2) ConvL2INode(phase->transform(result));
#endif
          return result;
        }
      }
    }
  }
  return NULL;
}
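The reversed address arithmetic can be checked with plain integer math. The constants below are illustrative stand-ins (the real shift, base offset, and low bound come from the VM at compile time):

public class IndexRecoveryDemo {
    public static void main(String[] args) {
        int shift = 2;          // log2(element size), e.g. 4-byte oops
        int baseOff = 16;       // stand-in for arrayOopDesc::base_offset_in_bytes
        int cacheLow = -128;    // value boxed at cache[0]

        for (int i = -128; i <= 127; i++) {
            // Address offset of the load from cache[i - cacheLow]:
            int addr = baseOff + ((i - cacheLow) << shift);
            // What eliminate_autobox computes to undo it:
            int offset = baseOff - (cacheLow << shift);
            int recovered = (addr - offset) >> shift;
            if (recovered != i) throw new AssertionError(i);
        }
        System.out.println("original int recovered for every cached value");
    }
}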


//------------------------------Ideal------------------------------------------
// If the load is from Field memory and the pointer is non-null, we can
// zero out the control input.
// If the offset is constant and the base is an object allocation,
// try to hook me up to the exact initializing store.
...

    if (base != NULL
        && phase->type(base)->higher_equal(TypePtr::NOTNULL)
        && detect_dominating_control(base->in(0), phase->C->start())) {
      // A method-invariant, non-null address (constant or 'this' argument).
      set_req(MemNode::Control, NULL);
    }
  }

  if (EliminateAutoBox && can_reshape && in(Address)->is_AddP()) {
    Node* base = in(Address)->in(AddPNode::Base);
    if (base != NULL) {
      Compile::AliasType* atp = phase->C->alias_type(adr_type());
      if (is_autobox_object(atp)) {
        Node* result = eliminate_autobox(phase);
        if (result != NULL) return result;
      }
    }
  }

  // Check for prior store with a different base or offset; make Load
  // independent.  Skip through any number of them.  Bail out if the stores
...

        // In any case, do not allow the join, per se, to empty out the type.
        if (jt->empty() && !t->empty()) {
          // This can happen if an interface-typed array narrows to a class type.
          jt = _type;
        }

        if (EliminateAutoBox) {
          // The pointers in the autobox arrays are always non-null
          Node* base = in(Address)->in(AddPNode::Base);
          if (base != NULL) {
            Compile::AliasType* atp = phase->C->alias_type(base->adr_type());
            if (is_autobox_cache(atp)) {
              return jt->join(TypePtr::NOTNULL)->is_ptr();
            }
          }
        }
        return jt;
      }
    }
  } else if (tp->base() == Type::InstPtr) {
    assert( off != Type::OffsetBot ||
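The Value() change above is sound because IntegerCache's static initializer fills every slot before any valueOf call can read the array, so a load from it never yields null. A hedged sanity check from the Java side:

public class CacheNonNullDemo {
    public static void main(String[] args) {
        // Every box in [-128, 127] exists once the cache class is
        // initialized, which is why the compiler may type loads from
        // the cache array as NOTNULL.
        for (int i = -128; i <= 127; i++) {
            if (Integer.valueOf(i) == null) throw new AssertionError(i);
        }
        System.out.println("every cached Integer is non-null");
    }
}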