comparison src/share/vm/opto/library_call.cpp @ 4771:22cee0ee8927

Merge
author kvn
date Fri, 06 Jan 2012 20:09:20 -0800
parents 52b5d32fbfaf 1dc233a8c7fe
children 35acf8f0a2e4
comparing 4755:2b3acb34791f (left column) with 4771:22cee0ee8927 (right column)
2163 float unlikely = PROB_UNLIKELY(0.999); 2163 float unlikely = PROB_UNLIKELY(0.999);
2164 2164
2165 IdealKit ideal(this); 2165 IdealKit ideal(this);
2166 #define __ ideal. 2166 #define __ ideal.
2167 2167
2168 const int reference_type_offset = instanceKlass::reference_type_offset_in_bytes() + 2168 const int reference_type_offset = in_bytes(instanceKlass::reference_type_offset());
2169 sizeof(oopDesc);
2170 2169
2171 Node* referent_off = __ ConX(java_lang_ref_Reference::referent_offset); 2170 Node* referent_off = __ ConX(java_lang_ref_Reference::referent_offset);
2172 2171
2173 __ if_then(offset, BoolTest::eq, referent_off, unlikely); { 2172 __ if_then(offset, BoolTest::eq, referent_off, unlikely); {
2174 __ if_then(base_oop, BoolTest::ne, null(), likely); { 2173 __ if_then(base_oop, BoolTest::ne, null(), likely); {
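The hunk above shows the refactoring this changeset applies throughout the file: `foo_offset_in_bytes() + sizeof(oopDesc)` becomes `in_bytes(foo_offset())`. The offset accessors now return a ByteSize whose value already accounts for the klass object's header, so call sites stop adding `sizeof(oopDesc)` by hand. Below is a minimal, self-contained sketch of that idiom; the ByteSize class, in_bytes() helper, and the FakeHeader/FakeKlass layout are simplified stand-ins for illustration, not HotSpot's real definitions in utilities/sizes.hpp.

#include <cstddef>

// Simplified stand-in for HotSpot's ByteSize: a typed byte offset whose
// value already includes any header adjustment.
class ByteSize {
  int _v;
 public:
  explicit ByteSize(int v) : _v(v) {}
  int value() const { return _v; }
};

inline int in_bytes(ByteSize b) { return b.value(); }

// Hypothetical layout, used only for this illustration.
struct FakeHeader { void* mark; void* klass; };            // plays the role of oopDesc
struct FakeKlass  { FakeHeader hdr; int reference_type; };

// Old style: offset within the payload; every caller added the header itself.
inline int reference_type_offset_in_bytes() {
  return (int)(offsetof(FakeKlass, reference_type) - sizeof(FakeHeader));
}

// New style: the accessor returns the complete offset as a ByteSize.
inline ByteSize reference_type_offset() {
  return ByteSize((int)offsetof(FakeKlass, reference_type));
}

int main() {
  int old_way = reference_type_offset_in_bytes() + (int)sizeof(FakeHeader);
  int new_way = in_bytes(reference_type_offset());
  return (old_way == new_way) ? 0 : 1;   // both name the same byte offset
}

Either expression yields the same byte offset; the point of the change is that the header adjustment now lives in one place (the accessor) instead of at every call site.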
2804 if (stopped()) return true; // argument was like int.class 2803 if (stopped()) return true; // argument was like int.class
2805 2804
2806 // Note: The argument might still be an illegal value like 2805 // Note: The argument might still be an illegal value like
2807 // Serializable.class or Object[].class. The runtime will handle it. 2806 // Serializable.class or Object[].class. The runtime will handle it.
2808 // But we must make an explicit check for initialization. 2807 // But we must make an explicit check for initialization.
2809 Node* insp = basic_plus_adr(kls, instanceKlass::init_state_offset_in_bytes() + sizeof(oopDesc)); 2808 Node* insp = basic_plus_adr(kls, in_bytes(instanceKlass::init_state_offset()));
2810 // Use T_BOOLEAN for instanceKlass::_init_state so the compiler 2809 // Use T_BOOLEAN for instanceKlass::_init_state so the compiler
2811 // can generate code to load it as unsigned byte. 2810 // can generate code to load it as unsigned byte.
2812 Node* inst = make_load(NULL, insp, TypeInt::UBYTE, T_BOOLEAN); 2811 Node* inst = make_load(NULL, insp, TypeInt::UBYTE, T_BOOLEAN);
2813 Node* bits = intcon(instanceKlass::fully_initialized); 2812 Node* bits = intcon(instanceKlass::fully_initialized);
2814 Node* test = _gvn.transform( new (C, 3) SubINode(inst, bits) ); 2813 Node* test = _gvn.transform( new (C, 3) SubINode(inst, bits) );
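The guard above loads instanceKlass::_init_state as an unsigned byte (T_BOOLEAN) and subtracts fully_initialized; a non-zero result sends execution down the path that still performs the class-initialization check. Stripped of the node machinery, the emitted test is roughly the following (hypothetical names, not HotSpot's declarations):

#include <cstdint>

// Hypothetical stand-ins for the initialization states involved here.
enum FakeInitState : uint8_t { fake_loaded = 1, fake_being_initialized = 3, fake_fully_initialized = 4 };

struct FakeInstanceKlass {
  uint8_t init_state;   // one byte, so it can be loaded as an unsigned byte
};

// SubINode(inst, bits) followed by a branch on "!= 0": non-zero means the
// class still needs its initialization handled at runtime.
bool needs_init_check(const FakeInstanceKlass* k) {
  int test = (int)k->init_state - (int)fake_fully_initialized;
  return test != 0;
}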
2954 } 2953 }
2955 2954
2956 //---------------------------load_mirror_from_klass---------------------------- 2955 //---------------------------load_mirror_from_klass----------------------------
2957 // Given a klass oop, load its java mirror (a java.lang.Class oop). 2956 // Given a klass oop, load its java mirror (a java.lang.Class oop).
2958 Node* LibraryCallKit::load_mirror_from_klass(Node* klass) { 2957 Node* LibraryCallKit::load_mirror_from_klass(Node* klass) {
2959 Node* p = basic_plus_adr(klass, Klass::java_mirror_offset_in_bytes() + sizeof(oopDesc)); 2958 Node* p = basic_plus_adr(klass, in_bytes(Klass::java_mirror_offset()));
2960 return make_load(NULL, p, TypeInstPtr::MIRROR, T_OBJECT); 2959 return make_load(NULL, p, TypeInstPtr::MIRROR, T_OBJECT);
2961 } 2960 }
2962 2961
2963 //-----------------------load_klass_from_mirror_common------------------------- 2962 //-----------------------load_klass_from_mirror_common-------------------------
2964 // Given a java mirror (a java.lang.Class oop), load its corresponding klass oop. 2963 // Given a java mirror (a java.lang.Class oop), load its corresponding klass oop.
2994 // Use this for JVM_ACC_INTERFACE, JVM_ACC_IS_CLONEABLE, JVM_ACC_HAS_FINALIZER. 2993 // Use this for JVM_ACC_INTERFACE, JVM_ACC_IS_CLONEABLE, JVM_ACC_HAS_FINALIZER.
2995 // Fall through if (mods & mask) == bits, take the guard otherwise. 2994 // Fall through if (mods & mask) == bits, take the guard otherwise.
2996 Node* LibraryCallKit::generate_access_flags_guard(Node* kls, int modifier_mask, int modifier_bits, RegionNode* region) { 2995 Node* LibraryCallKit::generate_access_flags_guard(Node* kls, int modifier_mask, int modifier_bits, RegionNode* region) {
2997 // Branch around if the given klass has the given modifier bit set. 2996 // Branch around if the given klass has the given modifier bit set.
2998 // Like generate_guard, adds a new path onto the region. 2997 // Like generate_guard, adds a new path onto the region.
2999 Node* modp = basic_plus_adr(kls, Klass::access_flags_offset_in_bytes() + sizeof(oopDesc)); 2998 Node* modp = basic_plus_adr(kls, in_bytes(Klass::access_flags_offset()));
3000 Node* mods = make_load(NULL, modp, TypeInt::INT, T_INT); 2999 Node* mods = make_load(NULL, modp, TypeInt::INT, T_INT);
3001 Node* mask = intcon(modifier_mask); 3000 Node* mask = intcon(modifier_mask);
3002 Node* bits = intcon(modifier_bits); 3001 Node* bits = intcon(modifier_bits);
3003 Node* mbit = _gvn.transform( new (C, 3) AndINode(mods, mask) ); 3002 Node* mbit = _gvn.transform( new (C, 3) AndINode(mods, mask) );
3004 Node* cmp = _gvn.transform( new (C, 3) CmpINode(mbit, bits) ); 3003 Node* cmp = _gvn.transform( new (C, 3) CmpINode(mbit, bits) );
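generate_access_flags_guard (partially shown above) implements the contract stated in its header comment: fall through when (mods & mask) == bits, branch onto the guard region otherwise. The AndI/CmpI node pair reduces to this scalar test (a sketch with a hypothetical helper name):

// mods - the klass's access flags, loaded at Klass::access_flags_offset()
// mask - which bits to inspect, e.g. JVM_ACC_INTERFACE
// bits - the value those bits must have for the fall-through path
bool take_guard_path(int mods, int modifier_mask, int modifier_bits) {
  int mbit = mods & modifier_mask;    // AndINode
  return mbit != modifier_bits;       // CmpINode; "ne" sends control to the guard
}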
3115 query_value = gen_instanceof(obj, kls); 3114 query_value = gen_instanceof(obj, kls);
3116 _sp -= nargs; 3115 _sp -= nargs;
3117 break; 3116 break;
3118 3117
3119 case vmIntrinsics::_getModifiers: 3118 case vmIntrinsics::_getModifiers:
3120 p = basic_plus_adr(kls, Klass::modifier_flags_offset_in_bytes() + sizeof(oopDesc)); 3119 p = basic_plus_adr(kls, in_bytes(Klass::modifier_flags_offset()));
3121 query_value = make_load(NULL, p, TypeInt::INT, T_INT); 3120 query_value = make_load(NULL, p, TypeInt::INT, T_INT);
3122 break; 3121 break;
3123 3122
3124 case vmIntrinsics::_isInterface: 3123 case vmIntrinsics::_isInterface:
3125 // (To verify this code sequence, check the asserts in JVM_IsInterface.) 3124 // (To verify this code sequence, check the asserts in JVM_IsInterface.)
3155 phi->add_req(null()); 3154 phi->add_req(null());
3156 if (generate_array_guard(kls, region) != NULL) 3155 if (generate_array_guard(kls, region) != NULL)
3157 // A guard was added. If the guard is taken, it was an array. 3156 // A guard was added. If the guard is taken, it was an array.
3158 phi->add_req(makecon(TypeInstPtr::make(env()->Object_klass()->java_mirror()))); 3157 phi->add_req(makecon(TypeInstPtr::make(env()->Object_klass()->java_mirror())));
3159 // If we fall through, it's a plain class. Get its _super. 3158 // If we fall through, it's a plain class. Get its _super.
3160 p = basic_plus_adr(kls, Klass::super_offset_in_bytes() + sizeof(oopDesc)); 3159 p = basic_plus_adr(kls, in_bytes(Klass::super_offset()));
3161 kls = _gvn.transform( LoadKlassNode::make(_gvn, immutable_memory(), p, TypeRawPtr::BOTTOM, TypeKlassPtr::OBJECT_OR_NULL) ); 3160 kls = _gvn.transform( LoadKlassNode::make(_gvn, immutable_memory(), p, TypeRawPtr::BOTTOM, TypeKlassPtr::OBJECT_OR_NULL) );
3162 null_ctl = top(); 3161 null_ctl = top();
3163 kls = null_check_oop(kls, &null_ctl); 3162 kls = null_check_oop(kls, &null_ctl);
3164 if (null_ctl != top()) { 3163 if (null_ctl != top()) {
3165 // If the guard is taken, Object.superClass is null (both klass and mirror). 3164 // If the guard is taken, Object.superClass is null (both klass and mirror).
3173 3172
3174 case vmIntrinsics::_getComponentType: 3173 case vmIntrinsics::_getComponentType:
3175 if (generate_array_guard(kls, region) != NULL) { 3174 if (generate_array_guard(kls, region) != NULL) {
3176 // Be sure to pin the oop load to the guard edge just created: 3175 // Be sure to pin the oop load to the guard edge just created:
3177 Node* is_array_ctrl = region->in(region->req()-1); 3176 Node* is_array_ctrl = region->in(region->req()-1);
3178 Node* cma = basic_plus_adr(kls, in_bytes(arrayKlass::component_mirror_offset()) + sizeof(oopDesc)); 3177 Node* cma = basic_plus_adr(kls, in_bytes(arrayKlass::component_mirror_offset()));
3179 Node* cmo = make_load(is_array_ctrl, cma, TypeInstPtr::MIRROR, T_OBJECT); 3178 Node* cmo = make_load(is_array_ctrl, cma, TypeInstPtr::MIRROR, T_OBJECT);
3180 phi->add_req(cmo); 3179 phi->add_req(cmo);
3181 } 3180 }
3182 query_value = null(); // non-array case is null 3181 query_value = null(); // non-array case is null
3183 break; 3182 break;
3184 3183
3185 case vmIntrinsics::_getClassAccessFlags: 3184 case vmIntrinsics::_getClassAccessFlags:
3186 p = basic_plus_adr(kls, Klass::access_flags_offset_in_bytes() + sizeof(oopDesc)); 3185 p = basic_plus_adr(kls, in_bytes(Klass::access_flags_offset()));
3187 query_value = make_load(NULL, p, TypeInt::INT, T_INT); 3186 query_value = make_load(NULL, p, TypeInt::INT, T_INT);
3188 break; 3187 break;
3189 3188
3190 default: 3189 default:
3191 ShouldNotReachHere(); 3190 ShouldNotReachHere();
4194 void LibraryCallKit::copy_to_clone(Node* obj, Node* alloc_obj, Node* obj_size, bool is_array, bool card_mark) { 4193 void LibraryCallKit::copy_to_clone(Node* obj, Node* alloc_obj, Node* obj_size, bool is_array, bool card_mark) {
4195 assert(obj_size != NULL, ""); 4194 assert(obj_size != NULL, "");
4196 Node* raw_obj = alloc_obj->in(1); 4195 Node* raw_obj = alloc_obj->in(1);
4197 assert(alloc_obj->is_CheckCastPP() && raw_obj->is_Proj() && raw_obj->in(0)->is_Allocate(), ""); 4196 assert(alloc_obj->is_CheckCastPP() && raw_obj->is_Proj() && raw_obj->in(0)->is_Allocate(), "");
4198 4197
4198 AllocateNode* alloc = NULL;
4199 if (ReduceBulkZeroing) { 4199 if (ReduceBulkZeroing) {
4200 // We will be completely responsible for initializing this object - 4200 // We will be completely responsible for initializing this object -
4201 // mark Initialize node as complete. 4201 // mark Initialize node as complete.
4202 AllocateNode* alloc = AllocateNode::Ideal_allocation(alloc_obj, &_gvn); 4202 alloc = AllocateNode::Ideal_allocation(alloc_obj, &_gvn);
4203 // The object was just allocated - there should be no any stores! 4203 // The object was just allocated - there should be no any stores!
4204 guarantee(alloc != NULL && alloc->maybe_set_complete(&_gvn), ""); 4204 guarantee(alloc != NULL && alloc->maybe_set_complete(&_gvn), "");
4205 // Mark as complete_with_arraycopy so that on AllocateNode
4206 // expansion, we know this AllocateNode is initialized by an array
4207 // copy and a StoreStore barrier exists after the array copy.
4208 alloc->initialization()->set_complete_with_arraycopy();
4205 } 4209 }
4206 4210
4207 // Copy the fastest available way. 4211 // Copy the fastest available way.
4208 // TODO: generate fields copies for small objects instead. 4212 // TODO: generate fields copies for small objects instead.
4209 Node* src = obj; 4213 Node* src = obj;
4261 T_OBJECT, 4265 T_OBJECT,
4262 false); 4266 false);
4263 } 4267 }
4264 4268
4265 // Do not let reads from the cloned object float above the arraycopy. 4269 // Do not let reads from the cloned object float above the arraycopy.
4266 insert_mem_bar(Op_MemBarCPUOrder); 4270 if (alloc != NULL) {
4271 // Do not let stores that initialize this object be reordered with
4272 // a subsequent store that would make this object accessible by
4273 // other threads.
4274 // Record what AllocateNode this StoreStore protects so that
4275 // escape analysis can go from the MemBarStoreStoreNode to the
4276 // AllocateNode and eliminate the MemBarStoreStoreNode if possible
4277 // based on the escape status of the AllocateNode.
4278 insert_mem_bar(Op_MemBarStoreStore, alloc->proj_out(AllocateNode::RawAddress));
4279 } else {
4280 insert_mem_bar(Op_MemBarCPUOrder);
4281 }
4267 } 4282 }
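The substance of the copy_to_clone change sits at the tail of the hunk just above: when ReduceBulkZeroing lets the kit recover the clone's AllocateNode, the Initialize node is flagged complete_with_arraycopy and the trailing MemBarCPUOrder is replaced by a MemBarStoreStore anchored to the allocation's RawAddress projection, so escape analysis can drop the barrier for clones that never escape. The decision boils down to the shape below (a simplified sketch; AllocHandle, emit_store_store_barrier, and emit_cpu_order_barrier are illustrative stand-ins, not GraphKit's real API):

#include <cstdio>

struct AllocHandle { const void* raw_address_proj; };   // stands in for AllocateNode

// Illustrative stubs standing in for GraphKit's insert_mem_bar(...).
void emit_store_store_barrier(const void* protected_alloc) {
  std::printf("MemBarStoreStore protecting allocation %p\n", protected_alloc);
}
void emit_cpu_order_barrier() {
  std::printf("MemBarCPUOrder\n");
}

void finish_clone_copy(const AllocHandle* alloc) {
  if (alloc != nullptr) {
    // We know which allocation the copy initializes: fence its initializing
    // stores against a later store that publishes the object, and record the
    // allocation so escape analysis can remove the barrier for non-escaping
    // clones.
    emit_store_store_barrier(alloc->raw_address_proj);
  } else {
    // Allocation not tracked (e.g. ReduceBulkZeroing is off): keep the old,
    // conservative ordering barrier.
    emit_cpu_order_barrier();
  }
}

int main() {
  AllocHandle a = { &a };
  finish_clone_copy(&a);      // store-store barrier tied to the allocation
  finish_clone_copy(nullptr); // fallback ordering barrier
  return 0;
}

The same pattern is applied again in the arraycopy slow path further down (lines 5019-5035 of the new file), where the old unconditional InsertMemBarAfterArraycopy check becomes the else branch.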
4268 4283
4269 //------------------------inline_native_clone---------------------------- 4284 //------------------------inline_native_clone----------------------------
4270 // Here are the simple edge cases: 4285 // Here are the simple edge cases:
4271 // null receiver => normal trap 4286 // null receiver => normal trap
4857 // Plug failing path into checked_oop_disjoint_arraycopy 4872 // Plug failing path into checked_oop_disjoint_arraycopy
4858 if (not_subtype_ctrl != top()) { 4873 if (not_subtype_ctrl != top()) {
4859 PreserveJVMState pjvms(this); 4874 PreserveJVMState pjvms(this);
4860 set_control(not_subtype_ctrl); 4875 set_control(not_subtype_ctrl);
4861 // (At this point we can assume disjoint_bases, since types differ.) 4876 // (At this point we can assume disjoint_bases, since types differ.)
4862 int ek_offset = objArrayKlass::element_klass_offset_in_bytes() + sizeof(oopDesc); 4877 int ek_offset = in_bytes(objArrayKlass::element_klass_offset());
4863 Node* p1 = basic_plus_adr(dest_klass, ek_offset); 4878 Node* p1 = basic_plus_adr(dest_klass, ek_offset);
4864 Node* n1 = LoadKlassNode::make(_gvn, immutable_memory(), p1, TypeRawPtr::BOTTOM); 4879 Node* n1 = LoadKlassNode::make(_gvn, immutable_memory(), p1, TypeRawPtr::BOTTOM);
4865 Node* dest_elem_klass = _gvn.transform(n1); 4880 Node* dest_elem_klass = _gvn.transform(n1);
4866 Node* cv = generate_checkcast_arraycopy(adr_type, 4881 Node* cv = generate_checkcast_arraycopy(adr_type,
4867 dest_elem_klass, 4882 dest_elem_klass,
5004 // The next memory barrier is added to avoid it. If the arraycopy can be 5019 // The next memory barrier is added to avoid it. If the arraycopy can be
5005 // optimized away (which it can, sometimes) then we can manually remove 5020 // optimized away (which it can, sometimes) then we can manually remove
5006 // the membar also. 5021 // the membar also.
5007 // 5022 //
5008 // Do not let reads from the cloned object float above the arraycopy. 5023 // Do not let reads from the cloned object float above the arraycopy.
5009 if (InsertMemBarAfterArraycopy || alloc != NULL) 5024 if (alloc != NULL) {
5025 // Do not let stores that initialize this object be reordered with
5026 // a subsequent store that would make this object accessible by
5027 // other threads.
5028 // Record what AllocateNode this StoreStore protects so that
5029 // escape analysis can go from the MemBarStoreStoreNode to the
5030 // AllocateNode and eliminate the MemBarStoreStoreNode if possible
5031 // based on the escape status of the AllocateNode.
5032 insert_mem_bar(Op_MemBarStoreStore, alloc->proj_out(AllocateNode::RawAddress));
5033 } else if (InsertMemBarAfterArraycopy)
5010 insert_mem_bar(Op_MemBarCPUOrder); 5034 insert_mem_bar(Op_MemBarCPUOrder);
5011 } 5035 }
5012 5036
5013 5037
5014 // Helper function which determines if an arraycopy immediately follows 5038 // Helper function which determines if an arraycopy immediately follows
5308 5332
5309 // Pick out the parameters required to perform a store-check 5333 // Pick out the parameters required to perform a store-check
5310 // for the target array. This is an optimistic check. It will 5334 // for the target array. This is an optimistic check. It will
5311 // look in each non-null element's class, at the desired klass's 5335 // look in each non-null element's class, at the desired klass's
5312 // super_check_offset, for the desired klass. 5336 // super_check_offset, for the desired klass.
5313 int sco_offset = Klass::super_check_offset_offset_in_bytes() + sizeof(oopDesc); 5337 int sco_offset = in_bytes(Klass::super_check_offset_offset());
5314 Node* p3 = basic_plus_adr(dest_elem_klass, sco_offset); 5338 Node* p3 = basic_plus_adr(dest_elem_klass, sco_offset);
5315 Node* n3 = new(C, 3) LoadINode(NULL, memory(p3), p3, _gvn.type(p3)->is_ptr()); 5339 Node* n3 = new(C, 3) LoadINode(NULL, memory(p3), p3, _gvn.type(p3)->is_ptr());
5316 Node* check_offset = ConvI2X(_gvn.transform(n3)); 5340 Node* check_offset = ConvI2X(_gvn.transform(n3));
5317 Node* check_value = dest_elem_klass; 5341 Node* check_value = dest_elem_klass;
5318 5342
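This final hunk feeds generate_checkcast_arraycopy: it loads the destination element klass's super_check_offset and passes it, together with the klass itself as the check value, so each copied element can be vetted with HotSpot's constant-time subtype probe (look inside the element's klass at check_offset and compare against check_value). A self-contained sketch of that probe, using a deliberately fake klass layout, follows:

#include <cstddef>
#include <cstring>

// Deliberately simplified klass layout, for illustration only.
struct FakeKlass {
  int              super_check_offset;   // where subtypes cache a pointer to this klass
  const FakeKlass* primary_super;        // one-slot "display" for this sketch
};

// Per-element store check: probe the element's klass at the destination
// element klass's super_check_offset and compare with the expected klass.
bool fast_subtype_hit(const FakeKlass* elem_klass, const FakeKlass* dest_elem_klass) {
  int check_offset = dest_elem_klass->super_check_offset;       // the sco load above
  const char* base = reinterpret_cast<const char*>(elem_klass);
  const FakeKlass* probed;
  std::memcpy(&probed, base + check_offset, sizeof(probed));
  return probed == dest_elem_klass;                             // hit: store is legal
}

int main() {
  FakeKlass object_klass = { (int)offsetof(FakeKlass, primary_super), nullptr };
  FakeKlass string_klass = { (int)offsetof(FakeKlass, primary_super), &object_klass };
  // Storing a "String" into an "Object[]": probing String's klass at Object's
  // super_check_offset finds Object, so the fast path accepts the store.
  return fast_subtype_hit(&string_klass, &object_klass) ? 0 : 1;
}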