src/share/vm/opto/library_call.cpp @ 902:fc2281ddce3c (comparison with 901:2b9164d13ce9)

6868269: CompileTheWorld assertion failure introduced by the reexecute bit implementation
Summary: Improvement on reexecute implementation to fix the assertion failure
Reviewed-by: kvn, never

author   cfang
date     Tue, 04 Aug 2009 21:32:08 -0700
parents  9987d9d5eb0e
children 8fe1963e3964
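The substance of the change, visible in both hunks below, is that the manual stack bookkeeping around individual checks (_sp += nargs; ... _sp -= nargs;) is replaced by one enclosing RAII scope, { PreserveReexecuteState preexecs(this); ... }, inside which _sp is bumped once and jvms()->set_should_reexecute(true) is set once; the closing brace carries the comment "original reexecute and sp are set back here". The sketch below is a self-contained toy model of that save-and-restore idiom, written only from what the diff shows: the ToyKit type and all member names are invented for illustration and are not the HotSpot classes.

#include <cassert>

// Toy stand-in for the two pieces of parser state the diff manipulates:
// the expression stack depth (_sp) and the JVMState "reexecute" bit.
struct ToyKit {
  int  sp               = 0;
  bool should_reexecute = false;
};

// Modeled on PreserveReexecuteState: snapshot on construction, restore in the
// destructor, so the caller's state comes back on normal exit and on early
// returns alike.
class ToyPreserveReexecuteState {
  ToyKit* _kit;
  int     _saved_sp;
  bool    _saved_reexecute;
 public:
  explicit ToyPreserveReexecuteState(ToyKit* kit)
    : _kit(kit), _saved_sp(kit->sp), _saved_reexecute(kit->should_reexecute) {}
  ~ToyPreserveReexecuteState() {   // "original reexecute and sp are set back here"
    _kit->should_reexecute = _saved_reexecute;
    _kit->sp               = _saved_sp;
  }
};

int main() {
  ToyKit kit;
  {
    ToyPreserveReexecuteState preexecs(&kit);
    kit.sp += 2;                   // _sp += nargs;
    kit.should_reexecute = true;   // jvms()->set_should_reexecute(true);
    // ... expand the intrinsic; any uncommon trap taken here now sees a state
    // that re-executes the invoke bytecode with its arguments still on stack ...
  }
  assert(kit.sp == 0 && !kit.should_reexecute);  // caller's state is back
  return 0;
}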
--- src/share/vm/opto/library_call.cpp (901:2b9164d13ce9)
+++ src/share/vm/opto/library_call.cpp (902:fc2281ddce3c)
@@ -3167,69 +3167,66 @@ LibraryCallKit::inline_array_copyOf
   Node* original          = argument(0);
   Node* start             = is_copyOfRange? argument(1): intcon(0);
   Node* end               = is_copyOfRange? argument(2): argument(1);
   Node* array_type_mirror = is_copyOfRange? argument(3): argument(2);

-  _sp += nargs;  // set original stack for use by uncommon_trap
-  array_type_mirror = do_null_check(array_type_mirror, T_OBJECT);
-  original          = do_null_check(original, T_OBJECT);
-  _sp -= nargs;
-
-  // Check if a null path was taken unconditionally.
-  if (stopped())  return true;
-
-  Node* orig_length = load_array_length(original);
-
-  Node* klass_node = load_klass_from_mirror(array_type_mirror, false, nargs,
-                                            NULL, 0);
-  _sp += nargs;  // set original stack for use by uncommon_trap
-  klass_node = do_null_check(klass_node, T_OBJECT);
-  _sp -= nargs;
-
-  RegionNode* bailout = new (C, 1) RegionNode(1);
-  record_for_igvn(bailout);
-
-  // Despite the generic type of Arrays.copyOf, the mirror might be int, int[], etc.
-  // Bail out if that is so.
-  Node* not_objArray = generate_non_objArray_guard(klass_node, bailout);
-  if (not_objArray != NULL) {
-    // Improve the klass node's type from the new optimistic assumption:
-    ciKlass* ak = ciArrayKlass::make(env()->Object_klass());
-    const Type* akls = TypeKlassPtr::make(TypePtr::NotNull, ak, 0/*offset*/);
-    Node* cast = new (C, 2) CastPPNode(klass_node, akls);
-    cast->init_req(0, control());
-    klass_node = _gvn.transform(cast);
-  }
-
-  // Bail out if either start or end is negative.
-  generate_negative_guard(start, bailout, &start);
-  generate_negative_guard(end,   bailout, &end);
-
-  Node* length = end;
-  if (_gvn.type(start) != TypeInt::ZERO) {
-    length = _gvn.transform( new (C, 3) SubINode(end, start) );
-  }
-
-  // Bail out if length is negative.
-  // ...Not needed, since the new_array will throw the right exception.
-  //generate_negative_guard(length, bailout, &length);
-
-  if (bailout->req() > 1) {
-    PreserveJVMState pjvms(this);
-    set_control( _gvn.transform(bailout) );
-    _sp += nargs;  // push the arguments back on the stack
-    uncommon_trap(Deoptimization::Reason_intrinsic,
-                  Deoptimization::Action_maybe_recompile);
-  }
-
-  if (!stopped()) {
-    Node *newcopy;
-    //set the original stack and the reexecute bit for the interpreter to reexecute
-    //the bytecode that invokes Arrays.copyOf if deoptimization happens
-    { PreserveReexecuteState preexecs(this);
-      _sp += nargs;
-      jvms()->set_should_reexecute(true);
+  Node* newcopy;
+
+  //set the original stack and the reexecute bit for the interpreter to reexecute
+  //the bytecode that invokes Arrays.copyOf if deoptimization happens
+  { PreserveReexecuteState preexecs(this);
+    _sp += nargs;
+    jvms()->set_should_reexecute(true);
+
+    array_type_mirror = do_null_check(array_type_mirror, T_OBJECT);
+    original          = do_null_check(original, T_OBJECT);
+
+    // Check if a null path was taken unconditionally.
+    if (stopped())  return true;
+
+    Node* orig_length = load_array_length(original);
+
+    Node* klass_node = load_klass_from_mirror(array_type_mirror, false, 0,
+                                              NULL, 0);
+    klass_node = do_null_check(klass_node, T_OBJECT);
+
+    RegionNode* bailout = new (C, 1) RegionNode(1);
+    record_for_igvn(bailout);
+
+    // Despite the generic type of Arrays.copyOf, the mirror might be int, int[], etc.
+    // Bail out if that is so.
+    Node* not_objArray = generate_non_objArray_guard(klass_node, bailout);
+    if (not_objArray != NULL) {
+      // Improve the klass node's type from the new optimistic assumption:
+      ciKlass* ak = ciArrayKlass::make(env()->Object_klass());
+      const Type* akls = TypeKlassPtr::make(TypePtr::NotNull, ak, 0/*offset*/);
+      Node* cast = new (C, 2) CastPPNode(klass_node, akls);
+      cast->init_req(0, control());
+      klass_node = _gvn.transform(cast);
+    }
+
+    // Bail out if either start or end is negative.
+    generate_negative_guard(start, bailout, &start);
+    generate_negative_guard(end,   bailout, &end);
+
+    Node* length = end;
+    if (_gvn.type(start) != TypeInt::ZERO) {
+      length = _gvn.transform( new (C, 3) SubINode(end, start) );
+    }
+
+    // Bail out if length is negative.
+    // ...Not needed, since the new_array will throw the right exception.
+    //generate_negative_guard(length, bailout, &length);
+
+    if (bailout->req() > 1) {
+      PreserveJVMState pjvms(this);
+      set_control( _gvn.transform(bailout) );
+      uncommon_trap(Deoptimization::Reason_intrinsic,
+                    Deoptimization::Action_maybe_recompile);
+    }
+
+    if (!stopped()) {

     // How many elements will we copy from the original?
     // The answer is MinI(orig_length - start, length).
     Node* orig_tail = _gvn.transform( new(C, 3) SubINode(orig_length, start) );
     Node* moved = generate_min_max(vmIntrinsics::_min, orig_tail, length);
@@ -3245,12 +3242,14 @@ LibraryCallKit::inline_array_copyOf
     bool disjoint_bases = true;
     bool length_never_negative = true;
     generate_arraycopy(TypeAryPtr::OOPS, T_OBJECT,
                        original, start, newcopy, intcon(0), moved,
                        disjoint_bases, length_never_negative);
-    } //original reexecute and sp are set back here
-
+    }
+  } //original reexecute and sp are set back here
+
+  if(!stopped()) {
     push(newcopy);
   }

   C->set_has_split_ifs(true); // Has chance for split-if optimization

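Condensed, the hunk above changes inline_array_copyOf from "restore _sp after each null check, re-push it only for the bailout trap" to "declare newcopy first, run the whole expansion (null checks, klass load, guards, allocation and copy) inside one reexecute scope, then push the result after the scope has restored the caller's state, guarded by a fresh if (!stopped())". That is also why load_klass_from_mirror now receives 0 instead of nargs and why the bailout path drops its _sp += nargs; // push the arguments back on the stack line: the stack already sits in the re-executable shape for the whole scope. The toy function below mirrors that control flow; every name in it is invented, and the stopped flag merely stands in for a parse that ended in an uncommon trap.

#include <cassert>

// Invented stand-ins: stack depth, reexecute bit, and a "stopped" flag that
// models a parse path ending in an uncommon trap or an unconditional null.
struct ToyKit { int sp = 0; bool should_reexecute = false; bool stopped = false; };

// Same save/restore idea as the sketch after the header.
struct ToyScope {
  ToyKit* k; int sp0; bool re0;
  explicit ToyScope(ToyKit* kit) : k(kit), sp0(kit->sp), re0(kit->should_reexecute) {}
  ~ToyScope() { k->should_reexecute = re0; k->sp = sp0; }
};

// Mirrors the new shape of inline_array_copyOf (heavily condensed).
bool expand_copyOf_like(ToyKit& kit, int nargs, bool null_seen, bool bails_out) {
  int newcopy = 0;                      // Node* newcopy;  (declared before the scope)
  {
    ToyScope preexecs(&kit);
    kit.sp += nargs;                    // _sp += nargs;
    kit.should_reexecute = true;        // jvms()->set_should_reexecute(true);

    if (null_seen) return true;         // "if (stopped()) return true;" - the scope still unwinds

    if (bails_out) {                    // bailout region -> uncommon_trap(...)
      kit.stopped = true;               // no manual _sp push-back is needed any more
    } else {
      newcopy = 7;                      // new_array + generate_arraycopy would go here
    }
  }                                     // original reexecute and sp are set back here

  if (!kit.stopped) {
    kit.sp += 1;                        // push(newcopy) onto the caller's restored stack
    (void)newcopy;
  }
  return true;
}

int main() {
  ToyKit kit;
  expand_copyOf_like(kit, 2, /*null_seen=*/false, /*bails_out=*/false);
  assert(kit.sp == 1 && !kit.should_reexecute);   // only the result remains pushed
  return 0;
}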
@@ -3998,47 +3997,53 @@ LibraryCallKit::inline_native_clone
 // These steps fold up nicely if and when the cloned object's klass
 // can be sharply typed as an object array, a type array, or an instance.
 //
 bool LibraryCallKit::inline_native_clone(bool is_virtual) {
   int nargs = 1;
-  Node* obj = null_check_receiver(callee());
-  if (stopped())  return true;
-  Node* obj_klass = load_object_klass(obj);
-  const TypeKlassPtr* tklass = _gvn.type(obj_klass)->isa_klassptr();
-  const TypeOopPtr*   toop   = ((tklass != NULL)
-                                ? tklass->as_instance_type()
-                                : TypeInstPtr::NOTNULL);
-
-  // Conservatively insert a memory barrier on all memory slices.
-  // Do not let writes into the original float below the clone.
-  insert_mem_bar(Op_MemBarCPUOrder);
-
-  // paths into result_reg:
-  enum {
-    _slow_path = 1,     // out-of-line call to clone method (virtual or not)
-    _objArray_path,     // plain array allocation, plus arrayof_oop_arraycopy
-    _array_path,        // plain array allocation, plus arrayof_long_arraycopy
-    _instance_path,     // plain instance allocation, plus arrayof_long_arraycopy
-    PATH_LIMIT
-  };
-  RegionNode* result_reg = new(C, PATH_LIMIT) RegionNode(PATH_LIMIT);
-  PhiNode*    result_val = new(C, PATH_LIMIT) PhiNode(result_reg,
-                                                      TypeInstPtr::NOTNULL);
-  PhiNode*    result_i_o = new(C, PATH_LIMIT) PhiNode(result_reg, Type::ABIO);
-  PhiNode*    result_mem = new(C, PATH_LIMIT) PhiNode(result_reg, Type::MEMORY,
-                                                      TypePtr::BOTTOM);
-  record_for_igvn(result_reg);
-
-  const TypePtr* raw_adr_type = TypeRawPtr::BOTTOM;
-  int raw_adr_idx = Compile::AliasIdxRaw;
-  const bool raw_mem_only = true;
+  PhiNode* result_val;

   //set the original stack and the reexecute bit for the interpreter to reexecute
   //the bytecode that invokes Object.clone if deoptimization happens
   { PreserveReexecuteState preexecs(this);
+    jvms()->set_should_reexecute(true);
+
+    //null_check_receiver will adjust _sp  (push and pop)
+    Node* obj = null_check_receiver(callee());
+    if (stopped())  return true;
+
     _sp += nargs;
-    jvms()->set_should_reexecute(true);
+
+    Node* obj_klass = load_object_klass(obj);
+    const TypeKlassPtr* tklass = _gvn.type(obj_klass)->isa_klassptr();
+    const TypeOopPtr*   toop   = ((tklass != NULL)
+                                  ? tklass->as_instance_type()
+                                  : TypeInstPtr::NOTNULL);
+
+    // Conservatively insert a memory barrier on all memory slices.
+    // Do not let writes into the original float below the clone.
+    insert_mem_bar(Op_MemBarCPUOrder);
+
+    // paths into result_reg:
+    enum {
+      _slow_path = 1,     // out-of-line call to clone method (virtual or not)
+      _objArray_path,     // plain array allocation, plus arrayof_oop_arraycopy
+      _array_path,        // plain array allocation, plus arrayof_long_arraycopy
+      _instance_path,     // plain instance allocation, plus arrayof_long_arraycopy
+      PATH_LIMIT
+    };
+    RegionNode* result_reg = new(C, PATH_LIMIT) RegionNode(PATH_LIMIT);
+    result_val             = new(C, PATH_LIMIT) PhiNode(result_reg,
+                                                        TypeInstPtr::NOTNULL);
+    PhiNode*    result_i_o = new(C, PATH_LIMIT) PhiNode(result_reg, Type::ABIO);
+    PhiNode*    result_mem = new(C, PATH_LIMIT) PhiNode(result_reg, Type::MEMORY,
+                                                        TypePtr::BOTTOM);
+    record_for_igvn(result_reg);
+
+    const TypePtr* raw_adr_type = TypeRawPtr::BOTTOM;
+    int raw_adr_idx = Compile::AliasIdxRaw;
+    const bool raw_mem_only = true;
+

     Node* array_ctl = generate_array_guard(obj_klass, (RegionNode*)NULL);
     if (array_ctl != NULL) {
       // It's an array.
       PreserveJVMState pjvms(this);
@@ -4139,16 +4144,16 @@ LibraryCallKit::inline_native_clone
     result_reg->init_req(_slow_path, control());
     result_val->init_req(_slow_path, slow_result);
     result_i_o ->set_req(_slow_path, i_o());
     result_mem ->set_req(_slow_path, reset_memory());
   }
+
+    // Return the combined state.
+    set_control( _gvn.transform(result_reg) );
+    set_i_o(     _gvn.transform(result_i_o) );
+    set_all_memory( _gvn.transform(result_mem) );
   } //original reexecute and sp are set back here
-
-  // Return the combined state.
-  set_control( _gvn.transform(result_reg) );
-  set_i_o(     _gvn.transform(result_i_o) );
-  set_all_memory( _gvn.transform(result_mem) );

   push(_gvn.transform(result_val));

   return true;
 }
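In inline_native_clone the same wrapping forces the two reorderings the hunk makes explicit: result_val turns into a bare declaration at the top of the function (PhiNode* result_val;) because the Phi is now created inside the reexecute scope yet pushed only after it closes, and null_check_receiver(callee()) runs inside the scope but before _sp += nargs because, per the new comment, it pushes and pops _sp itself; the combined control, i_o, and memory state is likewise committed with set_control / set_i_o / set_all_memory before the scope's closing brace. A minimal sketch of that "declare outside, build inside, publish after restore" ordering, again with invented names and toy stack accounting:

#include <cassert>

struct ToyState { int sp = 1; bool should_reexecute = false; };  // receiver already on the toy stack

// Same RAII idea as PreserveReexecuteState in the sketches above.
struct ScopedReexecute {
  ToyState* s; int sp0; bool re0;
  explicit ScopedReexecute(ToyState* st) : s(st), sp0(st->sp), re0(st->should_reexecute) {}
  ~ScopedReexecute() { s->sp = sp0; s->should_reexecute = re0; }
};

// Stand-in for null_check_receiver: it peeks at the receiver by pushing and
// popping it itself, which is why it runs before the explicit sp bump.
int toy_null_check_receiver(ToyState& st) { st.sp += 1; st.sp -= 1; return 42; }

int main() {
  ToyState st;
  int nargs = 1;
  int result_val = 0;                    // PhiNode* result_val;  (outlives the scope)
  {
    ScopedReexecute preexecs(&st);
    st.should_reexecute = true;          // jvms()->set_should_reexecute(true);
    int obj = toy_null_check_receiver(st);
    st.sp += nargs;                      // _sp += nargs;
    result_val = obj;                    // build the clone result and merge the paths here
    // set_control / set_i_o / set_all_memory happen before the scope closes
  }                                      // original reexecute and sp are set back here
  st.sp += 1;                            // push(_gvn.transform(result_val));
  assert(st.sp == 2 && !st.should_reexecute && result_val == 42);
  return 0;
}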