# HG changeset patch
# User kvn
# Date 1339428928 25200
# Node ID 829ee34e7cbd2b272fae8e3c184ce416a755ac52
# Parent  063451aefde8ffcccd2ed5b3a345f706f3560f01
# Parent  e1635876b206bc7a87386923c2cc419031500d99
Merge

diff -r 063451aefde8 -r 829ee34e7cbd make/jprt.properties
--- a/make/jprt.properties	Fri Jun 08 09:49:49 2012 -0700
+++ b/make/jprt.properties	Mon Jun 11 08:35:28 2012 -0700
@@ -54,72 +54,72 @@
 # Define the Solaris platforms we want for the various releases
 jprt.my.solaris.sparc.jdk8=solaris_sparc_5.10
 jprt.my.solaris.sparc.jdk7=solaris_sparc_5.10
-jprt.my.solaris.sparc.jdk7u4=${jprt.my.solaris.sparc.jdk7}
+jprt.my.solaris.sparc.jdk7u6=${jprt.my.solaris.sparc.jdk7}
 jprt.my.solaris.sparc=${jprt.my.solaris.sparc.${jprt.tools.default.release}}
 
 jprt.my.solaris.sparcv9.jdk8=solaris_sparcv9_5.10
 jprt.my.solaris.sparcv9.jdk7=solaris_sparcv9_5.10
-jprt.my.solaris.sparcv9.jdk7u4=${jprt.my.solaris.sparcv9.jdk7}
+jprt.my.solaris.sparcv9.jdk7u6=${jprt.my.solaris.sparcv9.jdk7}
 jprt.my.solaris.sparcv9=${jprt.my.solaris.sparcv9.${jprt.tools.default.release}}
 
 jprt.my.solaris.i586.jdk8=solaris_i586_5.10
 jprt.my.solaris.i586.jdk7=solaris_i586_5.10
-jprt.my.solaris.i586.jdk7u4=${jprt.my.solaris.i586.jdk7}
+jprt.my.solaris.i586.jdk7u6=${jprt.my.solaris.i586.jdk7}
 jprt.my.solaris.i586=${jprt.my.solaris.i586.${jprt.tools.default.release}}
 
 jprt.my.solaris.x64.jdk8=solaris_x64_5.10
 jprt.my.solaris.x64.jdk7=solaris_x64_5.10
-jprt.my.solaris.x64.jdk7u4=${jprt.my.solaris.x64.jdk7}
+jprt.my.solaris.x64.jdk7u6=${jprt.my.solaris.x64.jdk7}
 jprt.my.solaris.x64=${jprt.my.solaris.x64.${jprt.tools.default.release}}
 
 jprt.my.linux.i586.jdk8=linux_i586_2.6
 jprt.my.linux.i586.jdk7=linux_i586_2.6
-jprt.my.linux.i586.jdk7u4=${jprt.my.linux.i586.jdk7}
+jprt.my.linux.i586.jdk7u6=${jprt.my.linux.i586.jdk7}
 jprt.my.linux.i586=${jprt.my.linux.i586.${jprt.tools.default.release}}
 
 jprt.my.linux.x64.jdk8=linux_x64_2.6
 jprt.my.linux.x64.jdk7=linux_x64_2.6
-jprt.my.linux.x64.jdk7u4=${jprt.my.linux.x64.jdk7}
+jprt.my.linux.x64.jdk7u6=${jprt.my.linux.x64.jdk7}
 jprt.my.linux.x64=${jprt.my.linux.x64.${jprt.tools.default.release}}
 
 jprt.my.linux.ppc.jdk8=linux_ppc_2.6
 jprt.my.linux.ppc.jdk7=linux_ppc_2.6
-jprt.my.linux.ppc.jdk7u4=${jprt.my.linux.ppc.jdk7}
+jprt.my.linux.ppc.jdk7u6=${jprt.my.linux.ppc.jdk7}
 jprt.my.linux.ppc=${jprt.my.linux.ppc.${jprt.tools.default.release}}
 
 jprt.my.linux.ppcv2.jdk8=linux_ppcv2_2.6
 jprt.my.linux.ppcv2.jdk7=linux_ppcv2_2.6
-jprt.my.linux.ppcv2.jdk7u4=${jprt.my.linux.ppcv2.jdk7}
+jprt.my.linux.ppcv2.jdk7u6=${jprt.my.linux.ppcv2.jdk7}
 jprt.my.linux.ppcv2=${jprt.my.linux.ppcv2.${jprt.tools.default.release}}
 
 jprt.my.linux.ppcsflt.jdk8=linux_ppcsflt_2.6
 jprt.my.linux.ppcsflt.jdk7=linux_ppcsflt_2.6
-jprt.my.linux.ppcsflt.jdk7u4=${jprt.my.linux.ppcsflt.jdk7}
+jprt.my.linux.ppcsflt.jdk7u6=${jprt.my.linux.ppcsflt.jdk7}
 jprt.my.linux.ppcsflt=${jprt.my.linux.ppcsflt.${jprt.tools.default.release}}
 
 jprt.my.linux.armvfp.jdk8=linux_armvfp_2.6
 jprt.my.linux.armvfp.jdk7=linux_armvfp_2.6
-jprt.my.linux.armvfp.jdk7u4=${jprt.my.linux.armvfp.jdk7}
+jprt.my.linux.armvfp.jdk7u6=${jprt.my.linux.armvfp.jdk7}
 jprt.my.linux.armvfp=${jprt.my.linux.armvfp.${jprt.tools.default.release}}
 
 jprt.my.linux.armsflt.jdk8=linux_armsflt_2.6
 jprt.my.linux.armsflt.jdk7=linux_armsflt_2.6
-jprt.my.linux.armsflt.jdk7u4=${jprt.my.linux.armsflt.jdk7}
+jprt.my.linux.armsflt.jdk7u6=${jprt.my.linux.armsflt.jdk7}
 jprt.my.linux.armsflt=${jprt.my.linux.armsflt.${jprt.tools.default.release}}
 
 jprt.my.macosx.x64.jdk8=macosx_x64_10.7
 jprt.my.macosx.x64.jdk7=macosx_x64_10.7
-jprt.my.macosx.x64.jdk7u4=${jprt.my.macosx.x64.jdk7}
+jprt.my.macosx.x64.jdk7u6=${jprt.my.macosx.x64.jdk7}
 jprt.my.macosx.x64=${jprt.my.macosx.x64.${jprt.tools.default.release}}
 
 jprt.my.windows.i586.jdk8=windows_i586_5.1
 jprt.my.windows.i586.jdk7=windows_i586_5.1
-jprt.my.windows.i586.jdk7u4=${jprt.my.windows.i586.jdk7}
+jprt.my.windows.i586.jdk7u6=${jprt.my.windows.i586.jdk7}
 jprt.my.windows.i586=${jprt.my.windows.i586.${jprt.tools.default.release}}
 
 jprt.my.windows.x64.jdk8=windows_x64_5.2
 jprt.my.windows.x64.jdk7=windows_x64_5.2
-jprt.my.windows.x64.jdk7u4=${jprt.my.windows.x64.jdk7}
+jprt.my.windows.x64.jdk7u6=${jprt.my.windows.x64.jdk7}
 jprt.my.windows.x64=${jprt.my.windows.x64.${jprt.tools.default.release}}
 
 # Standard list of jprt build targets for this source tree
@@ -154,7 +154,7 @@
 
 jprt.build.targets.jdk8=${jprt.build.targets.all}
 jprt.build.targets.jdk7=${jprt.build.targets.all}
-jprt.build.targets.jdk7u4=${jprt.build.targets.all}
+jprt.build.targets.jdk7u6=${jprt.build.targets.all}
 jprt.build.targets=${jprt.build.targets.${jprt.tools.default.release}}
 
 # Subset lists of test targets for this source tree
@@ -447,7 +447,7 @@
 
 jprt.test.targets.jdk8=${jprt.test.targets.standard}
 jprt.test.targets.jdk7=${jprt.test.targets.standard}
-jprt.test.targets.jdk7u4=${jprt.test.targets.jdk7}
+jprt.test.targets.jdk7u6=${jprt.test.targets.jdk7}
 jprt.test.targets=${jprt.test.targets.${jprt.tools.default.release}}
 
 # The default test/Makefile targets that should be run
@@ -507,7 +507,7 @@
 
 jprt.make.rule.test.targets.jdk8=${jprt.make.rule.test.targets.standard}
 jprt.make.rule.test.targets.jdk7=${jprt.make.rule.test.targets.standard}
-jprt.make.rule.test.targets.jdk7u4=${jprt.make.rule.test.targets.jdk7}
+jprt.make.rule.test.targets.jdk7u6=${jprt.make.rule.test.targets.jdk7}
 jprt.make.rule.test.targets=${jprt.make.rule.test.targets.${jprt.tools.default.release}}
 
 # 7155453: Work-around to prevent popups on OSX from blocking test completion
diff -r 063451aefde8 -r 829ee34e7cbd src/share/vm/c1/c1_Canonicalizer.cpp
--- a/src/share/vm/c1/c1_Canonicalizer.cpp	Fri Jun 08 09:49:49 2012 -0700
+++ b/src/share/vm/c1/c1_Canonicalizer.cpp	Mon Jun 11 08:35:28 2012 -0700
@@ -42,6 +42,11 @@
   // the instruction stream (because the instruction list is embedded
   // in the instructions).
   if (canonical() != x) {
+#ifndef PRODUCT
+    if (!x->has_printable_bci()) {
+      x->set_printable_bci(bci());
+    }
+#endif
     if (PrintCanonicalization) {
       PrintValueVisitor do_print_value;
       canonical()->input_values_do(&do_print_value);
@@ -451,6 +456,28 @@
     }
     break;
   }
+  case vmIntrinsics::_isInstance : {
+    assert(x->number_of_arguments() == 2, "wrong type");
+
+    InstanceConstant* c = x->argument_at(0)->type()->as_InstanceConstant();
+    if (c != NULL && !c->value()->is_null_object()) {
+      // ciInstance::java_mirror_type() returns non-NULL only for Java mirrors
+      ciType* t = c->value()->as_instance()->java_mirror_type();
+      if (t->is_klass()) {
+        // substitute cls.isInstance(obj) of a constant Class into
+        // an InstanceOf instruction
+        InstanceOf* i = new InstanceOf(t->as_klass(), x->argument_at(1), x->state_before());
+        set_canonical(i);
+        // and try to canonicalize even further
+        do_InstanceOf(i);
+      } else {
+        assert(t->is_primitive_type(), "should be a primitive type");
+        // cls.isInstance(obj) always returns false for primitive classes
+        set_constant(0);
+      }
+    }
+    break;
+  }
   }
 }
 
@@ -677,8 +704,8 @@
           return;
         }
       }
+      set_bci(cmp->state_before()->bci());
       set_canonical(canon);
-      set_bci(cmp->state_before()->bci());
     }
   }
 } else if (l->as_InstanceOf() != NULL) {
diff -r 063451aefde8 -r 829ee34e7cbd src/share/vm/c1/c1_GraphBuilder.cpp
--- a/src/share/vm/c1/c1_GraphBuilder.cpp	Fri Jun 08 09:49:49 2012 -0700
+++ b/src/share/vm/c1/c1_GraphBuilder.cpp	Mon Jun 11 08:35:28 2012 -0700
@@ -3170,6 +3170,7 @@
     break;
 
   case vmIntrinsics::_getClass :
+  case vmIntrinsics::_isInstance :
     if (!InlineClassNatives) return false;
     preserves_state = true;
     break;
diff -r 063451aefde8 -r 829ee34e7cbd src/share/vm/c1/c1_Instruction.hpp
--- a/src/share/vm/c1/c1_Instruction.hpp	Fri Jun 08 09:49:49 2012 -0700
+++ b/src/share/vm/c1/c1_Instruction.hpp	Mon Jun 11 08:35:28 2012 -0700
@@ -302,8 +302,6 @@
 
   void update_exception_state(ValueStack* state);
 
-  bool has_printable_bci() const { return NOT_PRODUCT(_printable_bci != -99) PRODUCT_ONLY(false); }
-
  protected:
   void set_type(ValueType* type) {
     assert(type != NULL, "type must exist");
@@ -392,8 +390,9 @@
   // accessors
   int id() const { return _id; }
 #ifndef PRODUCT
+  bool has_printable_bci() const { return _printable_bci != -99; }
   int printable_bci() const { assert(has_printable_bci(), "_printable_bci should have been set"); return _printable_bci; }
-  void set_printable_bci(int bci) { NOT_PRODUCT(_printable_bci = bci;) }
+  void set_printable_bci(int bci) { _printable_bci = bci; }
 #endif
   int use_count() const { return _use_count; }
   int pin_state() const { return _pin_state; }
@@ -576,6 +575,7 @@
   , _block(b)
   , _index(index)
   {
+    NOT_PRODUCT(set_printable_bci(Value(b)->printable_bci()));
     if (type->is_illegal()) {
       make_illegal();
     }
@@ -631,7 +631,9 @@
   : Instruction(type)
   , _java_index(index)
   , _declared_type(declared)
-  {}
+  {
+    NOT_PRODUCT(set_printable_bci(-1));
+  }
 
   // accessors
   int java_index() const { return _java_index; }
diff -r 063451aefde8 -r 829ee34e7cbd src/share/vm/c1/c1_LIRGenerator.cpp
--- a/src/share/vm/c1/c1_LIRGenerator.cpp	Fri Jun 08 09:49:49 2012 -0700
+++ b/src/share/vm/c1/c1_LIRGenerator.cpp	Mon Jun 11 08:35:28 2012 -0700
@@ -1242,6 +1242,36 @@
                   NULL /* info */);
 }
 
+// Example: clazz.isInstance(object)
+void LIRGenerator::do_isInstance(Intrinsic* x) {
+  assert(x->number_of_arguments() == 2, "wrong type");
+
+  // TODO could try to substitute this node with an equivalent InstanceOf
+  // if clazz is known to be a constant Class. This will pick up newly found
+  // constants after HIR construction. I'll leave this to a future change.
+
+  // as a first cut, make a simple leaf call to runtime to stay platform independent.
+  // could follow the aastore example in a future change.
+
+  LIRItem clazz(x->argument_at(0), this);
+  LIRItem object(x->argument_at(1), this);
+  clazz.load_item();
+  object.load_item();
+  LIR_Opr result = rlock_result(x);
+
+  // need to perform null check on clazz
+  if (x->needs_null_check()) {
+    CodeEmitInfo* info = state_for(x);
+    __ null_check(clazz.result(), info);
+  }
+
+  LIR_Opr call_result = call_runtime(clazz.value(), object.value(),
+                                     CAST_FROM_FN_PTR(address, Runtime1::is_instance_of),
+                                     x->type(),
+                                     NULL); // NULL CodeEmitInfo results in a leaf call
+  __ move(call_result, result);
+}
+
 // Example: object.getClass ()
 void LIRGenerator::do_getClass(Intrinsic* x) {
   assert(x->number_of_arguments() == 1, "wrong type");
@@ -2951,6 +2981,7 @@
     break;
 
   case vmIntrinsics::_Object_init:    do_RegisterFinalizer(x); break;
+  case vmIntrinsics::_isInstance:     do_isInstance(x);        break;
   case vmIntrinsics::_getClass:       do_getClass(x);          break;
   case vmIntrinsics::_currentThread:  do_currentThread(x);     break;
 
diff -r 063451aefde8 -r 829ee34e7cbd src/share/vm/c1/c1_LIRGenerator.hpp
--- a/src/share/vm/c1/c1_LIRGenerator.hpp	Fri Jun 08 09:49:49 2012 -0700
+++ b/src/share/vm/c1/c1_LIRGenerator.hpp	Mon Jun 11 08:35:28 2012 -0700
@@ -238,6 +238,7 @@
   LIR_Opr getThreadPointer();
 
   void do_RegisterFinalizer(Intrinsic* x);
+  void do_isInstance(Intrinsic* x);
   void do_getClass(Intrinsic* x);
   void do_currentThread(Intrinsic* x);
   void do_MathIntrinsic(Intrinsic* x);
diff -r 063451aefde8 -r 829ee34e7cbd src/share/vm/c1/c1_Runtime1.cpp
--- a/src/share/vm/c1/c1_Runtime1.cpp	Fri Jun 08 09:49:49 2012 -0700
+++ b/src/share/vm/c1/c1_Runtime1.cpp	Mon Jun 11 08:35:28 2012 -0700
@@ -294,6 +294,7 @@
   FUNCTION_CASE(entry, SharedRuntime::lrem);
   FUNCTION_CASE(entry, SharedRuntime::dtrace_method_entry);
   FUNCTION_CASE(entry, SharedRuntime::dtrace_method_exit);
+  FUNCTION_CASE(entry, is_instance_of);
   FUNCTION_CASE(entry, trace_block_entry);
 #ifdef TRACE_HAVE_INTRINSICS
   FUNCTION_CASE(entry, TRACE_TIME_METHOD);
@@ -1270,6 +1271,19 @@
 JRT_END
 
 
+JRT_LEAF(int, Runtime1::is_instance_of(oopDesc* mirror, oopDesc* obj))
+  // had to return int instead of bool, otherwise there may be a mismatch
+  // between the C calling convention and the Java one.
+  // e.g., on x86, GCC may clear only %al when returning a bool false, but
+  // JVM takes the whole %eax as the return value, which may misinterpret
+  // the return value as a boolean true.
+
+  assert(mirror != NULL, "should null-check on mirror before calling");
+  klassOop k = java_lang_Class::as_klassOop(mirror);
+  return (k != NULL && obj != NULL && obj->is_a(k)) ? 1 : 0;
+JRT_END
+
+
 #ifndef PRODUCT
 void Runtime1::print_statistics() {
   tty->print_cr("C1 Runtime statistics:");
diff -r 063451aefde8 -r 829ee34e7cbd src/share/vm/c1/c1_Runtime1.hpp
--- a/src/share/vm/c1/c1_Runtime1.hpp	Fri Jun 08 09:49:49 2012 -0700
+++ b/src/share/vm/c1/c1_Runtime1.hpp	Mon Jun 11 08:35:28 2012 -0700
@@ -186,6 +186,7 @@
   static int arraycopy(oopDesc* src, int src_pos, oopDesc* dst, int dst_pos, int length);
   static void primitive_arraycopy(HeapWord* src, HeapWord* dst, int length);
   static void oop_arraycopy(HeapWord* src, HeapWord* dst, int length);
+  static int is_instance_of(oopDesc* mirror, oopDesc* obj);
 
   static void print_statistics() PRODUCT_RETURN;
 };
diff -r 063451aefde8 -r 829ee34e7cbd src/share/vm/c1/c1_ValueMap.hpp
--- a/src/share/vm/c1/c1_ValueMap.hpp	Fri Jun 08 09:49:49 2012 -0700
+++ b/src/share/vm/c1/c1_ValueMap.hpp	Mon Jun 11 08:35:28 2012 -0700
@@ -141,8 +141,11 @@
 
   // visitor functions
   void do_StoreField (StoreField* x) {
-    if (x->is_init_point()) {
-      // putstatic is an initialization point so treat it as a wide kill
+    if (x->is_init_point() ||  // putstatic is an initialization point so treat it as a wide kill
+        // This is actually too strict and the JMM doesn't require
+        // this in all cases (e.g. load a; volatile store b; load a)
+        // but possible future optimizations might require this.
+        x->field()->is_volatile()) {
       kill_memory();
     } else {
       kill_field(x->field());
@@ -160,8 +163,8 @@
   void do_Local (Local* x) { /* nothing to do */ }
   void do_Constant (Constant* x) { /* nothing to do */ }
   void do_LoadField (LoadField* x) {
-    if (x->is_init_point()) {
-      // getstatic is an initialization point so treat it as a wide kill
+    if (x->is_init_point() ||  // getstatic is an initialization point so treat it as a wide kill
+        x->field()->is_volatile()) {  // the JMM requires this
       kill_memory();
     }
   }
diff -r 063451aefde8 -r 829ee34e7cbd src/share/vm/opto/parse.hpp
--- a/src/share/vm/opto/parse.hpp	Fri Jun 08 09:49:49 2012 -0700
+++ b/src/share/vm/opto/parse.hpp	Mon Jun 11 08:35:28 2012 -0700
@@ -527,6 +527,9 @@
   int repush_if_args();
   void adjust_map_after_if(BoolTest::mask btest, Node* c, float prob,
                            Block* path, Block* other_path);
+  void sharpen_type_after_if(BoolTest::mask btest,
+                             Node* con, const Type* tcon,
+                             Node* val, const Type* tval);
   IfNode* jump_if_fork_int(Node* a, Node* b, BoolTest::mask mask);
   Node* jump_if_join(Node* iffalse, Node* iftrue);
   void jump_if_true_fork(IfNode *ifNode, int dest_bci_if_true, int prof_table_index);
diff -r 063451aefde8 -r 829ee34e7cbd src/share/vm/opto/parse2.cpp
--- a/src/share/vm/opto/parse2.cpp	Fri Jun 08 09:49:49 2012 -0700
+++ b/src/share/vm/opto/parse2.cpp	Mon Jun 11 08:35:28 2012 -0700
@@ -1233,6 +1233,71 @@
 
   if (!have_con)                        // remaining adjustments need a con
     return;
+  sharpen_type_after_if(btest, con, tcon, val, tval);
+}
+
+
+static Node* extract_obj_from_klass_load(PhaseGVN* gvn, Node* n) {
+  Node* ldk;
+  if (n->is_DecodeN()) {
+    if (n->in(1)->Opcode() != Op_LoadNKlass) {
+      return NULL;
+    } else {
+      ldk = n->in(1);
+    }
+  } else if (n->Opcode() != Op_LoadKlass) {
+    return NULL;
+  } else {
+    ldk = n;
+  }
+  assert(ldk != NULL && ldk->is_Load(), "should have found a LoadKlass or LoadNKlass node");
+
+  Node* adr = ldk->in(MemNode::Address);
+  intptr_t off = 0;
+  Node* obj = AddPNode::Ideal_base_and_offset(adr, gvn, off);
+  if (obj == NULL || off != oopDesc::klass_offset_in_bytes()) // loading oopDesc::_klass?
+    return NULL;
+  const TypePtr* tp = gvn->type(obj)->is_ptr();
+  if (tp == NULL || !(tp->isa_instptr() || tp->isa_aryptr())) // is obj a Java object ptr?
+    return NULL;
+
+  return obj;
+}
+
+void Parse::sharpen_type_after_if(BoolTest::mask btest,
+                                  Node* con, const Type* tcon,
+                                  Node* val, const Type* tval) {
+  // Look for opportunities to sharpen the type of a node
+  // whose klass is compared with a constant klass.
+  if (btest == BoolTest::eq && tcon->isa_klassptr()) {
+    Node* obj = extract_obj_from_klass_load(&_gvn, val);
+    const TypeOopPtr* con_type = tcon->isa_klassptr()->as_instance_type();
+    if (obj != NULL && (con_type->isa_instptr() || con_type->isa_aryptr())) {
+      // Found:
+      //   Bool(CmpP(LoadKlass(obj._klass), ConP(Foo.klass)), [eq])
+      // or the narrowOop equivalent.
+      const Type* obj_type = _gvn.type(obj);
+      const TypeOopPtr* tboth = obj_type->join(con_type)->isa_oopptr();
+      if (tboth != NULL && tboth != obj_type && tboth->higher_equal(obj_type)) {
+        // obj has to be of the exact type Foo if the CmpP succeeds.
+        assert(tboth->klass_is_exact(), "klass should be exact");
+        int obj_in_map = map()->find_edge(obj);
+        JVMState* jvms = this->jvms();
+        if (obj_in_map >= 0 &&
+            (jvms->is_loc(obj_in_map) || jvms->is_stk(obj_in_map))) {
+          TypeNode* ccast = new (C, 2) CheckCastPPNode(control(), obj, tboth);
+          const Type* tcc = ccast->as_Type()->type();
+          assert(tcc != obj_type && tcc->higher_equal(obj_type), "must improve");
+          // Delay transform() call to allow recovery of pre-cast value
+          // at the control merge.
+          _gvn.set_type_bottom(ccast);
+          record_for_igvn(ccast);
+          // Here's the payoff.
+          replace_in_map(obj, ccast);
+        }
+      }
+    }
+  }
 
   int val_in_map = map()->find_edge(val);
   if (val_in_map < 0)  return;          // replace_in_map would be useless
@@ -1265,6 +1330,7 @@
   // Exclude tests vs float/double 0 as these could be
   // either +0 or -0.  Just because you are equal to +0
   // doesn't mean you ARE +0!
+  // Note, following code also replaces Long and Oop values.
   if ((!tf || tf->_f != 0.0) &&
       (!td || td->_d != 0.0))
     cast = con;                   // Replace non-constant val by con.
diff -r 063451aefde8 -r 829ee34e7cbd src/share/vm/opto/subnode.cpp
--- a/src/share/vm/opto/subnode.cpp	Fri Jun 08 09:49:49 2012 -0700
+++ b/src/share/vm/opto/subnode.cpp	Mon Jun 11 08:35:28 2012 -0700
@@ -702,12 +702,84 @@
   return TypeInt::CC;
 }
 
+static inline Node* isa_java_mirror_load(PhaseGVN* phase, Node* n) {
+  // Return the klass node for
+  //   LoadP(AddP(foo:Klass, #java_mirror))
+  //   or NULL if not matching.
+  if (n->Opcode() != Op_LoadP) return NULL;
+
+  const TypeInstPtr* tp = phase->type(n)->isa_instptr();
+  if (!tp || tp->klass() != phase->C->env()->Class_klass()) return NULL;
+
+  Node* adr = n->in(MemNode::Address);
+  intptr_t off = 0;
+  Node* k = AddPNode::Ideal_base_and_offset(adr, phase, off);
+  if (k == NULL) return NULL;
+  const TypeKlassPtr* tkp = phase->type(k)->isa_klassptr();
+  if (!tkp || off != in_bytes(Klass::java_mirror_offset())) return NULL;
+
+  // We've found the klass node of a Java mirror load.
+  return k;
+}
+
+static inline Node* isa_const_java_mirror(PhaseGVN* phase, Node* n) {
+  // for ConP(Foo.class) return ConP(Foo.klass)
+  // otherwise return NULL
+  if (!n->is_Con()) return NULL;
+
+  const TypeInstPtr* tp = phase->type(n)->isa_instptr();
+  if (!tp) return NULL;
+
+  ciType* mirror_type = tp->java_mirror_type();
+  // TypeInstPtr::java_mirror_type() returns non-NULL for compile-
+  // time Class constants only.
+  if (!mirror_type) return NULL;
+
+  // x.getClass() == int.class can never be true (for all primitive types)
+  // Return a ConP(NULL) node for this case.
+  if (mirror_type->is_classless()) {
+    return phase->makecon(TypePtr::NULL_PTR);
+  }
+
+  // return the ConP(Foo.klass)
+  assert(mirror_type->is_klass(), "mirror_type should represent a klassOop");
+  return phase->makecon(TypeKlassPtr::make(mirror_type->as_klass()));
+}
+
 //------------------------------Ideal------------------------------------------
-// Check for the case of comparing an unknown klass loaded from the primary
+// Normalize comparisons between Java mirror loads to compare the klass instead.
+//
+// Also check for the case of comparing an unknown klass loaded from the primary
 // super-type array vs a known klass with no subtypes.  This amounts to
 // checking to see an unknown klass subtypes a known klass with no subtypes;
 // this only happens on an exact match.  We can shorten this test by 1 load.
 Node *CmpPNode::Ideal( PhaseGVN *phase, bool can_reshape ) {
+  // Normalize comparisons between Java mirrors into comparisons of the low-
+  // level klass, where a dependent load could be shortened.
+  //
+  // The new pattern has a nice effect of matching the same pattern used in the
+  // fast path of instanceof/checkcast/Class.isInstance(), which allows
+  // redundant exact type check be optimized away by GVN.
+  // For example, in
+  //   if (x.getClass() == Foo.class) {
+  //     Foo foo = (Foo) x;
+  //     // ... use a ...
+  //   }
+  // a CmpPNode could be shared between if_acmpne and checkcast
+  {
+    Node* k1 = isa_java_mirror_load(phase, in(1));
+    Node* k2 = isa_java_mirror_load(phase, in(2));
+    Node* conk2 = isa_const_java_mirror(phase, in(2));
+
+    if (k1 && (k2 || conk2)) {
+      Node* lhs = k1;
+      Node* rhs = (k2 != NULL) ? k2 : conk2;
+      this->set_req(1, lhs);
+      this->set_req(2, rhs);
+      return this;
+    }
+  }
+
   // Constant pointer on right?
   const TypeKlassPtr* t2 = phase->type(in(2))->isa_klassptr();
   if (t2 == NULL || !t2->klass_is_exact())
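For context, the Java-level pattern this changeset targets is the one already quoted in the patch's own comments (cls.isInstance(obj) in c1_Canonicalizer.cpp and x.getClass() == Foo.class followed by a cast in subnode.cpp). The sketch below is a minimal, hypothetical illustration of that pattern; IsInstanceDemo, the nested class Foo, and describe() are invented names for illustration only and are not part of the changeset.

// Illustrative sketch only: IsInstanceDemo, Foo and describe() are hypothetical
// names, not part of the changeset. The branches mirror the patterns named in
// the patch comments: a getClass() comparison followed by a checkcast (targeted
// by the CmpPNode::Ideal() and sharpen_type_after_if() changes) and
// Class.isInstance() on a constant Class (targeted by the new C1 intrinsic).
class IsInstanceDemo {
    static class Foo {
        @Override public String toString() { return "an exact Foo"; }
    }

    static String describe(Object obj) {
        if (obj.getClass() == Foo.class) {   // mirror compare can become a klass compare
            Foo foo = (Foo) obj;             // exact type is known, so the cast check can fold away
            return foo.toString();
        }
        if (Foo.class.isInstance(obj)) {     // constant Class: canonicalized to an InstanceOf in C1
            return "a Foo subclass";
        }
        return "not a Foo";
    }

    public static void main(String[] args) {
        System.out.println(describe(new Foo()));
        System.out.println(describe("plain String"));
    }
}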