comparison src/share/vm/c1/c1_LIRGenerator.cpp @ 6725:da91efe96a93

6964458: Reimplement class meta-data storage to use native memory
Summary: Remove PermGen, allocate meta-data in metaspace linked to class loaders, rewrite GC walking, rewrite and rename metadata to be C++ classes
Reviewed-by: jmasa, stefank, never, coleenp, kvn, brutisso, mgerdin, dholmes, jrose, twisti, roland
Contributed-by: jmasa <jon.masamitsu@oracle.com>, stefank <stefan.karlsson@oracle.com>, mgerdin <mikael.gerdin@oracle.com>, never <tom.rodriguez@oracle.com>
author coleenp
date Sat, 01 Sep 2012 13:25:18 -0400
parents 7a302948f5a4
children 044a77cd0c8b
comparing 6724:36d1d483d5d6 with 6725:da91efe96a93
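Before the hunks, a minimal self-contained sketch of the scheme the Summary line describes, using stand-in names (Metaspace and Method below are illustrative only, not the HotSpot definitions): class metadata becomes a plain C++ object allocated from native memory owned by its class loader, and compiled code reaches its fields through byte offsets, which is why the hunks below rename methodOopDesc/instanceKlass accessors to Method/InstanceKlass.

// Hedged sketch, not HotSpot source: every name here is a simplified stand-in.
#include <cstddef>
#include <cstdint>
#include <new>
#include <vector>

struct Metaspace {                 // stand-in for a per-class-loader native arena
  std::vector<char*> _blocks;
  void* allocate(size_t bytes) {   // stands in for chunked native allocation
    char* p = new char[bytes];
    _blocks.push_back(p);
    return p;
  }
  ~Metaspace() {                   // all metadata is released with its arena/loader
    for (size_t i = 0; i < _blocks.size(); i++) delete[] _blocks[i];
  }
};

struct Method {                    // plain C++ class, in the spirit of the old methodOopDesc
  uint32_t _invocation_counter;
  static size_t invocation_counter_offset() {
    return offsetof(Method, _invocation_counter);
  }
};

int main() {
  Metaspace loader_metaspace;
  Method* m = new (loader_metaspace.allocate(sizeof(Method))) Method();
  m->_invocation_counter++;        // a JIT would address this field by the offset above
  return (int) Method::invocation_counter_offset();
}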
@@ -28,12 +28,12 @@
 #include "c1/c1_Instruction.hpp"
 #include "c1/c1_LIRAssembler.hpp"
 #include "c1/c1_LIRGenerator.hpp"
 #include "c1/c1_ValueStack.hpp"
 #include "ci/ciArrayKlass.hpp"
-#include "ci/ciCPCache.hpp"
 #include "ci/ciInstance.hpp"
+#include "ci/ciObjArray.hpp"
 #include "runtime/sharedRuntime.hpp"
 #include "runtime/stubRoutines.hpp"
 #include "utilities/bitMap.inline.hpp"
 #ifndef SERIALGC
 #include "gc_implementation/g1/heapRegion.hpp"
@@ -459,14 +459,14 @@
 CodeEmitInfo* LIRGenerator::state_for(Instruction* x) {
   return state_for(x, x->exception_state());
 }


-void LIRGenerator::jobject2reg_with_patching(LIR_Opr r, ciObject* obj, CodeEmitInfo* info) {
+void LIRGenerator::klass2reg_with_patching(LIR_Opr r, ciMetadata* obj, CodeEmitInfo* info) {
   if (!obj->is_loaded() || PatchALot) {
     assert(info != NULL, "info must be set if class is not loaded");
-    __ oop2reg_patch(NULL, r, info);
+    __ klass2reg_patch(NULL, r, info);
   } else {
     // no patching needed
     __ oop2reg(obj->constant_encoding(), r);
   }
 }
@@ -655,11 +655,11 @@
   __ unlock_object(hdr, object, lock, scratch, slow_path);
 }


 void LIRGenerator::new_instance(LIR_Opr dst, ciInstanceKlass* klass, LIR_Opr scratch1, LIR_Opr scratch2, LIR_Opr scratch3, LIR_Opr scratch4, LIR_Opr klass_reg, CodeEmitInfo* info) {
-  jobject2reg_with_patching(klass_reg, klass, info);
+  klass2reg_with_patching(klass_reg, klass, info);
   // If klass is not loaded we do not know if the klass has finalizers:
   if (UseFastNewInstance && klass->is_loaded()
       && !Klass::layout_helper_needs_slow_path(klass->layout_helper())) {

     Runtime1::StubID stub_id = klass->is_initialized() ? Runtime1::fast_new_instance_id : Runtime1::fast_new_instance_init_check_id;
@@ -1187,11 +1187,11 @@

 void LIRGenerator::do_Return(Return* x) {
   if (compilation()->env()->dtrace_method_probes()) {
     BasicTypeList signature;
     signature.append(LP64_ONLY(T_LONG) NOT_LP64(T_INT)); // thread
-    signature.append(T_OBJECT); // methodOop
+    signature.append(T_OBJECT); // Method*
     LIR_OprList* args = new LIR_OprList();
     args->append(getThreadPointer());
     LIR_Opr meth = new_register(T_OBJECT);
     __ oop2reg(method()->constant_encoding(), meth);
     args->append(meth);
@@ -1284,11 +1284,11 @@
   // need to perform the null check on the rcvr
   CodeEmitInfo* info = NULL;
   if (x->needs_null_check()) {
     info = state_for(x);
   }
-  __ move(new LIR_Address(rcvr.result(), oopDesc::klass_offset_in_bytes(), T_OBJECT), result, info);
+  __ move(new LIR_Address(rcvr.result(), oopDesc::klass_offset_in_bytes(), UseCompressedKlassPointers ? T_OBJECT : T_ADDRESS), result, info);
   __ move_wide(new LIR_Address(result, in_bytes(Klass::java_mirror_offset()), T_OBJECT), result);
 }


 // Example: Thread.currentThread()
@@ -2291,11 +2291,11 @@
   LIR_Opr src_klass = new_register(T_OBJECT);
   if (gen_type_check) {
     // We have determined that offset == referent_offset && src != null.
     // if (src->_klass->_reference_type == REF_NONE) -> continue
     __ move(new LIR_Address(src.result(), oopDesc::klass_offset_in_bytes(), T_OBJECT), src_klass);
-    LIR_Address* reference_type_addr = new LIR_Address(src_klass, in_bytes(instanceKlass::reference_type_offset()), T_BYTE);
+    LIR_Address* reference_type_addr = new LIR_Address(src_klass, in_bytes(InstanceKlass::reference_type_offset()), T_BYTE);
     LIR_Opr reference_type = new_register(T_INT);
     __ move(reference_type_addr, reference_type);
     __ cmp(lir_cond_equal, reference_type, LIR_OprFact::intConst(REF_NONE));
     __ branch(lir_cond_equal, T_INT, Lcont->label());
   }
@@ -2606,11 +2606,11 @@
   }

   if (compilation()->env()->dtrace_method_probes()) {
     BasicTypeList signature;
     signature.append(LP64_ONLY(T_LONG) NOT_LP64(T_INT)); // thread
-    signature.append(T_OBJECT); // methodOop
+    signature.append(T_OBJECT); // Method*
     LIR_OprList* args = new LIR_OprList();
     args->append(getThreadPointer());
     LIR_Opr meth = new_register(T_OBJECT);
     __ oop2reg(method()->constant_encoding(), meth);
     args->append(meth);
@@ -2792,11 +2792,11 @@
     } else if (x->vtable_index() < 0) {
       __ call_icvirtual(target, receiver, result_register,
                         SharedRuntime::get_resolve_virtual_call_stub(),
                         arg_list, info);
     } else {
-      int entry_offset = instanceKlass::vtable_start_offset() + x->vtable_index() * vtableEntry::size();
+      int entry_offset = InstanceKlass::vtable_start_offset() + x->vtable_index() * vtableEntry::size();
       int vtable_offset = entry_offset * wordSize + vtableEntry::method_offset_in_bytes();
       __ call_virtual(target, receiver, result_register, vtable_offset, arg_list, info);
     }
     break;
   case Bytecodes::_invokedynamic: {
@@ -2903,15 +2903,16 @@
 }

 void LIRGenerator::do_ClassIDIntrinsic(Intrinsic* x) {
   CodeEmitInfo* info = state_for(x);
   CodeEmitInfo* info2 = new CodeEmitInfo(info); // Clone for the second null check
+  BasicType klass_pointer_type = NOT_LP64(T_INT) LP64_ONLY(T_LONG);
   assert(info != NULL, "must have info");
   LIRItem arg(x->argument_at(1), this);
   arg.load_item();
-  LIR_Opr klass = new_register(T_OBJECT);
-  __ move(new LIR_Address(arg.result(), java_lang_Class::klass_offset_in_bytes(), T_OBJECT), klass, info);
+  LIR_Opr klass = new_pointer_register();
+  __ move(new LIR_Address(arg.result(), java_lang_Class::klass_offset_in_bytes(), klass_pointer_type), klass, info);
   LIR_Opr id = new_register(T_LONG);
   ByteSize offset = TRACE_ID_OFFSET;
   LIR_Address* trace_id_addr = new LIR_Address(klass, in_bytes(offset), T_LONG);
   __ move(trace_id_addr, id);
   __ logical_or(id, LIR_OprFact::longConst(0x01l), id);
@@ -3032,17 +3033,17 @@

   int offset = -1;
   LIR_Opr counter_holder = new_register(T_OBJECT);
   LIR_Opr meth;
   if (level == CompLevel_limited_profile) {
-    offset = in_bytes(backedge ? methodOopDesc::backedge_counter_offset() :
-                                 methodOopDesc::invocation_counter_offset());
+    offset = in_bytes(backedge ? Method::backedge_counter_offset() :
+                                 Method::invocation_counter_offset());
     __ oop2reg(method->constant_encoding(), counter_holder);
     meth = counter_holder;
   } else if (level == CompLevel_full_profile) {
-    offset = in_bytes(backedge ? methodDataOopDesc::backedge_counter_offset() :
-                                 methodDataOopDesc::invocation_counter_offset());
+    offset = in_bytes(backedge ? MethodData::backedge_counter_offset() :
+                                 MethodData::invocation_counter_offset());
     ciMethodData* md = method->method_data_or_null();
     assert(md != NULL, "Sanity");
     __ oop2reg(md->constant_encoding(), counter_holder);
     meth = new_register(T_OBJECT);
     __ oop2reg(method->constant_encoding(), meth);