comparison src/share/vm/opto/library_call.cpp @ 6725:da91efe96a93

6964458: Reimplement class meta-data storage to use native memory
Summary: Remove PermGen, allocate meta-data in metaspace linked to class loaders, rewrite GC walking, rewrite and rename metadata to be C++ classes
Reviewed-by: jmasa, stefank, never, coleenp, kvn, brutisso, mgerdin, dholmes, jrose, twisti, roland
Contributed-by: jmasa <jon.masamitsu@oracle.com>, stefank <stefan.karlsson@oracle.com>, mgerdin <mikael.gerdin@oracle.com>, never <tom.rodriguez@oracle.com>
author coleenp
date Sat, 01 Sep 2012 13:25:18 -0400
parents 09aad8452938
children 7eca5de9e0b6
comparison: 6724:36d1d483d5d6 vs 6725:da91efe96a93
@@ lines 2902-2916 @@
   if (stopped())  return true;  // argument was like int.class

   // Note:  The argument might still be an illegal value like
   // Serializable.class or Object[].class.  The runtime will handle it.
   // But we must make an explicit check for initialization.
-  Node* insp = basic_plus_adr(kls, in_bytes(instanceKlass::init_state_offset()));
-  // Use T_BOOLEAN for instanceKlass::_init_state so the compiler
+  Node* insp = basic_plus_adr(kls, in_bytes(InstanceKlass::init_state_offset()));
+  // Use T_BOOLEAN for InstanceKlass::_init_state so the compiler
   // can generate code to load it as unsigned byte.
   Node* inst = make_load(NULL, insp, TypeInt::UBYTE, T_BOOLEAN);
-  Node* bits = intcon(instanceKlass::fully_initialized);
+  Node* bits = intcon(InstanceKlass::fully_initialized);
   Node* test = _gvn.transform( new (C, 3) SubINode(inst, bits) );
   // The 'test' is non-zero if we need to take a slow path.

   Node* obj = new_instance(kls, test);
   push(obj);
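This hunk guards the fast allocation path on the class being fully initialized: it loads InstanceKlass::_init_state as an unsigned byte and subtracts fully_initialized, taking the slow path on any non-zero result. A minimal C++ sketch of that check, with the offset and constant passed in as plain values (kls_raw and the literal for fully_initialized are illustrative stand-ins, not HotSpot API):

#include <cstddef>
#include <cstdint>

// Illustrative stand-in for InstanceKlass::fully_initialized.
static const uint8_t fully_initialized = 4;

// kls_raw is a plain byte pointer standing in for the loaded Klass*.
static bool needs_slow_path(const uint8_t* kls_raw, size_t init_state_offset) {
  // Load _init_state as an unsigned byte, mirroring the T_BOOLEAN load above.
  uint8_t inst = kls_raw[init_state_offset];
  // 'test' (inst - bits) is non-zero exactly when the slow path is needed.
  return (inst - fully_initialized) != 0;
}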
@@ lines 3473-3483 @@
   jint  layout_con = 0;
   Node* layout_val = get_layout_helper(kls, layout_con);
   if (layout_val == NULL) {
     bool query = (obj_array
                   ? Klass::layout_helper_is_objArray(layout_con)
-                  : Klass::layout_helper_is_javaArray(layout_con));
+                  : Klass::layout_helper_is_array(layout_con));
     if (query == not_array) {
       return NULL;                       // never a branch
     } else {                             // always a branch
       Node* always_branch = control();
       if (region != NULL)
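This guard comes from the array-type check helper: when get_layout_helper can prove the layout helper is a compile-time constant (layout_val == NULL), the array/objArray question folds statically and no runtime branch is emitted. A rough sketch of that folding, with the layout_helper_* predicates passed in as function pointers (all names here are illustrative, not the real helper's signature):

// Illustrative stand-ins for Klass::layout_helper_is_objArray / _is_array.
enum GuardResult { NeverBranch, AlwaysBranch, RuntimeTest };

GuardResult fold_array_guard(bool layout_is_constant, int layout_con,
                             bool obj_array, bool not_array,
                             bool (*is_obj_array_lh)(int),
                             bool (*is_array_lh)(int)) {
  if (!layout_is_constant)
    return RuntimeTest;                         // klass must be tested at run time
  bool query = obj_array ? is_obj_array_lh(layout_con)
                         : is_array_lh(layout_con);
  return (query == not_array) ? NeverBranch     // guard folds away entirely
                              : AlwaysBranch;   // guard is unconditionally taken
}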
@@ lines 3708-3726 @@
 // Helper for hashCode and clone.  Peeks inside the vtable to avoid a call.
 Node* LibraryCallKit::generate_virtual_guard(Node* obj_klass,
                                              RegionNode* slow_region) {
   ciMethod* method = callee();
   int vtable_index = method->vtable_index();
-  // Get the methodOop out of the appropriate vtable entry.
-  int entry_offset  = (instanceKlass::vtable_start_offset() +
+  // Get the Method* out of the appropriate vtable entry.
+  int entry_offset  = (InstanceKlass::vtable_start_offset() +
                        vtable_index*vtableEntry::size()) * wordSize +
                       vtableEntry::method_offset_in_bytes();
   Node* entry_addr  = basic_plus_adr(obj_klass, entry_offset);
   Node* target_call = make_load(NULL, entry_addr, TypeInstPtr::NOTNULL, T_OBJECT);

   // Compare the target method with the expected method (e.g., Object.hashCode).
-  const TypeInstPtr* native_call_addr = TypeInstPtr::make(method);
+  const TypePtr* native_call_addr = TypeMetadataPtr::make(method);

   Node* native_call = makecon(native_call_addr);
   Node* chk_native  = _gvn.transform( new(C, 3) CmpPNode(target_call, native_call) );
   Node* test_native = _gvn.transform( new(C, 2) BoolNode(chk_native, BoolTest::ne) );
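generate_virtual_guard avoids a virtual call by computing the byte offset of the receiver's vtable slot inside its InstanceKlass, loading the Method* stored there, and comparing it against the expected method constant; a mismatch means the method is overridden and the intrinsic must go slow. A small sketch of the offset arithmetic only, with the HotSpot quantities passed in as parameters (the parameter names are illustrative stand-ins for InstanceKlass::vtable_start_offset(), vtableEntry::size(), vtableEntry::method_offset_in_bytes() and wordSize):

#include <cstddef>

size_t vtable_entry_offset(size_t vtable_start_words, int vtable_index,
                           size_t entry_size_words, size_t method_offset_bytes,
                           size_t word_size) {
  // Byte offset, from the start of the klass, of the Method* slot for this index.
  return (vtable_start_words + vtable_index * entry_size_words) * word_size
         + method_offset_bytes;
}

On a 64-bit VM word_size would be 8; the guard then routes control to slow_region whenever the Method* loaded at that offset differs from the expected Object.hashCode()/clone().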
@@ lines 3751-3761 @@
     slow_call = new(C, tfdc) CallStaticJavaNode(tf,
                                SharedRuntime::get_resolve_static_call_stub(),
                                method, bci());
   } else if (is_virtual) {
     null_check_receiver(method);
-    int vtable_index = methodOopDesc::invalid_vtable_index;
+    int vtable_index = Method::invalid_vtable_index;
     if (UseInlineCaches) {
       // Suppress the vtable call
     } else {
       // hashCode and clone are not a miranda methods,
       // so the vtable index is fixed.
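The branch above decides how the slow-path call dispatches: with UseInlineCaches the call site resolves its own target, so the vtable index is left invalid; otherwise the fixed vtable index of the (non-miranda) method is used. A hedged sketch of that decision (DispatchKind, pick_dispatch and the -4 literal are placeholders, not HotSpot definitions):

enum DispatchKind { InlineCacheCall, VtableIndexedCall };
const int invalid_vtable_index = -4;   // stand-in for Method::invalid_vtable_index

DispatchKind pick_dispatch(bool use_inline_caches, int method_vtable_index,
                           int* out_vtable_index) {
  if (use_inline_caches) {
    *out_vtable_index = invalid_vtable_index;   // the IC stub resolves the target itself
    return InlineCacheCall;
  }
  *out_vtable_index = method_vtable_index;      // fixed index: not a miranda method
  return VtableIndexedCall;
}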
@@ lines 4264-4275 @@
   // Can't use base_offset_in_bytes(bt) since basic type is unknown.
   int base_off = is_array ? arrayOopDesc::length_offset_in_bytes() :
                             instanceOopDesc::base_offset_in_bytes();
   // base_off:
   // 8  - 32-bit VM
-  // 12 - 64-bit VM, compressed oops
-  // 16 - 64-bit VM, normal oops
+  // 12 - 64-bit VM, compressed klass
+  // 16 - 64-bit VM, normal klass
   if (base_off % BytesPerLong != 0) {
     assert(UseCompressedOops, "");
     if (is_array) {
       // Exclude length to copy by 8 bytes words.
       base_off += sizeof(int);
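The comment block documents where the clone copy starts: 8 bytes on a 32-bit VM, 12 with a compressed klass field, 16 otherwise. The copy loop moves 8-byte words, so an unaligned start (12) is fixed for arrays by skipping the 4-byte length field. A sketch of just that array-branch adjustment (aligned_copy_start is an illustrative name; the instance branch of the real code is not shown in this hunk):

const int BytesPerLong = 8;     // the copy loop moves 8-byte words

int aligned_copy_start(int base_off, bool is_array) {
  if (base_off % BytesPerLong != 0 && is_array) {
    base_off += sizeof(int);    // skip the 32-bit length slot: 12 -> 16
  }
  return base_off;              // now 8-byte aligned (8 or 16)
}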
@@ lines 4897-4907 @@
   // If they are not, we will use checked_oop_disjoint_arraycopy,
   // which performs a fast optimistic per-oop check, and backs off
   // further to JVM_ArrayCopy on the first per-oop check that fails.
   // (Actually, we don't move raw bits only; the GC requires card marks.)

-  // Get the klassOop for both src and dest
+  // Get the Klass* for both src and dest
   Node* src_klass  = load_object_klass(src);
   Node* dest_klass = load_object_klass(dest);

   // Generate the subtype check.
   // This might fold up statically, or then again it might not.
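This is the arraycopy slow-path setup: if the destination element type cannot be proven a supertype of the source element type, the copy falls back to checked_oop_disjoint_arraycopy, which checks each oop as it stores it and bails out to JVM_ArrayCopy on the first failure. A coarse sketch of that strategy choice (the enum and helper are illustrative; the real decision is built from the subtype-check graph generated below):

enum CopyStub { UncheckedCopy, CheckedOopCopy, GenericArrayCopy };

CopyStub choose_arraycopy_stub(bool dest_elem_is_supertype_of_src_elem) {
  if (dest_elem_is_supertype_of_src_elem)
    return UncheckedCopy;       // plain copy (plus card marks); no per-oop checks
  // Optimistic per-oop checked copy; the stub itself backs off to
  // JVM_ArrayCopy (GenericArrayCopy) on the first element that fails.
  return CheckedOopCopy;
}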