comparison src/share/vm/code/nmethod.cpp @ 6725:da91efe96a93

6964458: Reimplement class meta-data storage to use native memory
Summary: Remove PermGen, allocate meta-data in metaspace linked to class loaders, rewrite GC walking, rewrite and rename metadata to be C++ classes
Reviewed-by: jmasa, stefank, never, coleenp, kvn, brutisso, mgerdin, dholmes, jrose, twisti, roland
Contributed-by: jmasa <jon.masamitsu@oracle.com>, stefank <stefan.karlsson@oracle.com>, mgerdin <mikael.gerdin@oracle.com>, never <tom.rodriguez@oracle.com>
author coleenp
date Sat, 01 Sep 2012 13:25:18 -0400
parents 1d7922586cf6
children 8966c2d65d96 2cb2f30450c7
compared revisions: 6724:36d1d483d5d6 (base) vs. 6725:da91efe96a93
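The hunks below are dominated by a mechanical migration of the old PermGen oop-based metadata types to plain native C++ classes. The recurring renames, collected from this file (non-exhaustive):

    methodOop                        ->  Method*
    methodDataOop                    ->  MethodData*
    klassOop                         ->  Klass*
    instanceKlass                    ->  InstanceKlass
    compiledICHolderOop              ->  CompiledICHolder*
    objArrayOop (of methods)         ->  Array<Method*>*
    is_alive->do_object_b(klass)     ->  klass->is_loader_alive(is_alive)
    code_buffer->copy_oops_to(nm)    ->  code_buffer->copy_values_to(nm)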
@@ -32 +32 @@
 #include "compiler/compileBroker.hpp"
 #include "compiler/compileLog.hpp"
 #include "compiler/compilerOracle.hpp"
 #include "compiler/disassembler.hpp"
 #include "interpreter/bytecode.hpp"
-#include "oops/methodDataOop.hpp"
+#include "oops/methodData.hpp"
 #include "prims/jvmtiRedefineClassesTrace.hpp"
 #include "prims/jvmtiImpl.hpp"
 #include "runtime/sharedRuntime.hpp"
 #include "runtime/sweeper.hpp"
 #include "utilities/dtrace.hpp"
@@ -57 +57 @@
 HS_DTRACE_PROBE_DECL6(hotspot, compiled__method__unload,
   char*, int, char*, int, char*, int);

 #define DTRACE_METHOD_UNLOAD_PROBE(method)                            \
   {                                                                   \
-    methodOop m = (method);                                           \
+    Method* m = (method);                                             \
     if (m != NULL) {                                                  \
       Symbol* klass_name = m->klass_name();                           \
       Symbol* name = m->name();                                       \
       Symbol* signature = m->signature();                             \
       HS_DTRACE_PROBE6(hotspot, compiled__method__unload,             \
@@ -71 +71 @@
     }                                                                 \
   }
 #else /* USDT2 */
 #define DTRACE_METHOD_UNLOAD_PROBE(method)                            \
   {                                                                   \
-    methodOop m = (method);                                           \
+    Method* m = (method);                                             \
     if (m != NULL) {                                                  \
       Symbol* klass_name = m->klass_name();                           \
       Symbol* name = m->name();                                       \
       Symbol* signature = m->signature();                             \
       HOTSPOT_COMPILED_METHOD_UNLOAD(                                 \
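Both probe variants now take a Method* instead of a methodOop; the probe body is otherwise unchanged. A minimal stand-alone model of the unload-probe pattern, with hypothetical types in place of the real Symbol/Method and printf in place of the dtrace.hpp probe macros:

    #include <cstdio>

    struct Symbol { const char* utf8; int len; };
    struct Method { Symbol holder, nm, sig; };

    // Models DTRACE_METHOD_UNLOAD_PROBE: evaluate the argument once,
    // null-check, then hand symbol bytes/lengths to the probe.
    #define METHOD_UNLOAD_PROBE(method)                  \
      {                                                  \
        Method* m = (method);                            \
        if (m != NULL) {                                 \
          printf("unload %.*s.%.*s%.*s\n",               \
                 m->holder.len, m->holder.utf8,          \
                 m->nm.len, m->nm.utf8,                  \
                 m->sig.len, m->sig.utf8);               \
        }                                                \
      }

    int main() {
      Method m = {{"java/lang/String", 16}, {"hashCode", 8}, {"()I", 3}};
      METHOD_UNLOAD_PROBE(&m);
    }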
@@ -493 +493 @@
   int frame_complete,
   int frame_size,
   ByteSize basic_lock_owner_sp_offset,
   ByteSize basic_lock_sp_offset,
   OopMapSet* oop_maps) {
+  code_buffer->finalize_oop_references(method);
   // create nmethod
   nmethod* nm = NULL;
   {
     MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
     int native_nmethod_size = allocation_size(code_buffer, sizeof(nmethod));
@@ -527 +528 @@
   CodeBuffer *code_buffer,
   int vep_offset,
   int trap_offset,
   int frame_complete,
   int frame_size) {
+  code_buffer->finalize_oop_references(method);
   // create nmethod
   nmethod* nm = NULL;
   {
     MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
     int nmethod_size = allocation_size(code_buffer, sizeof(nmethod));
@@ -571 +573 @@
   AbstractCompiler* compiler,
   int comp_level
 )
 {
   assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
+  code_buffer->finalize_oop_references(method);
   // create nmethod
   nmethod* nm = NULL;
   { MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
     int nmethod_size =
       allocation_size(code_buffer, sizeof(nmethod))
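All three factory paths gain the same first statement: finalize_oop_references(method) runs before the CodeCache_lock is taken and before allocation_size() is computed. One plausible reading is that the buffer's recorded oop and metadata pools must be frozen before the blob is sized; a toy model of that ordering constraint (names hypothetical, not the real CodeBuffer API):

    #include <cstddef>
    #include <vector>

    // Toy stand-in for CodeBuffer: constant pools must be frozen before sizing.
    struct Buffer {
      std::vector<void*> oops, metadata;
      bool finalized = false;
      void finalize_references() { finalized = true; }  // no more entries may be added
      size_t total_oop_size() const      { return oops.size() * sizeof(void*); }
      size_t total_metadata_size() const { return metadata.size() * sizeof(void*); }
    };

    // Models allocation_size(): only meaningful once the pools are final.
    size_t allocation_size(const Buffer& b, size_t header) {
      return header + b.total_oop_size() + b.total_metadata_size();
    }

    int main() {
      Buffer b;
      b.oops.push_back(nullptr);
      b.finalize_references();                      // mirrors finalize_oop_references(method)
      return allocation_size(b, 256) > 0 ? 0 : 1;   // then size, then allocate
    }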
@@ -599 +602 @@
       // which are dependent on those classes.  The slow way is to
       // check every nmethod for dependencies which makes it linear in
       // the number of methods compiled.  For applications with a lot
       // classes the slow way is too slow.
       for (Dependencies::DepStream deps(nm); deps.next(); ) {
-        klassOop klass = deps.context_type();
+        Klass* klass = deps.context_type();
         if (klass == NULL)  continue;  // ignore things like evol_method

         // record this nmethod as dependent on this klass
-        instanceKlass::cast(klass)->add_dependent_nmethod(nm);
+        InstanceKlass::cast(klass)->add_dependent_nmethod(nm);
       }
     }
     NOT_PRODUCT(if (nm != NULL)  nmethod_stats.note_nmethod(nm));
     if (PrintAssembly && nm != NULL)
       Disassembler::decode(nm);
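The comment in this hunk explains the design: rather than scanning every nmethod on each class load (linear in the number of compiled methods), each class keeps its own list of dependent nmethods, so checking and invalidation touch only that list. A simplified stand-alone model; HotSpot's real structure is an intrusive bucket list inside InstanceKlass:

    #include <vector>

    struct nmethod_t;                        // stand-in for nmethod

    struct ClassDeps {                       // stand-in for InstanceKlass
      std::vector<nmethod_t*> dependents;    // the real VM uses an intrusive linked list
      void add_dependent_nmethod(nmethod_t* nm) { dependents.push_back(nm); }

      // On redefinition/unloading only this list is walked, not the code cache.
      template <class F> void for_each_dependent(F f) {
        for (nmethod_t* nm : dependents) f(nm);
      }
    };

    int main() {
      ClassDeps k;
      k.add_dependent_nmethod(nullptr);
      int n = 0;
      k.for_each_dependent([&](nmethod_t*) { ++n; });
      return n == 1 ? 0 : 1;
    }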
@@ -625 +628 @@
 }


 // For native wrappers
 nmethod::nmethod(
-  methodOop method,
+  Method* method,
   int nmethod_size,
   int compile_id,
   CodeOffsets* offsets,
   CodeBuffer* code_buffer,
   int frame_size,
@@ -656 +659 @@
     _orig_pc_offset          = 0;

     _consts_offset           = data_offset();
     _stub_offset             = data_offset();
     _oops_offset             = data_offset();
-    _scopes_data_offset      = _oops_offset          + round_to(code_buffer->total_oop_size(), oopSize);
+    _metadata_offset         = _oops_offset          + round_to(code_buffer->total_oop_size(), oopSize);
+    _scopes_data_offset      = _metadata_offset      + round_to(code_buffer->total_metadata_size(), wordSize);
     _scopes_pcs_offset       = _scopes_data_offset;
     _dependencies_offset     = _scopes_pcs_offset;
     _handler_table_offset    = _dependencies_offset;
     _nul_chk_table_offset    = _handler_table_offset;
     _nmethod_end_offset      = _nul_chk_table_offset;
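Worked example of the new wrapper layout, assuming a 64-bit VM (oopSize == wordSize == 8) with, say, 3 recorded oops and 2 metadata entries (counts illustrative):

    _oops_offset        = data_offset()
    _metadata_offset    = _oops_offset     + round_to(3 * 8, 8)   // +24 bytes of oops
    _scopes_data_offset = _metadata_offset + round_to(2 * 8, 8)   // +16 bytes of Metadata*
    // scopes pcs, dependencies, handler and nul-chk tables all alias
    // _scopes_data_offset: a native wrapper carries none of them.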
@@ -670 +674 @@
     _verified_entry_point    = code_begin() + offsets->value(CodeOffsets::Verified_Entry);
     _osr_entry_point         = NULL;
     _exception_cache         = NULL;
     _pc_desc_cache.reset_to(NULL);

-    code_buffer->copy_oops_to(this);
+    code_buffer->copy_values_to(this);
     if (ScavengeRootsInCode && detect_scavenge_root_oops()) {
       CodeCache::add_scavenge_root_nmethod(this);
     }
     debug_only(verify_scavenge_root_oops());
     CodeCache::commit(this);
@@ -708 +712 @@
 }

 // For dtrace wrappers
 #ifdef HAVE_DTRACE_H
 nmethod::nmethod(
-  methodOop method,
+  Method* method,
   int nmethod_size,
   CodeOffsets* offsets,
   CodeBuffer* code_buffer,
   int frame_size)
   : CodeBlob("dtrace nmethod", code_buffer, sizeof(nmethod),
@@ -736 +740 @@
     _trap_offset             = offsets->value(CodeOffsets::Dtrace_trap);
     _orig_pc_offset          = 0;
     _consts_offset           = data_offset();
     _stub_offset             = data_offset();
     _oops_offset             = data_offset();
-    _scopes_data_offset      = _oops_offset          + round_to(code_buffer->total_oop_size(), oopSize);
+    _metadata_offset         = _oops_offset          + round_to(code_buffer->total_oop_size(), oopSize);
+    _scopes_data_offset      = _metadata_offset      + round_to(code_buffer->total_metadata_size(), wordSize);
     _scopes_pcs_offset       = _scopes_data_offset;
     _dependencies_offset     = _scopes_pcs_offset;
     _handler_table_offset    = _dependencies_offset;
     _nul_chk_table_offset    = _handler_table_offset;
     _nmethod_end_offset      = _nul_chk_table_offset;
@@ -750 +755 @@
     _verified_entry_point    = code_begin() + offsets->value(CodeOffsets::Verified_Entry);
     _osr_entry_point         = NULL;
     _exception_cache         = NULL;
     _pc_desc_cache.reset_to(NULL);

-    code_buffer->copy_oops_to(this);
+    code_buffer->copy_values_to(this);
     debug_only(verify_scavenge_root_oops());
     CodeCache::commit(this);
   }

   if (PrintNMethods || PrintDebugInfo || PrintRelocations || PrintDependencies) {
@@ -790 +795 @@
   return CodeCache::allocate(nmethod_size);
 }


 nmethod::nmethod(
-  methodOop method,
+  Method* method,
   int nmethod_size,
   int compile_id,
   int entry_bci,
   CodeOffsets* offsets,
   int orig_pc_offset,
@@ -845 +850 @@
     } else {
       _unwind_handler_offset = -1;
     }

     _oops_offset             = data_offset();
-    _scopes_data_offset      = _oops_offset          + round_to(code_buffer->total_oop_size (), oopSize);
+    _metadata_offset         = _oops_offset          + round_to(code_buffer->total_oop_size(), oopSize);
+    _scopes_data_offset      = _metadata_offset      + round_to(code_buffer->total_metadata_size(), wordSize);
+
     _scopes_pcs_offset       = _scopes_data_offset   + round_to(debug_info->data_size       (), oopSize);
     _dependencies_offset     = _scopes_pcs_offset    + adjust_pcs_size(debug_info->pcs_size());
     _handler_table_offset    = _dependencies_offset  + round_to(dependencies->size_in_bytes (), oopSize);
     _nul_chk_table_offset    = _handler_table_offset + round_to(handler_table->size_in_bytes(), oopSize);
     _nmethod_end_offset      = _nul_chk_table_offset + round_to(nul_chk_table->size_in_bytes(), oopSize);
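For a full compiled method the data section is therefore ordered as below, with the metadata block newly wedged between the oops and the debug info. Each boundary is padded (round_to, or adjust_pcs_size for the PcDescs) so the next block starts aligned:

    oops | metadata | scopes data | scopes pcs | dependencies
         | handler table | nul-chk table | _nmethod_end_offset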
@@ -859 +866 @@
     _osr_entry_point         = code_begin() + offsets->value(CodeOffsets::OSR_Entry);
     _exception_cache         = NULL;
     _pc_desc_cache.reset_to(scopes_pcs_begin());

     // Copy contents of ScopeDescRecorder to nmethod
-    code_buffer->copy_oops_to(this);
+    code_buffer->copy_values_to(this);
     debug_info->copy_to(this);
     dependencies->copy_to(this);
     if (ScavengeRootsInCode && detect_scavenge_root_oops()) {
       CodeCache::add_scavenge_root_nmethod(this);
     }
@@ -1001 +1008 @@
     (*dest) = JNIHandles::resolve_non_null(handle);
   }
 }


-void nmethod::copy_oops(GrowableArray<jobject>* array) {
-  //assert(oops_size() == 0, "do this handshake just once, please");
+// Have to have the same name because it's called by a template
+void nmethod::copy_values(GrowableArray<jobject>* array) {
   int length = array->length();
-  assert((address)(oops_begin() + length) <= data_end(), "oops big enough");
+  assert((address)(oops_begin() + length) <= (address)oops_end(), "oops big enough");
   oop* dest = oops_begin();
   for (int index = 0 ; index < length; index++) {
     initialize_immediate_oop(&dest[index], array->at(index));
   }

@@ -1018 +1025 @@
   // CodeBlob constructor, so it is valid even at this early point to
   // iterate over relocations and patch the code.
   fix_oop_relocations(NULL, NULL, /*initialize_immediates=*/ true);
 }

+void nmethod::copy_values(GrowableArray<Metadata*>* array) {
+  int length = array->length();
+  assert((address)(metadata_begin() + length) <= (address)metadata_end(), "big enough");
+  Metadata** dest = metadata_begin();
+  for (int index = 0 ; index < length; index++) {
+    dest[index] = array->at(index);
+  }
+}

 bool nmethod::is_at_poll_return(address pc) {
   RelocIterator iter(this, pc, pc+1);
   while (iter.next()) {
     if (iter.type() == relocInfo::poll_return_type)
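The new comment above copy_values explains the duplicated name: some template in the code-installation path calls dest->copy_values(values), and C++ overload resolution picks the jobject or Metadata* flavor from the element type. A minimal sketch of that dispatch (names hypothetical):

    #include <cstdio>
    #include <vector>

    struct Sink {
      // The overloads share one name so a single template can fill either pool.
      void copy_values(const std::vector<int>& oops)   { printf("oops: %zu\n", oops.size()); }
      void copy_values(const std::vector<long>& metas) { printf("metadata: %zu\n", metas.size()); }
    };

    template <class Dest, class Values>
    void install(Dest* dest, const Values& v) {
      dest->copy_values(v);   // overload chosen by the element type of Values
    }

    int main() {
      Sink s;
      install(&s, std::vector<int>{1, 2});   // -> "oops" flavor
      install(&s, std::vector<long>{3L});    // -> "metadata" flavor
    }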
@@ -1052 +1067 @@
         oop* dest = reloc->oop_addr();
         initialize_immediate_oop(dest, (jobject) *dest);
       }
       // Refresh the oop-related bits of this instruction.
       reloc->fix_oop_relocation();
+    } else if (iter.type() == relocInfo::metadata_type) {
+      metadata_Relocation* reloc = iter.metadata_reloc();
+      reloc->fix_metadata_relocation();
     }

     // There must not be any interfering patches or breakpoints.
     assert(!(iter.type() == relocInfo::breakpoint_type
              && iter.breakpoint_reloc()->active()),
@@ -1170 +1188 @@
 }

 void nmethod::inc_decompile_count() {
   if (!is_compiled_by_c2()) return;
   // Could be gated by ProfileTraps, but do not bother...
-  methodOop m = method();
+  Method* m = method();
   if (m == NULL)  return;
-  methodDataOop mdo = m->method_data();
+  MethodData* mdo = m->method_data();
   if (mdo == NULL)  return;
-  // There is a benign race here. See comments in methodDataOop.hpp.
+  // There is a benign race here. See comments in methodData.hpp.
   mdo->inc_decompile_count();
 }
@@ -1182 +1200 @@

 void nmethod::make_unloaded(BoolObjectClosure* is_alive, oop cause) {

@@ -1193 +1211 @@
   flush_dependencies(is_alive);

   // Break cycle between nmethod & method
   if (TraceClassUnloading && WizardMode) {
     tty->print_cr("[Class unloading: Making nmethod " INTPTR_FORMAT
-                  " unloadable], methodOop(" INTPTR_FORMAT
+                  " unloadable], Method*(" INTPTR_FORMAT
                   "), cause(" INTPTR_FORMAT ")",
                   this, (address)_method, (address)cause);
     if (!Universe::heap()->is_gc_active())
       cause->klass()->print();
   }
   // Unlink the osr method, so we do not look this up again
   if (is_osr_method()) {
     invalidate_osr_method();
   }
-  // If _method is already NULL the methodOop is about to be unloaded,
+  // If _method is already NULL the Method* is about to be unloaded,
   // so we don't have to break the cycle. Note that it is possible to
-  // have the methodOop live here, in case we unload the nmethod because
-  // it is pointing to some oop (other than the methodOop) being unloaded.
+  // have the Method* live here, in case we unload the nmethod because
+  // it is pointing to some oop (other than the Method*) being unloaded.
   if (_method != NULL) {
-    // OSR methods point to the methodOop, but the methodOop does not
+    // OSR methods point to the Method*, but the Method* does not
     // point back!
     if (_method->code() == this) {
       _method->clear_code(); // Break a cycle
     }
     _method = NULL;            // Clear the method of this dead nmethod
@@ -1228 +1246 @@
   _state = unloaded;

   // Log the unloading.
   log_state_change();

-  // The methodOop is gone at this point
+  // The Method* is gone at this point
   assert(_method == NULL, "Tautology");

   set_osr_link(NULL);
   //set_scavenge_root_link(NULL); // done by prune_scavenge_root_nmethods
   NMethodSweeper::notify(this);
@@ -1240 +1258 @@

 void nmethod::invalidate_osr_method() {
   assert(_entry_bci != InvocationEntryBci, "wrong kind of nmethod");
   // Remove from list of active nmethods
   if (method() != NULL)
-    instanceKlass::cast(method()->method_holder())->remove_osr_nmethod(this);
+    InstanceKlass::cast(method()->method_holder())->remove_osr_nmethod(this);
   // Set entry as invalid
   _entry_bci = InvalidOSREntryBci;
 }

 void nmethod::log_state_change() const {
@@ -1318 +1336 @@
   log_state_change();

   // Remove nmethod from method.
   // We need to check if both the _code and _from_compiled_code_entry_point
   // refer to this nmethod because there is a race in setting these two fields
-  // in methodOop as seen in bugid 4947125.
+  // in Method* as seen in bugid 4947125.
   // If the vep() points to the zombie nmethod, the memory for the nmethod
   // could be flushed and the compiler and vtable stubs could still call
   // through it.
   if (method() != NULL && (method()->code() == this ||
                            method()->from_compiled_entry() == verified_entry_point())) {
@@ -1438 +1456 @@
   assert(Universe::heap()->is_gc_active() == (is_alive != NULL),
          "is_alive is non-NULL if and only if we are called during GC");
   if (!has_flushed_dependencies()) {
     set_has_flushed_dependencies();
     for (Dependencies::DepStream deps(this); deps.next(); ) {
-      klassOop klass = deps.context_type();
+      Klass* klass = deps.context_type();
       if (klass == NULL)  continue;  // ignore things like evol_method

       // During GC the is_alive closure is non-NULL, and is used to
       // determine liveness of dependees that need to be updated.
-      if (is_alive == NULL || is_alive->do_object_b(klass)) {
-        instanceKlass::cast(klass)->remove_dependent_nmethod(this);
+      if (is_alive == NULL || klass->is_loader_alive(is_alive)) {
+        InstanceKlass::cast(klass)->remove_dependent_nmethod(this);
       }
     }
   }
 }

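This hunk shows the recurring liveness change: a Klass is no longer a heap object the GC closure can answer do_object_b() for, so reachability is instead asked of the class loader that anchors the metadata (roughly, via the loader's ClassLoaderData). A simplified stand-alone model of is_loader_alive (illustrative types):

    struct oop_t { bool marked; };

    struct BoolObjectClosure {                 // GC-provided oracle for heap oops
      virtual bool do_object_b(oop_t* o) { return o->marked; }
      virtual ~BoolObjectClosure() {}
    };

    struct Klass_t {                           // native metadata, not a heap object
      oop_t* class_loader;                     // its only heap anchor
      bool is_loader_alive(BoolObjectClosure* c) {
        // Metadata stays alive exactly as long as its loader's oop is reachable;
        // a null loader models the always-live boot loader.
        return class_loader == nullptr || c->do_object_b(class_loader);
      }
    };

    int main() {
      oop_t loader = { true };
      Klass_t k = { &loader };
      BoolObjectClosure gc;
      return k.is_loader_alive(&gc) ? 0 : 1;
    }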
@@ -1460 +1478 @@
   assert(root != NULL, "just checking");
   oop obj = *root;
   if (obj == NULL || is_alive->do_object_b(obj)) {
       return false;
   }
-  if (obj->is_compiledICHolder()) {
-    compiledICHolderOop cichk_oop = compiledICHolderOop(obj);
-    if (is_alive->do_object_b(
-          cichk_oop->holder_method()->method_holder()) &&
-        is_alive->do_object_b(cichk_oop->holder_klass())) {
-      // The oop should be kept alive
-      keep_alive->do_oop(root);
-      return false;
-    }
-  }
+
   // If ScavengeRootsInCode is true, an nmethod might be unloaded
   // simply because one of its constant oops has gone dead.
   // No actual classes need to be unloaded in order for this to occur.
   assert(unloading_occurred || ScavengeRootsInCode, "Inconsistency in unloading");
   make_unloaded(is_alive, obj);
@@ -1484 +1493 @@
 // post_compiled_method_load_event
 // new method for install_code() path
 // Transfer information from compilation to jvmti
 void nmethod::post_compiled_method_load_event() {

-  methodOop moop = method();
+  Method* moop = method();
 #ifndef USDT2
   HS_DTRACE_PROBE8(hotspot, compiled__method__load,
       moop->klass_name()->bytes(),
       moop->klass_name()->utf8_length(),
       moop->name()->bytes(),
@@ -1539 +1548 @@
   assert(_method != NULL && !is_unloaded(), "just checking");
   DTRACE_METHOD_UNLOAD_PROBE(method());

   // If a JVMTI agent has enabled the CompiledMethodUnload event then
   // post the event. Sometime later this nmethod will be made a zombie
-  // by the sweeper but the methodOop will not be valid at that point.
+  // by the sweeper but the Method* will not be valid at that point.
   // If the _jmethod_id is null then no load event was ever requested
   // so don't bother posting the unload.  The main reason for this is
-  // that the jmethodID is a weak reference to the methodOop so if
+  // that the jmethodID is a weak reference to the Method* so if
   // it's being unloaded there's no way to look it up since the weak
   // ref will have been cleared.
   if (_jmethod_id != NULL && JvmtiExport::should_post_compiled_method_unload()) {
     assert(!unload_reported(), "already unloaded");
     JvmtiDeferredEvent event =
@@ -1600 +1609 @@
     // call to post_compiled_method_unload() so that the unloading
     // of this nmethod is reported.
     unloading_occurred = true;
   }

-  // Follow methodOop
-  if (can_unload(is_alive, keep_alive, (oop*)&_method, unloading_occurred)) {
-    return;
-  }
-
   // Exception cache
   ExceptionCache* ec = exception_cache();
   while (ec != NULL) {
-    oop* ex_addr = (oop*)ec->exception_type_addr();
-    oop ex = *ex_addr;
+    Klass* ex_klass = ec->exception_type();
     ExceptionCache* next_ec = ec->next();
-    if (ex != NULL && !is_alive->do_object_b(ex)) {
-      assert(!ex->is_compiledICHolder(), "Possible error here");
+    if (ex_klass != NULL && !ex_klass->is_loader_alive(is_alive)) {
       remove_from_exception_cache(ec);
     }
     ec = next_ec;
   }

@@ -1627 +1629 @@
   if (unloading_occurred) {
     RelocIterator iter(this, low_boundary);
     while(iter.next()) {
       if (iter.type() == relocInfo::virtual_call_type) {
         CompiledIC *ic = CompiledIC_at(iter.reloc());
-        oop ic_oop = ic->cached_oop();
-        if (ic_oop != NULL && !is_alive->do_object_b(ic_oop)) {
+        if (ic->is_icholder_call()) {
           // The only exception is compiledICHolder oops which may
           // yet be marked below. (We check this further below).
-          if (ic_oop->is_compiledICHolder()) {
-            compiledICHolderOop cichk_oop = compiledICHolderOop(ic_oop);
-            if (is_alive->do_object_b(
-                  cichk_oop->holder_method()->method_holder()) &&
-                is_alive->do_object_b(cichk_oop->holder_klass())) {
+          CompiledICHolder* cichk_oop = ic->cached_icholder();
+          if (cichk_oop->holder_method()->method_holder()->is_loader_alive(is_alive) &&
+              cichk_oop->holder_klass()->is_loader_alive(is_alive)) {
             continue;
           }
+        } else {
+          Metadata* ic_oop = ic->cached_metadata();
+          if (ic_oop != NULL) {
+            if (ic_oop->is_klass()) {
+              if (((Klass*)ic_oop)->is_loader_alive(is_alive)) {
+                continue;
+              }
+            } else if (ic_oop->is_method()) {
+              if (((Method*)ic_oop)->method_holder()->is_loader_alive(is_alive)) {
+                continue;
+              }
+            } else {
+              ShouldNotReachHere();
+            }
+          }
         }
         ic->set_to_clean();
-        assert(ic->cached_oop() == NULL,
-               "cached oop in IC should be cleared");
-      }
       }
     }
   }
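Inline-cache cleaning above now dispatches on what the IC caches: a CompiledICHolder (kept only if both its holder method's class and its holder klass are loader-alive), a Klass*, or a Method*; anything else is a bug (ShouldNotReachHere). The decision, reduced to a stand-alone predicate (illustrative):

    enum class Cached { icholder, klass, method };

    // Returns true if the inline cache may keep its cached value; false means
    // it must be reset (the set_to_clean() path above).
    bool keep_ic(Cached kind, bool holder_loader_alive, bool klass_loader_alive) {
      switch (kind) {
        case Cached::icholder: return holder_loader_alive && klass_loader_alive;
        case Cached::klass:    return klass_loader_alive;
        case Cached::method:   return holder_loader_alive;
      }
      return false;  // the real code asserts ShouldNotReachHere() for other shapes
    }

    int main() {
      // A Method*-cached IC survives iff its holder class's loader is alive.
      return keep_ic(Cached::method, true, false) ? 0 : 1;
    }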

   // Compiled code
+  {
   RelocIterator iter(this, low_boundary);
   while (iter.next()) {
     if (iter.type() == relocInfo::oop_type) {
       oop_Relocation* r = iter.oop_reloc();
       // In this loop, we must only traverse those oops directly embedded in
@@ -1664 +1676 @@
           return;
         }
       }
     }
   }
+  }


   // Scopes
   for (oop* p = oops_begin(); p < oops_end(); p++) {
     if (*p == Universe::non_oop_word())  continue;  // skip non-oops
     if (can_unload(is_alive, keep_alive, p, unloading_occurred)) {
       return;
     }
   }

-#ifndef PRODUCT
-  // This nmethod was not unloaded; check below that all CompiledICs
-  // refer to marked oops.
-  {
+  // Ensure that all metadata is still alive
+  verify_metadata_loaders(low_boundary, is_alive);
+}
+
+#ifdef ASSERT
+
+class CheckClass : AllStatic {
+  static BoolObjectClosure* _is_alive;
+
+  // Check class_loader is alive for this bit of metadata.
+  static void check_class(Metadata* md) {
+    Klass* klass = NULL;
+    if (md->is_klass()) {
+      klass = ((Klass*)md);
+    } else if (md->is_method()) {
+      klass = ((Method*)md)->method_holder();
+    } else if (md->is_methodData()) {
+      klass = ((MethodData*)md)->method()->method_holder();
+    } else {
+      md->print();
+      ShouldNotReachHere();
+    }
+    assert(klass->is_loader_alive(_is_alive), "must be alive");
+  }
+ public:
+  static void do_check_class(BoolObjectClosure* is_alive, nmethod* nm) {
+    assert(SafepointSynchronize::is_at_safepoint(), "this is only ok at safepoint");
+    _is_alive = is_alive;
+    nm->metadata_do(check_class);
+  }
+};
+
+// This is called during a safepoint so can use static data
+BoolObjectClosure* CheckClass::_is_alive = NULL;
+#endif // ASSERT
+
+
+// Processing of oop references should have been sufficient to keep
+// all strong references alive.  Any weak references should have been
+// cleared as well.  Visit all the metadata and ensure that it's
+// really alive.
+void nmethod::verify_metadata_loaders(address low_boundary, BoolObjectClosure* is_alive) {
+#ifdef ASSERT
   RelocIterator iter(this, low_boundary);
   while (iter.next()) {
-    if (iter.type() == relocInfo::virtual_call_type) {
-      CompiledIC *ic = CompiledIC_at(iter.reloc());
-      oop ic_oop = ic->cached_oop();
-      assert(ic_oop == NULL || is_alive->do_object_b(ic_oop),
-             "Found unmarked ic_oop in reachable nmethod");
-    }
-  }
-}
-#endif // !PRODUCT
-}
+    // static_stub_Relocations may have dangling references to
+    // Method*s so trim them out here.  Otherwise it looks like
+    // compiled code is maintaining a link to dead metadata.
+    address static_call_addr = NULL;
+    if (iter.type() == relocInfo::opt_virtual_call_type) {
+      CompiledIC* cic = CompiledIC_at(iter.reloc());
+      if (!cic->is_call_to_interpreted()) {
+        static_call_addr = iter.addr();
+      }
+    } else if (iter.type() == relocInfo::static_call_type) {
+      CompiledStaticCall* csc = compiledStaticCall_at(iter.reloc());
+      if (!csc->is_call_to_interpreted()) {
+        static_call_addr = iter.addr();
+      }
+    }
+    if (static_call_addr != NULL) {
+      RelocIterator sciter(this, low_boundary);
+      while (sciter.next()) {
+        if (sciter.type() == relocInfo::static_stub_type &&
+            sciter.static_stub_reloc()->static_call() == static_call_addr) {
+          sciter.static_stub_reloc()->clear_inline_cache();
+        }
+      }
+    }
+  }
+  // Check that the metadata embedded in the nmethod is alive
+  CheckClass::do_check_class(is_alive, this);
+#endif
+}
+
+
+// Iterate over metadata calling this function.  Used by RedefineClasses
+void nmethod::metadata_do(void f(Metadata*)) {
+  address low_boundary = verified_entry_point();
+  if (is_not_entrant()) {
+    low_boundary += NativeJump::instruction_size;
+    // %%% Note:  On SPARC we patch only a 4-byte trap, not a full NativeJump.
+    // (See comment above.)
+  }
+  {
+    // Visit all immediate references that are embedded in the instruction stream.
+    RelocIterator iter(this, low_boundary);
+    while (iter.next()) {
+      if (iter.type() == relocInfo::metadata_type ) {
+        metadata_Relocation* r = iter.metadata_reloc();
+        // In this loop, we must only follow those metadatas directly embedded in
+        // the code.  Other metadatas (oop_index>0) are seen as part of
+        // the metadata section below.
+        assert(1 == (r->metadata_is_immediate()) +
+               (r->metadata_addr() >= metadata_begin() && r->metadata_addr() < metadata_end()),
+               "metadata must be found in exactly one place");
+        if (r->metadata_is_immediate() && r->metadata_value() != NULL) {
+          Metadata* md = r->metadata_value();
+          f(md);
+        }
+      }
+    }
+  }
+
+  // Visit the metadata section
+  for (Metadata** p = metadata_begin(); p < metadata_end(); p++) {
+    if (*p == Universe::non_oop_word() || *p == NULL)  continue;  // skip non-oops
+    Metadata* md = *p;
+    f(md);
+  }
+  // Call function Method*, not embedded in these other places.
+  if (_method != NULL) f(_method);
+}
+

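metadata_do() gives callers such as RedefineClasses (and the assert-only CheckClass above) one walk over every Metadata* an nmethod references: relocation immediates, the metadata section, and _method itself. A stand-alone model of the function-pointer walk over the section (simplified; the real walk also visits relocation immediates):

    #include <cstdio>

    struct Metadata { const char* name; };

    // Models the section walk: the callback is a plain function pointer,
    // exactly the `void f(Metadata*)` parameter shape used above.
    void metadata_do_model(Metadata** begin, Metadata** end, void f(Metadata*)) {
      for (Metadata** p = begin; p < end; ++p) {
        if (*p != nullptr) f(*p);     // the real walk also skips non_oop_word
      }
    }

    static void print_md(Metadata* md) { printf("md: %s\n", md->name); }

    int main() {
      Metadata a = { "Method foo" }, b = { "MethodData foo" };
      Metadata* section[] = { &a, nullptr, &b };
      metadata_do_model(section, section + 3, print_md);
    }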
 // This method is called twice during GC -- once while
 // tracing the "active" nmethods on thread stacks during
 // the (strong) marking phase, and then again when walking
 // the code cache contents during the weak roots processing
@@ -1717 +1828 @@
     low_boundary += NativeJump::instruction_size;
     // %%% Note:  On SPARC we patch only a 4-byte trap, not a full NativeJump.
     // (See comment above.)
   }

-  // Compiled code
-  f->do_oop((oop*) &_method);
-  if (!do_strong_roots_only) {
-    // weak roots processing phase -- update ExceptionCache oops
-    ExceptionCache* ec = exception_cache();
-    while(ec != NULL) {
-      f->do_oop((oop*)ec->exception_type_addr());
-      ec = ec->next();
-    }
-  } // Else strong roots phase -- skip oops in ExceptionCache
-
   RelocIterator iter(this, low_boundary);

   while (iter.next()) {
     if (iter.type() == relocInfo::oop_type ) {
       oop_Relocation* r = iter.oop_reloc();
@@ -2061 +2161 @@
     }
   }
   return found_check;
 }

-bool nmethod::is_evol_dependent_on(klassOop dependee) {
-  instanceKlass *dependee_ik = instanceKlass::cast(dependee);
-  objArrayOop dependee_methods = dependee_ik->methods();
+bool nmethod::is_evol_dependent_on(Klass* dependee) {
+  InstanceKlass *dependee_ik = InstanceKlass::cast(dependee);
+  Array<Method*>* dependee_methods = dependee_ik->methods();
   for (Dependencies::DepStream deps(this); deps.next(); ) {
     if (deps.type() == Dependencies::evol_method) {
-      methodOop method = deps.method_argument(0);
+      Method* method = deps.method_argument(0);
       for (int j = 0; j < dependee_methods->length(); j++) {
-        if ((methodOop) dependee_methods->obj_at(j) == method) {
+        if (dependee_methods->at(j) == method) {
           // RC_TRACE macro has an embedded ResourceMark
           RC_TRACE(0x01000000,
             ("Found evol dependency of nmethod %s.%s(%s) compile_id=%d on method %s.%s(%s)",
-             _method->method_holder()->klass_part()->external_name(),
+             _method->method_holder()->external_name(),
              _method->name()->as_C_string(),
              _method->signature()->as_C_string(), compile_id(),
-             method->method_holder()->klass_part()->external_name(),
+             method->method_holder()->external_name(),
              method->name()->as_C_string(),
              method->signature()->as_C_string()));
           if (TraceDependencies || LogCompilation)
             deps.log_dependency(dependee);
           return true;
@@ -2089 +2189 @@
   }
   return false;
 }

 // Called from mark_for_deoptimization, when dependee is invalidated.
-bool nmethod::is_dependent_on_method(methodOop dependee) {
+bool nmethod::is_dependent_on_method(Method* dependee) {
   for (Dependencies::DepStream deps(this); deps.next(); ) {
     if (deps.type() != Dependencies::evol_method)
       continue;
-    methodOop method = deps.method_argument(0);
+    Method* method = deps.method_argument(0);
     if (method == dependee) return true;
   }
   return false;
 }

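The method array also leaves the Java heap: objArrayOop element access (obj_at(j) returning an oop that needed a cast to methodOop) becomes a typed native Array<Method*>::at(j). A rough stand-alone model of the new access shape:

    #include <cassert>

    struct Method_t {};

    // Models Array<Method*>: typed, natively allocated, bounds-checked at().
    template <class T> struct NativeArray {
      int _length;
      T*  _data;
      int length() const { return _length; }
      T at(int i) const { assert(i >= 0 && i < _length); return _data[i]; }
    };

    int main() {
      Method_t m;
      Method_t* elems[] = { &m };
      NativeArray<Method_t*> methods = { 1, elems };
      assert(methods.at(0) == &m);   // typed access; no (methodOop) cast as with obj_at(j)
      return 0;
    }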
@@ -2232 +2332 @@
     return;

   // Make sure all the entry points are correctly aligned for patching.
   NativeJump::check_verified_entry_alignment(entry_point(), verified_entry_point());

-  assert(method()->is_oop(), "must be valid");
+  // assert(method()->is_oop(), "must be valid");

   ResourceMark rm;

   if (!CodeCache::contains(this)) {
     fatal(err_msg("nmethod at " INTPTR_FORMAT " not in zone", this));
@@ -2272 +2372 @@
   CompiledIC* ic = NULL;
   Thread *cur = Thread::current();
   if (CompiledIC_lock->owner() == cur ||
       ((cur->is_VM_thread() || cur->is_ConcurrentGC_thread()) &&
        SafepointSynchronize::is_at_safepoint())) {
-    ic = CompiledIC_at(call_site);
+    ic = CompiledIC_at(this, call_site);
     CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
   } else {
     MutexLocker ml_verify (CompiledIC_lock);
-    ic = CompiledIC_at(call_site);
+    ic = CompiledIC_at(this, call_site);
   }
   PcDesc* pd = pc_desc_at(ic->end_of_call());
   assert(pd != NULL, "PcDesc must exist");
   for (ScopeDesc* sd = new ScopeDesc(this, pd->scope_decode_offset(),
                                      pd->obj_decode_offset(), pd->should_reexecute(),
@@ -2411 +2511 @@
                                               stub_size());
   if (oops_size         () > 0) tty->print_cr(" oops           [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
                                               oops_begin(),
                                               oops_end(),
                                               oops_size());
+  if (metadata_size     () > 0) tty->print_cr(" metadata       [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
+                                              metadata_begin(),
+                                              metadata_end(),
+                                              metadata_size());
   if (scopes_data_size  () > 0) tty->print_cr(" scopes data    [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
                                               scopes_data_begin(),
                                               scopes_data_end(),
                                               scopes_data_size());
   if (scopes_pcs_size   () > 0) tty->print_cr(" scopes pcs     [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
@@ -2460 +2564 @@
   ResourceMark rm;
   ttyLocker ttyl;   // keep the following output all in one block
   tty->print_cr("Dependencies:");
   for (Dependencies::DepStream deps(this); deps.next(); ) {
     deps.print_dependency();
-    klassOop ctxk = deps.context_type();
+    Klass* ctxk = deps.context_type();
     if (ctxk != NULL) {
       Klass* k = Klass::cast(ctxk);
-      if (k->oop_is_instance() && ((instanceKlass*)k)->is_dependent_nmethod(this)) {
+      if (k->oop_is_instance() && ((InstanceKlass*)k)->is_dependent_nmethod(this)) {
         tty->print_cr("   [nmethod<=klass]%s", k->external_name());
       }
     }
     deps.log_dependency();  // put it into the xml log also
   }
@@ -2521 +2625 @@
     case relocInfo::oop_type: {
       stringStream st;
       oop_Relocation* r = iter.oop_reloc();
       oop obj = r->oop_value();
       st.print("oop(");
+      if (obj == NULL) st.print("NULL");
+      else obj->print_value_on(&st);
+      st.print(")");
+      return st.as_string();
+    }
+    case relocInfo::metadata_type: {
+      stringStream st;
+      metadata_Relocation* r = iter.metadata_reloc();
+      Metadata* obj = r->metadata_value();
+      st.print("metadata(");
       if (obj == NULL) st.print("NULL");
       else obj->print_value_on(&st);
       st.print(")");
       return st.as_string();
     }
@@ -2688 +2802 @@
   if (sd != NULL) {
     st->move_to(column);
     if (sd->bci() == SynchronizationEntryBCI) {
       st->print(";*synchronization entry");
     } else {
-      if (sd->method().is_null()) {
+      if (sd->method() == NULL) {
         st->print("method is NULL");
       } else if (sd->method()->is_native()) {
         st->print("method is native");
       } else {
         Bytecodes::Code bc = sd->method()->java_code_at(sd->bci());
@@ -2729 +2843 @@

   // Print all scopes
   for (;sd != NULL; sd = sd->sender()) {
     st->move_to(column);
     st->print("; -");
-    if (sd->method().is_null()) {
+    if (sd->method() == NULL) {
       st->print("method is NULL");
     } else {
       sd->method()->print_short_name(st);
     }
     int lineno = sd->method()->line_number_from_bci(sd->bci());