comparison src/share/vm/code/nmethod.cpp @ 7066:7d815d842ee0

Merge.
author Christian Haeubl <haeubl@ssw.jku.at>
date Fri, 23 Nov 2012 11:50:27 +0100
parents 8c5333c80cfd 45029b3ac59f
children 97d0eae99568
comparing 7065:cfacf5d5bade with 7066:7d815d842ee0
32 #include "compiler/compileBroker.hpp" 32 #include "compiler/compileBroker.hpp"
33 #include "compiler/compileLog.hpp" 33 #include "compiler/compileLog.hpp"
34 #include "compiler/compilerOracle.hpp" 34 #include "compiler/compilerOracle.hpp"
35 #include "compiler/disassembler.hpp" 35 #include "compiler/disassembler.hpp"
36 #include "interpreter/bytecode.hpp" 36 #include "interpreter/bytecode.hpp"
37 #include "oops/methodDataOop.hpp" 37 #include "oops/methodData.hpp"
38 #include "prims/jvmtiRedefineClassesTrace.hpp" 38 #include "prims/jvmtiRedefineClassesTrace.hpp"
39 #include "prims/jvmtiImpl.hpp" 39 #include "prims/jvmtiImpl.hpp"
40 #include "runtime/sharedRuntime.hpp" 40 #include "runtime/sharedRuntime.hpp"
41 #include "runtime/sweeper.hpp" 41 #include "runtime/sweeper.hpp"
42 #include "utilities/dtrace.hpp" 42 #include "utilities/dtrace.hpp"
43 #include "utilities/events.hpp" 43 #include "utilities/events.hpp"
44 #include "utilities/xmlstream.hpp" 44 #include "utilities/xmlstream.hpp"
45 #include "utilities/debug.hpp"
45 #include "utilities/machineCodePrinter.hpp" 46 #include "utilities/machineCodePrinter.hpp"
46 #ifdef SHARK 47 #ifdef SHARK
47 #include "shark/sharkCompiler.hpp" 48 #include "shark/sharkCompiler.hpp"
48 #endif 49 #endif
49 #ifdef GRAAL 50 #ifdef GRAAL
61 HS_DTRACE_PROBE_DECL6(hotspot, compiled__method__unload, 62 HS_DTRACE_PROBE_DECL6(hotspot, compiled__method__unload,
62 char*, int, char*, int, char*, int); 63 char*, int, char*, int, char*, int);
63 64
64 #define DTRACE_METHOD_UNLOAD_PROBE(method) \ 65 #define DTRACE_METHOD_UNLOAD_PROBE(method) \
65 { \ 66 { \
66 methodOop m = (method); \ 67 Method* m = (method); \
67 if (m != NULL) { \ 68 if (m != NULL) { \
68 Symbol* klass_name = m->klass_name(); \ 69 Symbol* klass_name = m->klass_name(); \
69 Symbol* name = m->name(); \ 70 Symbol* name = m->name(); \
70 Symbol* signature = m->signature(); \ 71 Symbol* signature = m->signature(); \
71 HS_DTRACE_PROBE6(hotspot, compiled__method__unload, \ 72 HS_DTRACE_PROBE6(hotspot, compiled__method__unload, \
75 } \ 76 } \
76 } 77 }
77 #else /* USDT2 */ 78 #else /* USDT2 */
78 #define DTRACE_METHOD_UNLOAD_PROBE(method) \ 79 #define DTRACE_METHOD_UNLOAD_PROBE(method) \
79 { \ 80 { \
80 methodOop m = (method); \ 81 Method* m = (method); \
81 if (m != NULL) { \ 82 if (m != NULL) { \
82 Symbol* klass_name = m->klass_name(); \ 83 Symbol* klass_name = m->klass_name(); \
83 Symbol* name = m->name(); \ 84 Symbol* name = m->name(); \
84 Symbol* signature = m->signature(); \ 85 Symbol* signature = m->signature(); \
85 HOTSPOT_COMPILED_METHOD_UNLOAD( \ 86 HOTSPOT_COMPILED_METHOD_UNLOAD( \
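Both USDT variants of the macro expand to the same NULL-guarded block, so a call site just hands over the Method* it holds. A minimal usage sketch, mirroring the call in post_compiled_method_unload further down in this file:

    DTRACE_METHOD_UNLOAD_PROBE(method());  // safe even if method() is NULL; the macro checks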
463 _has_flushed_dependencies = 0; 464 _has_flushed_dependencies = 0;
464 _speculatively_disconnected = 0; 465 _speculatively_disconnected = 0;
465 _has_unsafe_access = 0; 466 _has_unsafe_access = 0;
466 _has_method_handle_invokes = 0; 467 _has_method_handle_invokes = 0;
467 _lazy_critical_native = 0; 468 _lazy_critical_native = 0;
469 _has_wide_vectors = 0;
468 _marked_for_deoptimization = 0; 470 _marked_for_deoptimization = 0;
469 _lock_count = 0; 471 _lock_count = 0;
470 _stack_traversal_mark = 0; 472 _stack_traversal_mark = 0;
471 _unload_reported = false; // jvmti state 473 _unload_reported = false; // jvmti state
472 474
479 _osr_link = NULL; 481 _osr_link = NULL;
480 _scavenge_root_link = NULL; 482 _scavenge_root_link = NULL;
481 _scavenge_root_state = 0; 483 _scavenge_root_state = 0;
482 _saved_nmethod_link = NULL; 484 _saved_nmethod_link = NULL;
483 _compiler = NULL; 485 _compiler = NULL;
484 486 #ifdef GRAAL
485 _graal_compiled_method = NULL; 487 _graal_installed_code = NULL;
486 488 #endif
487 #ifdef HAVE_DTRACE_H 489 #ifdef HAVE_DTRACE_H
488 _trap_offset = 0; 490 _trap_offset = 0;
489 #endif // def HAVE_DTRACE_H 491 #endif // def HAVE_DTRACE_H
490 } 492 }
491 493
497 int frame_complete, 499 int frame_complete,
498 int frame_size, 500 int frame_size,
499 ByteSize basic_lock_owner_sp_offset, 501 ByteSize basic_lock_owner_sp_offset,
500 ByteSize basic_lock_sp_offset, 502 ByteSize basic_lock_sp_offset,
501 OopMapSet* oop_maps) { 503 OopMapSet* oop_maps) {
504 code_buffer->finalize_oop_references(method);
502 // create nmethod 505 // create nmethod
503 nmethod* nm = NULL; 506 nmethod* nm = NULL;
504 { 507 {
505 MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag); 508 MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
506 int native_nmethod_size = allocation_size(code_buffer, sizeof(nmethod)); 509 int native_nmethod_size = allocation_size(code_buffer, sizeof(nmethod));
535 CodeBuffer *code_buffer, 538 CodeBuffer *code_buffer,
536 int vep_offset, 539 int vep_offset,
537 int trap_offset, 540 int trap_offset,
538 int frame_complete, 541 int frame_complete,
539 int frame_size) { 542 int frame_size) {
543 code_buffer->finalize_oop_references(method);
540 // create nmethod 544 // create nmethod
541 nmethod* nm = NULL; 545 nmethod* nm = NULL;
542 { 546 {
543 MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag); 547 MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
544 int nmethod_size = allocation_size(code_buffer, sizeof(nmethod)); 548 int nmethod_size = allocation_size(code_buffer, sizeof(nmethod));
576 OopMapSet* oop_maps, 580 OopMapSet* oop_maps,
577 ExceptionHandlerTable* handler_table, 581 ExceptionHandlerTable* handler_table,
578 ImplicitExceptionTable* nul_chk_table, 582 ImplicitExceptionTable* nul_chk_table,
579 AbstractCompiler* compiler, 583 AbstractCompiler* compiler,
580 int comp_level 584 int comp_level
585 #ifdef GRAAL
586 , Handle installed_code
587 #endif
581 ) 588 )
582 { 589 {
583 assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR"); 590 assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
591 code_buffer->finalize_oop_references(method);
584 // create nmethod 592 // create nmethod
585 nmethod* nm = NULL; 593 nmethod* nm = NULL;
586 { MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag); 594 { MutexLockerEx mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
587 int nmethod_size = 595 int nmethod_size =
588 allocation_size(code_buffer, sizeof(nmethod)) 596 allocation_size(code_buffer, sizeof(nmethod))
596 orig_pc_offset, debug_info, dependencies, code_buffer, frame_size, 604 orig_pc_offset, debug_info, dependencies, code_buffer, frame_size,
597 oop_maps, 605 oop_maps,
598 handler_table, 606 handler_table,
599 nul_chk_table, 607 nul_chk_table,
600 compiler, 608 compiler,
601 comp_level); 609 comp_level
610 #ifdef GRAAL
611 , installed_code
612 #endif
613 );
602 if (nm != NULL) { 614 if (nm != NULL) {
603 // To make dependency checking during class loading fast, record 615 // To make dependency checking during class loading fast, record
604 // the nmethod dependencies in the classes it is dependent on. 616 // the nmethod dependencies in the classes it is dependent on.
605 // This allows the dependency checking code to simply walk the 617 // This allows the dependency checking code to simply walk the
606 // class hierarchy above the loaded class, checking only nmethods 618 // class hierarchy above the loaded class, checking only nmethods
607 // which are dependent on those classes. The slow way is to 619 // which are dependent on those classes. The slow way is to
608 // check every nmethod for dependencies which makes it linear in 620 // check every nmethod for dependencies which makes it linear in
609 // the number of methods compiled. For applications with a lot 621 // the number of methods compiled. For applications with a lot
610 // classes the slow way is too slow. 622 // classes the slow way is too slow.
611 for (Dependencies::DepStream deps(nm); deps.next(); ) { 623 for (Dependencies::DepStream deps(nm); deps.next(); ) {
612 klassOop klass = deps.context_type(); 624 Klass* klass = deps.context_type();
613 if (klass == NULL) continue; // ignore things like evol_method 625 if (klass == NULL) continue; // ignore things like evol_method
614 626
615 // record this nmethod as dependent on this klass 627 // record this nmethod as dependent on this klass
616 instanceKlass::cast(klass)->add_dependent_nmethod(nm); 628 InstanceKlass::cast(klass)->add_dependent_nmethod(nm);
617 } 629 }
618 } 630 }
619 if (nm != NULL) nmethod_stats.note_nmethod(nm); 631 if (nm != NULL) nmethod_stats.note_nmethod(nm);
620 if (PrintAssembly && nm != NULL) 632 if (PrintAssembly && nm != NULL)
621 Disassembler::decode(nm); 633 Disassembler::decode(nm);
637 } 649 }
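The comment above motivates storing the dependency lists on the classes themselves. A minimal standalone sketch of that trade-off, using a hypothetical SketchKlass type that only stands in for InstanceKlass (not HotSpot code):

    #include <vector>

    // Hypothetical stand-in for InstanceKlass: each class keeps the nmethods
    // that depend on it, so invalidation after class loading walks one
    // hierarchy chain instead of scanning every compiled method.
    struct SketchKlass {
      SketchKlass* super;                      // superclass link
      std::vector<void*> dependent_nmethods;   // nmethods to re-check

      void add_dependent_nmethod(void* nm) { dependent_nmethods.push_back(nm); }
    };

    // Check only the nmethods hanging off the chain above the loaded class.
    static void check_dependents_fast(SketchKlass* loaded) {
      for (SketchKlass* k = loaded; k != NULL; k = k->super) {
        for (size_t i = 0; i < k->dependent_nmethods.size(); i++) {
          // re-validate k->dependent_nmethods[i] against the new class here
        }
      }
    }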
638 650
639 651
640 // For native wrappers 652 // For native wrappers
641 nmethod::nmethod( 653 nmethod::nmethod(
642 methodOop method, 654 Method* method,
643 int nmethod_size, 655 int nmethod_size,
644 int compile_id, 656 int compile_id,
645 CodeOffsets* offsets, 657 CodeOffsets* offsets,
646 CodeBuffer* code_buffer, 658 CodeBuffer* code_buffer,
647 int frame_size, 659 int frame_size,
668 _orig_pc_offset = 0; 680 _orig_pc_offset = 0;
669 681
670 _consts_offset = data_offset(); 682 _consts_offset = data_offset();
671 _stub_offset = data_offset(); 683 _stub_offset = data_offset();
672 _oops_offset = data_offset(); 684 _oops_offset = data_offset();
673 _scopes_data_offset = _oops_offset + round_to(code_buffer->total_oop_size(), oopSize); 685 _metadata_offset = _oops_offset + round_to(code_buffer->total_oop_size(), oopSize);
686 _scopes_data_offset = _metadata_offset + round_to(code_buffer->total_metadata_size(), wordSize);
674 _scopes_pcs_offset = _scopes_data_offset; 687 _scopes_pcs_offset = _scopes_data_offset;
675 _dependencies_offset = _scopes_pcs_offset; 688 _dependencies_offset = _scopes_pcs_offset;
676 _handler_table_offset = _dependencies_offset; 689 _handler_table_offset = _dependencies_offset;
677 _nul_chk_table_offset = _handler_table_offset; 690 _nul_chk_table_offset = _handler_table_offset;
678 _nmethod_end_offset = _nul_chk_table_offset; 691 _nmethod_end_offset = _nul_chk_table_offset;
682 _verified_entry_point = code_begin() + offsets->value(CodeOffsets::Verified_Entry); 695 _verified_entry_point = code_begin() + offsets->value(CodeOffsets::Verified_Entry);
683 _osr_entry_point = NULL; 696 _osr_entry_point = NULL;
684 _exception_cache = NULL; 697 _exception_cache = NULL;
685 _pc_desc_cache.reset_to(NULL); 698 _pc_desc_cache.reset_to(NULL);
686 699
687 code_buffer->copy_oops_to(this); 700 code_buffer->copy_values_to(this);
688 if (ScavengeRootsInCode && detect_scavenge_root_oops()) { 701 if (ScavengeRootsInCode && detect_scavenge_root_oops()) {
689 CodeCache::add_scavenge_root_nmethod(this); 702 CodeCache::add_scavenge_root_nmethod(this);
690 } 703 }
691 debug_only(verify_scavenge_root_oops()); 704 debug_only(verify_scavenge_root_oops());
692 CodeCache::commit(this); 705 CodeCache::commit(this);
706 // print the header part first 719 // print the header part first
707 print(); 720 print();
708 // then print the requested information 721 // then print the requested information
709 if (PrintNativeNMethods) { 722 if (PrintNativeNMethods) {
710 print_code(); 723 print_code();
711 oop_maps->print(); 724 if (oop_maps != NULL) {
725 oop_maps->print();
726 }
712 } 727 }
713 if (PrintRelocations) { 728 if (PrintRelocations) {
714 print_relocations(); 729 print_relocations();
715 } 730 }
716 if (xtty != NULL) { 731 if (xtty != NULL) {
720 } 735 }
721 736
722 // For dtrace wrappers 737 // For dtrace wrappers
723 #ifdef HAVE_DTRACE_H 738 #ifdef HAVE_DTRACE_H
724 nmethod::nmethod( 739 nmethod::nmethod(
725 methodOop method, 740 Method* method,
726 int nmethod_size, 741 int nmethod_size,
727 CodeOffsets* offsets, 742 CodeOffsets* offsets,
728 CodeBuffer* code_buffer, 743 CodeBuffer* code_buffer,
729 int frame_size) 744 int frame_size)
730 : CodeBlob("dtrace nmethod", code_buffer, sizeof(nmethod), 745 : CodeBlob("dtrace nmethod", code_buffer, sizeof(nmethod),
748 _trap_offset = offsets->value(CodeOffsets::Dtrace_trap); 763 _trap_offset = offsets->value(CodeOffsets::Dtrace_trap);
749 _orig_pc_offset = 0; 764 _orig_pc_offset = 0;
750 _consts_offset = data_offset(); 765 _consts_offset = data_offset();
751 _stub_offset = data_offset(); 766 _stub_offset = data_offset();
752 _oops_offset = data_offset(); 767 _oops_offset = data_offset();
753 _scopes_data_offset = _oops_offset + round_to(code_buffer->total_oop_size(), oopSize); 768 _metadata_offset = _oops_offset + round_to(code_buffer->total_oop_size(), oopSize);
769 _scopes_data_offset = _metadata_offset + round_to(code_buffer->total_metadata_size(), wordSize);
754 _scopes_pcs_offset = _scopes_data_offset; 770 _scopes_pcs_offset = _scopes_data_offset;
755 _dependencies_offset = _scopes_pcs_offset; 771 _dependencies_offset = _scopes_pcs_offset;
756 _handler_table_offset = _dependencies_offset; 772 _handler_table_offset = _dependencies_offset;
757 _nul_chk_table_offset = _handler_table_offset; 773 _nul_chk_table_offset = _handler_table_offset;
758 _nmethod_end_offset = _nul_chk_table_offset; 774 _nmethod_end_offset = _nul_chk_table_offset;
762 _verified_entry_point = code_begin() + offsets->value(CodeOffsets::Verified_Entry); 778 _verified_entry_point = code_begin() + offsets->value(CodeOffsets::Verified_Entry);
763 _osr_entry_point = NULL; 779 _osr_entry_point = NULL;
764 _exception_cache = NULL; 780 _exception_cache = NULL;
765 _pc_desc_cache.reset_to(NULL); 781 _pc_desc_cache.reset_to(NULL);
766 782
767 code_buffer->copy_oops_to(this); 783 code_buffer->copy_values_to(this);
768 debug_only(verify_scavenge_root_oops()); 784 debug_only(verify_scavenge_root_oops());
769 CodeCache::commit(this); 785 CodeCache::commit(this);
770 } 786 }
771 787
772 if (PrintNMethods || PrintDebugInfo || PrintRelocations || PrintDependencies) { 788 if (PrintNMethods || PrintDebugInfo || PrintRelocations || PrintDependencies) {
802 return CodeCache::allocate(nmethod_size); 818 return CodeCache::allocate(nmethod_size);
803 } 819 }
804 820
805 821
806 nmethod::nmethod( 822 nmethod::nmethod(
807 methodOop method, 823 Method* method,
808 int nmethod_size, 824 int nmethod_size,
809 int compile_id, 825 int compile_id,
810 int entry_bci, 826 int entry_bci,
811 CodeOffsets* offsets, 827 CodeOffsets* offsets,
812 int orig_pc_offset, 828 int orig_pc_offset,
817 OopMapSet* oop_maps, 833 OopMapSet* oop_maps,
818 ExceptionHandlerTable* handler_table, 834 ExceptionHandlerTable* handler_table,
819 ImplicitExceptionTable* nul_chk_table, 835 ImplicitExceptionTable* nul_chk_table,
820 AbstractCompiler* compiler, 836 AbstractCompiler* compiler,
821 int comp_level 837 int comp_level
838 #ifdef GRAAL
839 , Handle installed_code
840 #endif
822 ) 841 )
823 : CodeBlob("nmethod", code_buffer, sizeof(nmethod), 842 : CodeBlob("nmethod", code_buffer, sizeof(nmethod),
824 nmethod_size, offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps), 843 nmethod_size, offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps),
825 _native_receiver_sp_offset(in_ByteSize(-1)), 844 _native_receiver_sp_offset(in_ByteSize(-1)),
826 _native_basic_lock_sp_offset(in_ByteSize(-1)) 845 _native_basic_lock_sp_offset(in_ByteSize(-1))
841 // Section offsets 860 // Section offsets
842 _consts_offset = content_offset() + code_buffer->total_offset_of(code_buffer->consts()); 861 _consts_offset = content_offset() + code_buffer->total_offset_of(code_buffer->consts());
843 _stub_offset = content_offset() + code_buffer->total_offset_of(code_buffer->stubs()); 862 _stub_offset = content_offset() + code_buffer->total_offset_of(code_buffer->stubs());
844 863
845 #ifdef GRAAL 864 #ifdef GRAAL
846 // graal produces no (!) stub section 865 _graal_installed_code = installed_code();
847 if (offsets->value(CodeOffsets::Exceptions) != -1) { 866
848 _exception_offset = code_offset() + offsets->value(CodeOffsets::Exceptions); 867 // graal produces no (!) stub section
849 } else { 868 if (offsets->value(CodeOffsets::Exceptions) != -1) {
850 _exception_offset = -1; 869 _exception_offset = code_offset() + offsets->value(CodeOffsets::Exceptions);
851 } 870 } else {
852 if (offsets->value(CodeOffsets::Deopt) != -1) { 871 _exception_offset = -1;
853 _deoptimize_offset = code_offset() + offsets->value(CodeOffsets::Deopt); 872 }
854 } else { 873 if (offsets->value(CodeOffsets::Deopt) != -1) {
855 _deoptimize_offset = -1; 874 _deoptimize_offset = code_offset() + offsets->value(CodeOffsets::Deopt);
856 } 875 } else {
857 if (offsets->value(CodeOffsets::DeoptMH) != -1) { 876 _deoptimize_offset = -1;
858 _deoptimize_mh_offset = code_offset() + offsets->value(CodeOffsets::DeoptMH); 877 }
859 } else { 878 if (offsets->value(CodeOffsets::DeoptMH) != -1) {
860 _deoptimize_mh_offset = -1; 879 _deoptimize_mh_offset = code_offset() + offsets->value(CodeOffsets::DeoptMH);
861 } 880 } else {
881 _deoptimize_mh_offset = -1;
882 }
862 #else 883 #else
863 // Exception handler and deopt handler are in the stub section 884 // Exception handler and deopt handler are in the stub section
864 assert(offsets->value(CodeOffsets::Exceptions) != -1, "must be set"); 885 assert(offsets->value(CodeOffsets::Exceptions) != -1, "must be set");
865 assert(offsets->value(CodeOffsets::Deopt ) != -1, "must be set"); 886 assert(offsets->value(CodeOffsets::Deopt ) != -1, "must be set");
866 887
877 } else { 898 } else {
878 _unwind_handler_offset = -1; 899 _unwind_handler_offset = -1;
879 } 900 }
880 901
881 _oops_offset = data_offset(); 902 _oops_offset = data_offset();
882 _scopes_data_offset = _oops_offset + round_to(code_buffer->total_oop_size (), oopSize); 903 _metadata_offset = _oops_offset + round_to(code_buffer->total_oop_size(), oopSize);
904 _scopes_data_offset = _metadata_offset + round_to(code_buffer->total_metadata_size(), wordSize);
905
883 _scopes_pcs_offset = _scopes_data_offset + round_to(debug_info->data_size (), oopSize); 906 _scopes_pcs_offset = _scopes_data_offset + round_to(debug_info->data_size (), oopSize);
884 _dependencies_offset = _scopes_pcs_offset + adjust_pcs_size(debug_info->pcs_size()); 907 _dependencies_offset = _scopes_pcs_offset + adjust_pcs_size(debug_info->pcs_size());
885 _handler_table_offset = _dependencies_offset + round_to(dependencies->size_in_bytes (), oopSize); 908 _handler_table_offset = _dependencies_offset + round_to(dependencies->size_in_bytes (), oopSize);
886 _nul_chk_table_offset = _handler_table_offset + round_to(handler_table->size_in_bytes(), oopSize); 909 _nul_chk_table_offset = _handler_table_offset + round_to(handler_table->size_in_bytes(), oopSize);
887 _nmethod_end_offset = _nul_chk_table_offset + round_to(nul_chk_table->size_in_bytes(), oopSize); 910 _nmethod_end_offset = _nul_chk_table_offset + round_to(nul_chk_table->size_in_bytes(), oopSize);
891 _osr_entry_point = code_begin() + offsets->value(CodeOffsets::OSR_Entry); 914 _osr_entry_point = code_begin() + offsets->value(CodeOffsets::OSR_Entry);
892 _exception_cache = NULL; 915 _exception_cache = NULL;
893 _pc_desc_cache.reset_to(scopes_pcs_begin()); 916 _pc_desc_cache.reset_to(scopes_pcs_begin());
894 917
895 // Copy contents of ScopeDescRecorder to nmethod 918 // Copy contents of ScopeDescRecorder to nmethod
896 code_buffer->copy_oops_to(this); 919 code_buffer->copy_values_to(this);
897 debug_info->copy_to(this); 920 debug_info->copy_to(this);
898 dependencies->copy_to(this); 921 dependencies->copy_to(this);
899 if (ScavengeRootsInCode && detect_scavenge_root_oops()) { 922 if (ScavengeRootsInCode && detect_scavenge_root_oops()) {
900 CodeCache::add_scavenge_root_nmethod(this); 923 CodeCache::add_scavenge_root_nmethod(this);
901 } 924 }
1033 (*dest) = JNIHandles::resolve_non_null(handle); 1056 (*dest) = JNIHandles::resolve_non_null(handle);
1034 } 1057 }
1035 } 1058 }
1036 1059
1037 1060
1038 void nmethod::copy_oops(GrowableArray<jobject>* array) { 1061 // Have to have the same name because it's called by a template
1039 //assert(oops_size() == 0, "do this handshake just once, please"); 1062 void nmethod::copy_values(GrowableArray<jobject>* array) {
1040 int length = array->length(); 1063 int length = array->length();
1041 assert((address)(oops_begin() + length) <= data_end(), "oops big enough"); 1064 assert((address)(oops_begin() + length) <= (address)oops_end(), "oops big enough");
1042 oop* dest = oops_begin(); 1065 oop* dest = oops_begin();
1043 for (int index = 0 ; index < length; index++) { 1066 for (int index = 0 ; index < length; index++) {
1044 initialize_immediate_oop(&dest[index], array->at(index)); 1067 initialize_immediate_oop(&dest[index], array->at(index));
1045 } 1068 }
1046 1069
1050 // CodeBlob constructor, so it is valid even at this early point to 1073 // CodeBlob constructor, so it is valid even at this early point to
1051 // iterate over relocations and patch the code. 1074 // iterate over relocations and patch the code.
1052 fix_oop_relocations(NULL, NULL, /*initialize_immediates=*/ true); 1075 fix_oop_relocations(NULL, NULL, /*initialize_immediates=*/ true);
1053 } 1076 }
1054 1077
1078 void nmethod::copy_values(GrowableArray<Metadata*>* array) {
1079 int length = array->length();
1080 assert((address)(metadata_begin() + length) <= (address)metadata_end(), "big enough");
1081 Metadata** dest = metadata_begin();
1082 for (int index = 0 ; index < length; index++) {
1083 dest[index] = array->at(index);
1084 }
1085 }
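A minimal sketch of why the two overloads must share one name, as the comment above notes: a caller template (hypothetical here; the real caller is the recorder code behind copy_values_to) selects the overload purely from its element type:

    // Hypothetical template caller, shown only to illustrate the overload
    // resolution the "same name" comment refers to.
    template <typename T>
    static void install_recorded_values(nmethod* nm, GrowableArray<T>* values) {
      nm->copy_values(values);  // resolves to the jobject or Metadata* overload
    }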
1055 1086
1056 bool nmethod::is_at_poll_return(address pc) { 1087 bool nmethod::is_at_poll_return(address pc) {
1057 RelocIterator iter(this, pc, pc+1); 1088 RelocIterator iter(this, pc, pc+1);
1058 while (iter.next()) { 1089 while (iter.next()) {
1059 if (iter.type() == relocInfo::poll_return_type) 1090 if (iter.type() == relocInfo::poll_return_type)
1084 oop* dest = reloc->oop_addr(); 1115 oop* dest = reloc->oop_addr();
1085 initialize_immediate_oop(dest, (jobject) *dest); 1116 initialize_immediate_oop(dest, (jobject) *dest);
1086 } 1117 }
1087 // Refresh the oop-related bits of this instruction. 1118 // Refresh the oop-related bits of this instruction.
1088 reloc->fix_oop_relocation(); 1119 reloc->fix_oop_relocation();
1120 } else if (iter.type() == relocInfo::metadata_type) {
1121 metadata_Relocation* reloc = iter.metadata_reloc();
1122 reloc->fix_metadata_relocation();
1089 } 1123 }
1090 1124
1091 // There must not be any interfering patches or breakpoints. 1125 // There must not be any interfering patches or breakpoints.
1092 assert(!(iter.type() == relocInfo::breakpoint_type 1126 assert(!(iter.type() == relocInfo::breakpoint_type
1093 && iter.breakpoint_reloc()->active()), 1127 && iter.breakpoint_reloc()->active()),
1204 void nmethod::inc_decompile_count() { 1238 void nmethod::inc_decompile_count() {
1205 #ifndef GRAAL 1239 #ifndef GRAAL
1206 if (!is_compiled_by_c2()) return; 1240 if (!is_compiled_by_c2()) return;
1207 #endif 1241 #endif
1208 // Could be gated by ProfileTraps, but do not bother... 1242 // Could be gated by ProfileTraps, but do not bother...
1209 methodOop m = method(); 1243 Method* m = method();
1210 if (m == NULL) return; 1244 if (m == NULL) return;
1211 methodDataOop mdo = m->method_data(); 1245 MethodData* mdo = m->method_data();
1212 if (mdo == NULL) return; 1246 if (mdo == NULL) return;
1213 // There is a benign race here. See comments in methodDataOop.hpp. 1247 // There is a benign race here. See comments in methodData.hpp.
1214 mdo->inc_decompile_count(); 1248 mdo->inc_decompile_count();
1215 } 1249 }
1216 1250
1217 void nmethod::make_unloaded(BoolObjectClosure* is_alive, oop cause) { 1251 void nmethod::make_unloaded(BoolObjectClosure* is_alive, oop cause) {
1218 1252
1227 flush_dependencies(is_alive); 1261 flush_dependencies(is_alive);
1228 1262
1229 // Break cycle between nmethod & method 1263 // Break cycle between nmethod & method
1230 if (TraceClassUnloading && WizardMode) { 1264 if (TraceClassUnloading && WizardMode) {
1231 tty->print_cr("[Class unloading: Making nmethod " INTPTR_FORMAT 1265 tty->print_cr("[Class unloading: Making nmethod " INTPTR_FORMAT
1232 " unloadable], methodOop(" INTPTR_FORMAT 1266 " unloadable], Method*(" INTPTR_FORMAT
1233 "), cause(" INTPTR_FORMAT ")", 1267 "), cause(" INTPTR_FORMAT ")",
1234 this, (address)_method, (address)cause); 1268 this, (address)_method, (address)cause);
1235 if (!Universe::heap()->is_gc_active()) 1269 if (!Universe::heap()->is_gc_active())
1236 cause->klass()->print(); 1270 cause->klass()->print();
1237 } 1271 }
1238 // Unlink the osr method, so we do not look this up again 1272 // Unlink the osr method, so we do not look this up again
1239 if (is_osr_method()) { 1273 if (is_osr_method()) {
1240 invalidate_osr_method(); 1274 invalidate_osr_method();
1241 } 1275 }
1242 // If _method is already NULL the methodOop is about to be unloaded, 1276 // If _method is already NULL the Method* is about to be unloaded,
1243 // so we don't have to break the cycle. Note that it is possible to 1277 // so we don't have to break the cycle. Note that it is possible to
1244 // have the methodOop live here, in case we unload the nmethod because 1278 // have the Method* live here, in case we unload the nmethod because
1245 // it is pointing to some oop (other than the methodOop) being unloaded. 1279 // it is pointing to some oop (other than the Method*) being unloaded.
1246 if (_method != NULL) { 1280 if (_method != NULL) {
1247 // OSR methods point to the methodOop, but the methodOop does not 1281 // OSR methods point to the Method*, but the Method* does not
1248 // point back! 1282 // point back!
1249 if (_method->code() == this) { 1283 if (_method->code() == this) {
1250 _method->clear_code(); // Break a cycle 1284 _method->clear_code(); // Break a cycle
1251 } 1285 }
1252 _method = NULL; // Clear the method of this dead nmethod 1286 _method = NULL; // Clear the method of this dead nmethod
1253 } 1287 }
1254 1288
1255 #ifdef GRAAL 1289 #ifdef GRAAL
1256 if (_graal_compiled_method != NULL) { 1290 if (_graal_installed_code != NULL) {
1257 HotSpotCompiledMethod::set_nmethod(_graal_compiled_method, 0); 1291 HotSpotInstalledCode::set_nmethod(_graal_installed_code, 0);
1258 _graal_compiled_method = NULL; 1292 _graal_installed_code = NULL;
1259 } 1293 }
1260 #endif 1294 #endif
1261 1295
1262 // Make the class unloaded - i.e., change state and notify sweeper 1296 // Make the class unloaded - i.e., change state and notify sweeper
1263 assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint"); 1297 assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint");
1270 _state = unloaded; 1304 _state = unloaded;
1271 1305
1272 // Log the unloading. 1306 // Log the unloading.
1273 log_state_change(); 1307 log_state_change();
1274 1308
1275 // The methodOop is gone at this point 1309 // The Method* is gone at this point
1276 assert(_method == NULL, "Tautology"); 1310 assert(_method == NULL, "Tautology");
1277 1311
1278 set_osr_link(NULL); 1312 set_osr_link(NULL);
1279 //set_scavenge_root_link(NULL); // done by prune_scavenge_root_nmethods 1313 //set_scavenge_root_link(NULL); // done by prune_scavenge_root_nmethods
1280 NMethodSweeper::notify(this); 1314 NMethodSweeper::notify(this);
1282 1316
1283 void nmethod::invalidate_osr_method() { 1317 void nmethod::invalidate_osr_method() {
1284 assert(_entry_bci != InvocationEntryBci, "wrong kind of nmethod"); 1318 assert(_entry_bci != InvocationEntryBci, "wrong kind of nmethod");
1285 // Remove from list of active nmethods 1319 // Remove from list of active nmethods
1286 if (method() != NULL) 1320 if (method() != NULL)
1287 instanceKlass::cast(method()->method_holder())->remove_osr_nmethod(this); 1321 method()->method_holder()->remove_osr_nmethod(this);
1288 // Set entry as invalid 1322 // Set entry as invalid
1289 _entry_bci = InvalidOSREntryBci; 1323 _entry_bci = InvalidOSREntryBci;
1290 } 1324 }
1291 1325
1292 void nmethod::log_state_change() const { 1326 void nmethod::log_state_change() const {
1339 // to do, but return false to indicate this. 1373 // to do, but return false to indicate this.
1340 return false; 1374 return false;
1341 } 1375 }
1342 1376
1343 #ifdef GRAAL 1377 #ifdef GRAAL
1344 if (_graal_compiled_method != NULL) { 1378 if (_graal_installed_code != NULL) {
1345 HotSpotCompiledMethod::set_nmethod(_graal_compiled_method, 0); 1379 HotSpotInstalledCode::set_nmethod(_graal_installed_code, 0);
1346 _graal_compiled_method = NULL; 1380 _graal_installed_code = NULL;
1347 } 1381 }
1348 #endif 1382 #endif
1349 1383
1350 // The caller can be calling the method statically or through an inline 1384 // The caller can be calling the method statically or through an inline
1351 // cache call. 1385 // cache call.
1367 log_state_change(); 1401 log_state_change();
1368 1402
1369 // Remove nmethod from method. 1403 // Remove nmethod from method.
1370 // We need to check if both the _code and _from_compiled_code_entry_point 1404 // We need to check if both the _code and _from_compiled_code_entry_point
1371 // refer to this nmethod because there is a race in setting these two fields 1405 // refer to this nmethod because there is a race in setting these two fields
1372 // in methodOop as seen in bugid 4947125. 1406 // in Method* as seen in bugid 4947125.
1373 // If the vep() points to the zombie nmethod, the memory for the nmethod 1407 // If the vep() points to the zombie nmethod, the memory for the nmethod
1374 // could be flushed and the compiler and vtable stubs could still call 1408 // could be flushed and the compiler and vtable stubs could still call
1375 // through it. 1409 // through it.
1376 if (method() != NULL && (method()->code() == this || 1410 if (method() != NULL && (method()->code() == this ||
1377 method()->from_compiled_entry() == verified_entry_point())) { 1411 method()->from_compiled_entry() == verified_entry_point())) {
1487 assert(Universe::heap()->is_gc_active() == (is_alive != NULL), 1521 assert(Universe::heap()->is_gc_active() == (is_alive != NULL),
1488 "is_alive is non-NULL if and only if we are called during GC"); 1522 "is_alive is non-NULL if and only if we are called during GC");
1489 if (!has_flushed_dependencies()) { 1523 if (!has_flushed_dependencies()) {
1490 set_has_flushed_dependencies(); 1524 set_has_flushed_dependencies();
1491 for (Dependencies::DepStream deps(this); deps.next(); ) { 1525 for (Dependencies::DepStream deps(this); deps.next(); ) {
1492 klassOop klass = deps.context_type(); 1526 Klass* klass = deps.context_type();
1493 if (klass == NULL) continue; // ignore things like evol_method 1527 if (klass == NULL) continue; // ignore things like evol_method
1494 1528
1495 // During GC the is_alive closure is non-NULL, and is used to 1529 // During GC the is_alive closure is non-NULL, and is used to
1496 // determine liveness of dependees that need to be updated. 1530 // determine liveness of dependees that need to be updated.
1497 if (is_alive == NULL || is_alive->do_object_b(klass)) { 1531 if (is_alive == NULL || klass->is_loader_alive(is_alive)) {
1498 instanceKlass::cast(klass)->remove_dependent_nmethod(this); 1532 InstanceKlass::cast(klass)->remove_dependent_nmethod(this);
1499 } 1533 }
1500 } 1534 }
1501 } 1535 }
1502 } 1536 }
1503 1537
1504 1538
1505 // If this oop is not live, the nmethod can be unloaded. 1539 // If this oop is not live, the nmethod can be unloaded.
1506 bool nmethod::can_unload(BoolObjectClosure* is_alive, 1540 bool nmethod::can_unload(BoolObjectClosure* is_alive, oop* root, bool unloading_occurred) {
1507 OopClosure* keep_alive,
1508 oop* root, bool unloading_occurred) {
1509 assert(root != NULL, "just checking"); 1541 assert(root != NULL, "just checking");
1510 oop obj = *root; 1542 oop obj = *root;
1511 if (obj == NULL || is_alive->do_object_b(obj)) { 1543 if (obj == NULL || is_alive->do_object_b(obj)) {
1512 return false; 1544 return false;
1513 } 1545 }
1514 if (obj->is_compiledICHolder()) { 1546
1515 compiledICHolderOop cichk_oop = compiledICHolderOop(obj);
1516 if (is_alive->do_object_b(
1517 cichk_oop->holder_method()->method_holder()) &&
1518 is_alive->do_object_b(cichk_oop->holder_klass())) {
1519 // The oop should be kept alive
1520 keep_alive->do_oop(root);
1521 return false;
1522 }
1523 }
1524 // If ScavengeRootsInCode is true, an nmethod might be unloaded 1547 // If ScavengeRootsInCode is true, an nmethod might be unloaded
1525 // simply because one of its constant oops has gone dead. 1548 // simply because one of its constant oops has gone dead.
1526 // No actual classes need to be unloaded in order for this to occur. 1549 // No actual classes need to be unloaded in order for this to occur.
1527 assert(unloading_occurred || ScavengeRootsInCode, "Inconsistency in unloading"); 1550 assert(unloading_occurred || ScavengeRootsInCode, "Inconsistency in unloading");
1528 make_unloaded(is_alive, obj); 1551 make_unloaded(is_alive, obj);
1533 // post_compiled_method_load_event 1556 // post_compiled_method_load_event
1534 // new method for install_code() path 1557 // new method for install_code() path
1535 // Transfer information from compilation to jvmti 1558 // Transfer information from compilation to jvmti
1536 void nmethod::post_compiled_method_load_event() { 1559 void nmethod::post_compiled_method_load_event() {
1537 1560
1538 methodOop moop = method(); 1561 Method* moop = method();
1539 #ifndef USDT2 1562 #ifndef USDT2
1540 HS_DTRACE_PROBE8(hotspot, compiled__method__load, 1563 HS_DTRACE_PROBE8(hotspot, compiled__method__load,
1541 moop->klass_name()->bytes(), 1564 moop->klass_name()->bytes(),
1542 moop->klass_name()->utf8_length(), 1565 moop->klass_name()->utf8_length(),
1543 moop->name()->bytes(), 1566 moop->name()->bytes(),
1588 assert(_method != NULL && !is_unloaded(), "just checking"); 1611 assert(_method != NULL && !is_unloaded(), "just checking");
1589 DTRACE_METHOD_UNLOAD_PROBE(method()); 1612 DTRACE_METHOD_UNLOAD_PROBE(method());
1590 1613
1591 // If a JVMTI agent has enabled the CompiledMethodUnload event then 1614 // If a JVMTI agent has enabled the CompiledMethodUnload event then
1592 // post the event. Sometime later this nmethod will be made a zombie 1615 // post the event. Sometime later this nmethod will be made a zombie
1593 // by the sweeper but the methodOop will not be valid at that point. 1616 // by the sweeper but the Method* will not be valid at that point.
1594 // If the _jmethod_id is null then no load event was ever requested 1617 // If the _jmethod_id is null then no load event was ever requested
1595 // so don't bother posting the unload. The main reason for this is 1618 // so don't bother posting the unload. The main reason for this is
1596 // that the jmethodID is a weak reference to the methodOop so if 1619 // that the jmethodID is a weak reference to the Method* so if
1597 // it's being unloaded there's no way to look it up since the weak 1620 // it's being unloaded there's no way to look it up since the weak
1598 // ref will have been cleared. 1621 // ref will have been cleared.
1599 if (_jmethod_id != NULL && JvmtiExport::should_post_compiled_method_unload()) { 1622 if (_jmethod_id != NULL && JvmtiExport::should_post_compiled_method_unload()) {
1600 assert(!unload_reported(), "already unloaded"); 1623 assert(!unload_reported(), "already unloaded");
1601 JvmtiDeferredEvent event = 1624 JvmtiDeferredEvent event =
1621 1644
1622 // This is called at the end of the strong tracing/marking phase of a 1645 // This is called at the end of the strong tracing/marking phase of a
1623 // GC to unload an nmethod if it contains otherwise unreachable 1646 // GC to unload an nmethod if it contains otherwise unreachable
1624 // oops. 1647 // oops.
1625 1648
1626 void nmethod::do_unloading(BoolObjectClosure* is_alive, 1649 void nmethod::do_unloading(BoolObjectClosure* is_alive, bool unloading_occurred) {
1627 OopClosure* keep_alive, bool unloading_occurred) {
1628 // Make sure the oop's ready to receive visitors 1650 // Make sure the oop's ready to receive visitors
1629 assert(!is_zombie() && !is_unloaded(), 1651 assert(!is_zombie() && !is_unloaded(),
1630 "should not call follow on zombie or unloaded nmethod"); 1652 "should not call follow on zombie or unloaded nmethod");
1631 1653
1632 // If the method is not entrant then a JMP is plastered over the 1654 // If the method is not entrant then a JMP is plastered over the
1649 // call to post_compiled_method_unload() so that the unloading 1671 // call to post_compiled_method_unload() so that the unloading
1650 // of this nmethod is reported. 1672 // of this nmethod is reported.
1651 unloading_occurred = true; 1673 unloading_occurred = true;
1652 } 1674 }
1653 1675
1654 // Follow methodOop 1676 #ifdef GRAAL
1655 if (can_unload(is_alive, keep_alive, (oop*)&_method, unloading_occurred)) { 1677 // Follow Graal method
1678 if (_graal_installed_code != NULL && can_unload(is_alive, (oop*)&_graal_installed_code, unloading_occurred)) {
1656 return; 1679 return;
1657 } 1680 }
1658 1681 #endif
1659 if (_graal_compiled_method != NULL && can_unload(is_alive, keep_alive, (oop*)&_graal_compiled_method, unloading_occurred)) {
1660 return;
1661 }
1662 1682
1663 // Exception cache 1683 // Exception cache
1664 ExceptionCache* ec = exception_cache(); 1684 ExceptionCache* ec = exception_cache();
1665 while (ec != NULL) { 1685 while (ec != NULL) {
1666 oop* ex_addr = (oop*)ec->exception_type_addr(); 1686 Klass* ex_klass = ec->exception_type();
1667 oop ex = *ex_addr;
1668 ExceptionCache* next_ec = ec->next(); 1687 ExceptionCache* next_ec = ec->next();
1669 if (ex != NULL && !is_alive->do_object_b(ex)) { 1688 if (ex_klass != NULL && !ex_klass->is_loader_alive(is_alive)) {
1670 assert(!ex->is_compiledICHolder(), "Possible error here");
1671 remove_from_exception_cache(ec); 1689 remove_from_exception_cache(ec);
1672 } 1690 }
1673 ec = next_ec; 1691 ec = next_ec;
1674 } 1692 }
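The exception-cache walk above uses the usual unlink-while-iterating pattern: the successor is saved before the current entry can be removed. The same pattern in isolation, with a hypothetical Node type standing in for ExceptionCache:

    // Hypothetical list node; `dead` stands in for the loader-liveness test.
    struct Node { Node* next; bool dead; };

    static void prune(Node** head) {
      Node** link = head;
      for (Node* n = *head; n != NULL; ) {
        Node* next = n->next;        // capture before any unlink
        if (n->dead) {
          *link = next;              // unlink n
        } else {
          link = &n->next;           // advance the back-link
        }
        n = next;
      }
    }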
1675 1693
1680 if (unloading_occurred) { 1698 if (unloading_occurred) {
1681 RelocIterator iter(this, low_boundary); 1699 RelocIterator iter(this, low_boundary);
1682 while(iter.next()) { 1700 while(iter.next()) {
1683 if (iter.type() == relocInfo::virtual_call_type) { 1701 if (iter.type() == relocInfo::virtual_call_type) {
1684 CompiledIC *ic = CompiledIC_at(iter.reloc()); 1702 CompiledIC *ic = CompiledIC_at(iter.reloc());
1685 oop ic_oop = ic->cached_oop(); 1703 if (ic->is_icholder_call()) {
1686 if (ic_oop != NULL && !is_alive->do_object_b(ic_oop)) {
1687 // The only exception is compiledICHolder oops which may 1704 // The only exception is compiledICHolder oops which may
1688 // yet be marked below. (We check this further below). 1705 // yet be marked below. (We check this further below).
1689 if (ic_oop->is_compiledICHolder()) { 1706 CompiledICHolder* cichk_oop = ic->cached_icholder();
1690 compiledICHolderOop cichk_oop = compiledICHolderOop(ic_oop); 1707 if (cichk_oop->holder_method()->method_holder()->is_loader_alive(is_alive) &&
1691 if (is_alive->do_object_b( 1708 cichk_oop->holder_klass()->is_loader_alive(is_alive)) {
1692 cichk_oop->holder_method()->method_holder()) &&
1693 is_alive->do_object_b(cichk_oop->holder_klass())) {
1694 continue; 1709 continue;
1695 } 1710 }
1711 } else {
1712 Metadata* ic_oop = ic->cached_metadata();
1713 if (ic_oop != NULL) {
1714 if (ic_oop->is_klass()) {
1715 if (((Klass*)ic_oop)->is_loader_alive(is_alive)) {
1716 continue;
1717 }
1718 } else if (ic_oop->is_method()) {
1719 if (((Method*)ic_oop)->method_holder()->is_loader_alive(is_alive)) {
1720 continue;
1721 }
1722 } else {
1723 ShouldNotReachHere();
1724 }
1725 }
1696 } 1726 }
1697 ic->set_to_clean(); 1727 ic->set_to_clean();
1698 assert(ic->cached_oop() == NULL,
1699 "cached oop in IC should be cleared");
1700 }
1701 } 1728 }
1702 } 1729 }
1703 } 1730 }
1704 1731
1705 // Compiled code 1732 // Compiled code
1733 {
1706 RelocIterator iter(this, low_boundary); 1734 RelocIterator iter(this, low_boundary);
1707 while (iter.next()) { 1735 while (iter.next()) {
1708 if (iter.type() == relocInfo::oop_type) { 1736 if (iter.type() == relocInfo::oop_type) {
1709 oop_Relocation* r = iter.oop_reloc(); 1737 oop_Relocation* r = iter.oop_reloc();
1710 // In this loop, we must only traverse those oops directly embedded in 1738 // In this loop, we must only traverse those oops directly embedded in
1711 // the code. Other oops (oop_index>0) are seen as part of scopes_oops. 1739 // the code. Other oops (oop_index>0) are seen as part of scopes_oops.
1712 assert(1 == (r->oop_is_immediate()) + 1740 assert(1 == (r->oop_is_immediate()) +
1713 (r->oop_addr() >= oops_begin() && r->oop_addr() < oops_end()), 1741 (r->oop_addr() >= oops_begin() && r->oop_addr() < oops_end()),
1714 "oop must be found in exactly one place"); 1742 "oop must be found in exactly one place");
1715 if (r->oop_is_immediate() && r->oop_value() != NULL) { 1743 if (r->oop_is_immediate() && r->oop_value() != NULL) {
1716 if (can_unload(is_alive, keep_alive, r->oop_addr(), unloading_occurred)) { 1744 if (can_unload(is_alive, r->oop_addr(), unloading_occurred)) {
1717 return; 1745 return;
1718 } 1746 }
1719 } 1747 }
1720 } 1748 }
1721 } 1749 }
1750 }
1722 1751
1723 1752
1724 // Scopes 1753 // Scopes
1725 for (oop* p = oops_begin(); p < oops_end(); p++) { 1754 for (oop* p = oops_begin(); p < oops_end(); p++) {
1726 if (*p == Universe::non_oop_word()) continue; // skip non-oops 1755 if (*p == Universe::non_oop_word()) continue; // skip non-oops
1727 if (can_unload(is_alive, keep_alive, p, unloading_occurred)) { 1756 if (can_unload(is_alive, p, unloading_occurred)) {
1728 return; 1757 return;
1729 } 1758 }
1730 } 1759 }
1731 1760
1732 #ifndef PRODUCT 1761 // Ensure that all metadata is still alive
1733 // This nmethod was not unloaded; check below that all CompiledICs 1762 verify_metadata_loaders(low_boundary, is_alive);
1734 // refer to marked oops. 1763 }
1764
1765 #ifdef ASSERT
1766
1767 class CheckClass : AllStatic {
1768 static BoolObjectClosure* _is_alive;
1769
1770 // Check class_loader is alive for this bit of metadata.
1771 static void check_class(Metadata* md) {
1772 Klass* klass = NULL;
1773 if (md->is_klass()) {
1774 klass = ((Klass*)md);
1775 } else if (md->is_method()) {
1776 klass = ((Method*)md)->method_holder();
1777 } else if (md->is_methodData()) {
1778 klass = ((MethodData*)md)->method()->method_holder();
1779 } else {
1780 md->print();
1781 ShouldNotReachHere();
1782 }
1783 assert(klass->is_loader_alive(_is_alive), "must be alive");
1784 }
1785 public:
1786 static void do_check_class(BoolObjectClosure* is_alive, nmethod* nm) {
1787 assert(SafepointSynchronize::is_at_safepoint(), "this is only ok at safepoint");
1788 _is_alive = is_alive;
1789 nm->metadata_do(check_class);
1790 }
1791 };
1792
1793 // This is called during a safepoint so can use static data
1794 BoolObjectClosure* CheckClass::_is_alive = NULL;
1795 #endif // ASSERT
1796
1797
1798 // Processing of oop references should have been sufficient to keep
1799 // all strong references alive. Any weak references should have been
1800 // cleared as well. Visit all the metadata and ensure that it's
1801 // really alive.
1802 void nmethod::verify_metadata_loaders(address low_boundary, BoolObjectClosure* is_alive) {
1803 #ifdef ASSERT
1804 RelocIterator iter(this, low_boundary);
1805 while (iter.next()) {
1806 // static_stub_Relocations may have dangling references to
1807 // Method*s so trim them out here. Otherwise it looks like
1808 // compiled code is maintaining a link to dead metadata.
1809 address static_call_addr = NULL;
1810 if (iter.type() == relocInfo::opt_virtual_call_type) {
1811 CompiledIC* cic = CompiledIC_at(iter.reloc());
1812 if (!cic->is_call_to_interpreted()) {
1813 static_call_addr = iter.addr();
1814 cic->set_to_clean();
1815 }
1816 } else if (iter.type() == relocInfo::static_call_type) {
1817 CompiledStaticCall* csc = compiledStaticCall_at(iter.reloc());
1818 if (!csc->is_call_to_interpreted()) {
1819 static_call_addr = iter.addr();
1820 csc->set_to_clean();
1821 }
1822 }
1823 if (static_call_addr != NULL) {
1824 RelocIterator sciter(this, low_boundary);
1825 while (sciter.next()) {
1826 if (sciter.type() == relocInfo::static_stub_type &&
1827 sciter.static_stub_reloc()->static_call() == static_call_addr) {
1828 sciter.static_stub_reloc()->clear_inline_cache();
1829 }
1830 }
1831 }
1832 }
1833 // Check that the metadata embedded in the nmethod is alive
1834 CheckClass::do_check_class(is_alive, this);
1835 #endif
1836 }
1837
1838
1839 // Iterate over metadata calling this function. Used by RedefineClasses
1840 void nmethod::metadata_do(void f(Metadata*)) {
1841 address low_boundary = verified_entry_point();
1842 if (is_not_entrant()) {
1843 low_boundary += NativeJump::instruction_size;
1844 // %%% Note: On SPARC we patch only a 4-byte trap, not a full NativeJump.
1845 // (See comment above.)
1846 }
1735 { 1847 {
1848 // Visit all immediate references that are embedded in the instruction stream.
1736 RelocIterator iter(this, low_boundary); 1849 RelocIterator iter(this, low_boundary);
1737 while (iter.next()) { 1850 while (iter.next()) {
1738 if (iter.type() == relocInfo::virtual_call_type) { 1851 if (iter.type() == relocInfo::metadata_type ) {
1739 CompiledIC *ic = CompiledIC_at(iter.reloc()); 1852 metadata_Relocation* r = iter.metadata_reloc();
1740 oop ic_oop = ic->cached_oop(); 1853 // In this loop, we must only follow those metadatas directly embedded in
1741 assert(ic_oop == NULL || is_alive->do_object_b(ic_oop), 1854 // the code. Other metadatas (oop_index>0) are seen as part of
1742 "Found unmarked ic_oop in reachable nmethod"); 1855 // the metadata section below.
1743 } 1856 assert(1 == (r->metadata_is_immediate()) +
1744 } 1857 (r->metadata_addr() >= metadata_begin() && r->metadata_addr() < metadata_end()),
1745 } 1858 "metadata must be found in exactly one place");
1746 #endif // !PRODUCT 1859 if (r->metadata_is_immediate() && r->metadata_value() != NULL) {
1747 } 1860 Metadata* md = r->metadata_value();
1861 f(md);
1862 }
1863 }
1864 }
1865 }
1866
1867 // Visit the metadata section
1868 for (Metadata** p = metadata_begin(); p < metadata_end(); p++) {
1869 if (*p == Universe::non_oop_word() || *p == NULL) continue; // skip non-oops
1870 Metadata* md = *p;
1871 f(md);
1872 }
1873 // Call function Method*, not embedded in these other places.
1874 if (_method != NULL) f(_method);
1875 }
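metadata_do takes a plain function pointer, so RedefineClasses-style code can visit every Metadata* an nmethod references without a closure object. A usage sketch (the counter and callback below are hypothetical):

    // Hypothetical visitor: count Method* references held by an nmethod.
    static int g_method_refs = 0;

    static void count_method_refs(Metadata* md) {
      if (md->is_method()) {
        g_method_refs++;
      }
    }

    // nm->metadata_do(count_method_refs);  // visits immediate relocations,
    //                                      // the metadata section, and _method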
1876
1748 1877
1749 // This method is called twice during GC -- once while 1878 // This method is called twice during GC -- once while
1750 // tracing the "active" nmethods on thread stacks during 1879 // tracing the "active" nmethods on thread stacks during
1751 // the (strong) marking phase, and then again when walking 1880 // the (strong) marking phase, and then again when walking
1752 // the code cache contents during the weak roots processing 1881 // the code cache contents during the weak roots processing
1770 low_boundary += NativeJump::instruction_size; 1899 low_boundary += NativeJump::instruction_size;
1771 // %%% Note: On SPARC we patch only a 4-byte trap, not a full NativeJump. 1900 // %%% Note: On SPARC we patch only a 4-byte trap, not a full NativeJump.
1772 // (See comment above.) 1901 // (See comment above.)
1773 } 1902 }
1774 1903
1775 // Compiled code 1904 #ifdef GRAAL
1776 f->do_oop((oop*) &_method); 1905 if (_graal_installed_code != NULL) {
1777 f->do_oop((oop*) &_graal_compiled_method); 1906 f->do_oop((oop*) &_graal_installed_code);
1778 if (!do_strong_roots_only) { 1907 }
1779 // weak roots processing phase -- update ExceptionCache oops 1908 #endif
1780 ExceptionCache* ec = exception_cache();
1781 while(ec != NULL) {
1782 f->do_oop((oop*)ec->exception_type_addr());
1783 ec = ec->next();
1784 }
1785 } // Else strong roots phase -- skip oops in ExceptionCache
1786 1909
1787 RelocIterator iter(this, low_boundary); 1910 RelocIterator iter(this, low_boundary);
1788 1911
1789 while (iter.next()) { 1912 while (iter.next()) {
1790 if (iter.type() == relocInfo::oop_type ) { 1913 if (iter.type() == relocInfo::oop_type ) {
2115 } 2238 }
2116 } 2239 }
2117 return found_check; 2240 return found_check;
2118 } 2241 }
2119 2242
2120 bool nmethod::is_evol_dependent_on(klassOop dependee) { 2243 bool nmethod::is_evol_dependent_on(Klass* dependee) {
2121 instanceKlass *dependee_ik = instanceKlass::cast(dependee); 2244 InstanceKlass *dependee_ik = InstanceKlass::cast(dependee);
2122 objArrayOop dependee_methods = dependee_ik->methods(); 2245 Array<Method*>* dependee_methods = dependee_ik->methods();
2123 for (Dependencies::DepStream deps(this); deps.next(); ) { 2246 for (Dependencies::DepStream deps(this); deps.next(); ) {
2124 if (deps.type() == Dependencies::evol_method) { 2247 if (deps.type() == Dependencies::evol_method) {
2125 methodOop method = deps.method_argument(0); 2248 Method* method = deps.method_argument(0);
2126 for (int j = 0; j < dependee_methods->length(); j++) { 2249 for (int j = 0; j < dependee_methods->length(); j++) {
2127 if ((methodOop) dependee_methods->obj_at(j) == method) { 2250 if (dependee_methods->at(j) == method) {
2128 // RC_TRACE macro has an embedded ResourceMark 2251 // RC_TRACE macro has an embedded ResourceMark
2129 RC_TRACE(0x01000000, 2252 RC_TRACE(0x01000000,
2130 ("Found evol dependency of nmethod %s.%s(%s) compile_id=%d on method %s.%s(%s)", 2253 ("Found evol dependency of nmethod %s.%s(%s) compile_id=%d on method %s.%s(%s)",
2131 _method->method_holder()->klass_part()->external_name(), 2254 _method->method_holder()->external_name(),
2132 _method->name()->as_C_string(), 2255 _method->name()->as_C_string(),
2133 _method->signature()->as_C_string(), compile_id(), 2256 _method->signature()->as_C_string(), compile_id(),
2134 method->method_holder()->klass_part()->external_name(), 2257 method->method_holder()->external_name(),
2135 method->name()->as_C_string(), 2258 method->name()->as_C_string(),
2136 method->signature()->as_C_string())); 2259 method->signature()->as_C_string()));
2137 if (TraceDependencies || LogCompilation) 2260 if (TraceDependencies || LogCompilation)
2138 deps.log_dependency(dependee); 2261 deps.log_dependency(dependee);
2139 return true; 2262 return true;
2143 } 2266 }
2144 return false; 2267 return false;
2145 } 2268 }
2146 2269
2147 // Called from mark_for_deoptimization, when dependee is invalidated. 2270 // Called from mark_for_deoptimization, when dependee is invalidated.
2148 bool nmethod::is_dependent_on_method(methodOop dependee) { 2271 bool nmethod::is_dependent_on_method(Method* dependee) {
2149 for (Dependencies::DepStream deps(this); deps.next(); ) { 2272 for (Dependencies::DepStream deps(this); deps.next(); ) {
2150 if (deps.type() != Dependencies::evol_method) 2273 if (deps.type() != Dependencies::evol_method)
2151 continue; 2274 continue;
2152 methodOop method = deps.method_argument(0); 2275 Method* method = deps.method_argument(0);
2153 if (method == dependee) return true; 2276 if (method == dependee) return true;
2154 } 2277 }
2155 return false; 2278 return false;
2156 } 2279 }
2157 2280
2286 return; 2409 return;
2287 2410
2288 // Make sure all the entry points are correctly aligned for patching. 2411 // Make sure all the entry points are correctly aligned for patching.
2289 NativeJump::check_verified_entry_alignment(entry_point(), verified_entry_point()); 2412 NativeJump::check_verified_entry_alignment(entry_point(), verified_entry_point());
2290 2413
2291 assert(method()->is_oop(), "must be valid"); 2414 // assert(method()->is_oop(), "must be valid");
2292 2415
2293 ResourceMark rm; 2416 ResourceMark rm;
2294 2417
2295 if (!CodeCache::contains(this)) { 2418 if (!CodeCache::contains(this)) {
2296 fatal(err_msg("nmethod at " INTPTR_FORMAT " not in zone", this)); 2419 fatal(err_msg("nmethod at " INTPTR_FORMAT " not in zone", this));
2326 CompiledIC* ic = NULL; 2449 CompiledIC* ic = NULL;
2327 Thread *cur = Thread::current(); 2450 Thread *cur = Thread::current();
2328 if (CompiledIC_lock->owner() == cur || 2451 if (CompiledIC_lock->owner() == cur ||
2329 ((cur->is_VM_thread() || cur->is_ConcurrentGC_thread()) && 2452 ((cur->is_VM_thread() || cur->is_ConcurrentGC_thread()) &&
2330 SafepointSynchronize::is_at_safepoint())) { 2453 SafepointSynchronize::is_at_safepoint())) {
2331 ic = CompiledIC_at(call_site); 2454 ic = CompiledIC_at(this, call_site);
2332 CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops()); 2455 CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
2333 } else { 2456 } else {
2334 MutexLocker ml_verify (CompiledIC_lock); 2457 MutexLocker ml_verify (CompiledIC_lock);
2335 ic = CompiledIC_at(call_site); 2458 ic = CompiledIC_at(this, call_site);
2336 } 2459 }
2337 PcDesc* pd = pc_desc_at(ic->end_of_call()); 2460 PcDesc* pd = pc_desc_at(ic->end_of_call());
2338 assert(pd != NULL, "PcDesc must exist"); 2461 assert(pd != NULL, "PcDesc must exist");
2339 for (ScopeDesc* sd = new ScopeDesc(this, pd->scope_decode_offset(), 2462 for (ScopeDesc* sd = new ScopeDesc(this, pd->scope_decode_offset(),
2340 pd->obj_decode_offset(), pd->should_reexecute(), pd->rethrow_exception(), 2463 pd->obj_decode_offset(), pd->should_reexecute(), pd->rethrow_exception(),
2467 stub_size()); 2590 stub_size());
2468 if (oops_size () > 0) tty->print_cr(" oops [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d", 2591 if (oops_size () > 0) tty->print_cr(" oops [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
2469 oops_begin(), 2592 oops_begin(),
2470 oops_end(), 2593 oops_end(),
2471 oops_size()); 2594 oops_size());
2595 if (metadata_size () > 0) tty->print_cr(" metadata [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
2596 metadata_begin(),
2597 metadata_end(),
2598 metadata_size());
2472 if (scopes_data_size () > 0) tty->print_cr(" scopes data [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d", 2599 if (scopes_data_size () > 0) tty->print_cr(" scopes data [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
2473 scopes_data_begin(), 2600 scopes_data_begin(),
2474 scopes_data_end(), 2601 scopes_data_end(),
2475 scopes_data_size()); 2602 scopes_data_size());
2476 if (scopes_pcs_size () > 0) tty->print_cr(" scopes pcs [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d", 2603 if (scopes_pcs_size () > 0) tty->print_cr(" scopes pcs [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
2516 ResourceMark rm; 2643 ResourceMark rm;
2517 ttyLocker ttyl; // keep the following output all in one block 2644 ttyLocker ttyl; // keep the following output all in one block
2518 tty->print_cr("Dependencies:"); 2645 tty->print_cr("Dependencies:");
2519 for (Dependencies::DepStream deps(this); deps.next(); ) { 2646 for (Dependencies::DepStream deps(this); deps.next(); ) {
2520 deps.print_dependency(); 2647 deps.print_dependency();
2521 klassOop ctxk = deps.context_type(); 2648 Klass* ctxk = deps.context_type();
2522 if (ctxk != NULL) { 2649 if (ctxk != NULL) {
2523 Klass* k = Klass::cast(ctxk); 2650 if (ctxk->oop_is_instance() && ((InstanceKlass*)ctxk)->is_dependent_nmethod(this)) {
2524 if (k->oop_is_instance() && ((instanceKlass*)k)->is_dependent_nmethod(this)) { 2651 tty->print_cr(" [nmethod<=klass]%s", ctxk->external_name());
2525 tty->print_cr(" [nmethod<=klass]%s", k->external_name());
2526 } 2652 }
2527 } 2653 }
2528 deps.log_dependency(); // put it into the xml log also 2654 deps.log_dependency(); // put it into the xml log also
2529 } 2655 }
2530 } 2656 }
2582 if (obj == NULL) st.print("NULL"); 2708 if (obj == NULL) st.print("NULL");
2583 else obj->print_value_on(&st); 2709 else obj->print_value_on(&st);
2584 st.print(")"); 2710 st.print(")");
2585 return st.as_string(); 2711 return st.as_string();
2586 } 2712 }
2713 case relocInfo::metadata_type: {
2714 stringStream st;
2715 metadata_Relocation* r = iter.metadata_reloc();
2716 Metadata* obj = r->metadata_value();
2717 st.print("metadata(");
2718 if (obj == NULL) st.print("NULL");
2719 else obj->print_value_on(&st);
2720 st.print(")");
2721 return st.as_string();
2722 }
2587 case relocInfo::virtual_call_type: return "virtual_call"; 2723 case relocInfo::virtual_call_type: return "virtual_call";
2588 case relocInfo::opt_virtual_call_type: return "optimized virtual_call"; 2724 case relocInfo::opt_virtual_call_type: return "optimized virtual_call";
2589 case relocInfo::static_call_type: return "static_call"; 2725 case relocInfo::static_call_type: return "static_call";
2590 case relocInfo::static_stub_type: return "static_stub"; 2726 case relocInfo::static_stub_type: return "static_stub";
2591 case relocInfo::runtime_call_type: return "runtime_call"; 2727 case relocInfo::runtime_call_type: return "runtime_call";
2609 p->return_oop()); 2745 p->return_oop());
2610 } 2746 }
2611 return NULL; 2747 return NULL;
2612 } 2748 }
2613 2749
2614 void nmethod::print_nmethod_labels(outputStream* stream, address block_begin) { 2750 void nmethod::print_nmethod_labels(outputStream* stream, address block_begin) const {
2615 if (block_begin == entry_point()) stream->print_cr("[Entry Point]"); 2751 if (block_begin == entry_point()) stream->print_cr("[Entry Point]");
2616 if (block_begin == verified_entry_point()) stream->print_cr("[Verified Entry Point]"); 2752 if (block_begin == verified_entry_point()) stream->print_cr("[Verified Entry Point]");
2617 if (GRAAL_ONLY(_exception_offset >= 0 &&) block_begin == exception_begin()) stream->print_cr("[Exception Handler]"); 2753 if (GRAAL_ONLY(_exception_offset >= 0 &&) block_begin == exception_begin()) stream->print_cr("[Exception Handler]");
2618 if (block_begin == stub_begin()) stream->print_cr("[Stub Code]"); 2754 if (block_begin == stub_begin()) stream->print_cr("[Stub Code]");
2619 if (GRAAL_ONLY(_deoptimize_offset >= 0 &&) block_begin == deopt_handler_begin()) stream->print_cr("[Deopt Handler Code]"); 2755 if (GRAAL_ONLY(_deoptimize_offset >= 0 &&) block_begin == deopt_handler_begin()) stream->print_cr("[Deopt Handler Code]");
2744 if (sd != NULL) { 2880 if (sd != NULL) {
2745 st->move_to(column); 2881 st->move_to(column);
2746 if (sd->bci() == SynchronizationEntryBCI) { 2882 if (sd->bci() == SynchronizationEntryBCI) {
2747 st->print(";*synchronization entry"); 2883 st->print(";*synchronization entry");
2748 } else { 2884 } else {
2749 if (sd->method().is_null()) { 2885 if (sd->method() == NULL) {
2750 st->print("method is NULL"); 2886 st->print("method is NULL");
2751 } else if (sd->method()->is_native()) { 2887 } else if (sd->method()->is_native()) {
2752 st->print("method is native"); 2888 st->print("method is native");
2753 } else { 2889 } else {
2754 Bytecodes::Code bc = sd->method()->java_code_at(sd->bci()); 2890 Bytecodes::Code bc = sd->method()->java_code_at(sd->bci());
2785 2921
2786 // Print all scopes 2922 // Print all scopes
2787 for (;sd != NULL; sd = sd->sender()) { 2923 for (;sd != NULL; sd = sd->sender()) {
2788 st->move_to(column); 2924 st->move_to(column);
2789 st->print("; -"); 2925 st->print("; -");
2790 if (sd->method().is_null()) { 2926 if (sd->method() == NULL) {
2791 st->print("method is NULL"); 2927 st->print("method is NULL");
2792 } else { 2928 } else {
2793 sd->method()->print_short_name(st); 2929 sd->method()->print_short_name(st);
2794 } 2930 }
2795 int lineno = sd->method()->line_number_from_bci(sd->bci()); 2931 int lineno = sd->method()->line_number_from_bci(sd->bci());
2860 DebugInformationRecorder::print_statistics(); 2996 DebugInformationRecorder::print_statistics();
2861 nmethod_stats.print_pc_stats(); 2997 nmethod_stats.print_pc_stats();
2862 Dependencies::print_statistics(); 2998 Dependencies::print_statistics();
2863 if (xtty != NULL) xtty->tail("statistics"); 2999 if (xtty != NULL) xtty->tail("statistics");
2864 } 3000 }
3001
3002 #ifdef GRAAL
3003 void DebugScopedNMethod::print_on(outputStream* st) {
3004 if (_nm != NULL) {
3005 st->print("nmethod@%p", _nm);
3006 Method* method = _nm->method();
3007 if (method != NULL) {
3008 char holder[O_BUFLEN];
3009 char nameAndSig[O_BUFLEN];
3010 method->method_holder()->name()->as_C_string(holder, O_BUFLEN);
3011 method->name_and_sig_as_C_string(nameAndSig, O_BUFLEN);
3012 st->print(" - %s::%s", holder, nameAndSig);
3013 }
3014 }
3015 }
3016 #endif