comparison src/share/vm/c1/c1_Runtime1.cpp @ 14662:3c3953fb3f2a

8033380: Experimental VM flag to enforce access atomicity
Summary: -XX:+AlwaysAtomicAccesses to unconditionally enforce the access atomicity.
Reviewed-by: roland, kvn, iveresov
author shade
date Mon, 03 Mar 2014 15:54:45 +0400
parents 8b80b262e501
children b51e29501f30
comparing 14661:bbfe3ac1471d with 14662:3c3953fb3f2a
@@ -807,15 +807,14 @@
   // that caller_method() == caller_code->method()

   int bci = vfst.bci();
   Bytecodes::Code code = caller_method()->java_code_at(bci);

-#ifndef PRODUCT
   // this is used by assertions in the access_field_patching_id
   BasicType patch_field_type = T_ILLEGAL;
-#endif // PRODUCT
   bool deoptimize_for_volatile = false;
+  bool deoptimize_for_atomic = false;
   int patch_field_offset = -1;
   KlassHandle init_klass(THREAD, NULL); // klass needed by load_klass_patching code
   KlassHandle load_klass(THREAD, NULL); // klass needed by load_klass_patching code
   Handle mirror(THREAD, NULL);          // oop needed by load_mirror_patching code
   Handle appendix(THREAD, NULL);        // oop needed by appendix_patching code
@@ -837,15 +836,28 @@
     // deoptimized so that the code can be regenerated correctly.
     // This check is only needed for access_field_patching since this
     // is the path for patching field offsets. load_klass is only
     // used for patching references to oops which don't need special
     // handling in the volatile case.
+
     deoptimize_for_volatile = result.access_flags().is_volatile();

-#ifndef PRODUCT
+    // If we are patching a field which should be atomic, then
+    // the generated code is not correct either, force deoptimizing.
+    // We need to only cover T_LONG and T_DOUBLE fields, as we can
+    // break access atomicity only for them.
+
+    // Strictly speaking, the deoptimization on 64-bit platforms
+    // is unnecessary, and T_LONG stores on 32-bit platforms need
+    // to be handled by special patching code when AlwaysAtomicAccesses
+    // becomes a product feature. At this point, we are still going
+    // for the deoptimization for consistency against volatile
+    // accesses.
+
     patch_field_type = result.field_type();
-#endif
+    deoptimize_for_atomic = (AlwaysAtomicAccesses && (patch_field_type == T_DOUBLE || patch_field_type == T_LONG));
+
   } else if (load_klass_or_mirror_patch_id) {
     Klass* k = NULL;
     switch (code) {
       case Bytecodes::_putstatic:
       case Bytecodes::_getstatic:
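The new comment restricts the extra deoptimization to T_LONG and T_DOUBLE because only 64-bit primitives can have a single Java-level access split into two machine accesses on a 32-bit platform. A minimal standalone sketch of that tearing hazard follows (plain C++, not HotSpot code; the two 32-bit atomic halves stand in for the pair of stores a 32-bit JIT may emit for one Java long field):

// Standalone illustration, not HotSpot code: a 64-bit value written as two
// 32-bit halves can be observed "torn" by a concurrent reader, which is the
// access-atomicity violation AlwaysAtomicAccesses is meant to rule out.
#include <atomic>
#include <cstdint>
#include <cstdio>
#include <thread>

static std::atomic<uint32_t> lo{0}, hi{0};   // the two halves of one 64-bit field

static void write_long(uint64_t v) {         // non-atomic: two separate stores
  lo.store(static_cast<uint32_t>(v),       std::memory_order_relaxed);
  hi.store(static_cast<uint32_t>(v >> 32), std::memory_order_relaxed);
}

static uint64_t read_long() {                // non-atomic: two separate loads
  uint64_t l = lo.load(std::memory_order_relaxed);
  uint64_t h = hi.load(std::memory_order_relaxed);
  return (h << 32) | l;
}

int main() {
  std::thread writer([] {
    for (int i = 0; i < 1000000; i++) {
      // alternate between the all-zeros and all-ones bit patterns
      write_long((i & 1) ? 0xFFFFFFFFFFFFFFFFull : 0);
    }
  });
  long torn = 0;
  for (int i = 0; i < 1000000; i++) {
    uint64_t v = read_long();
    if (v != 0 && v != 0xFFFFFFFFFFFFFFFFull) torn++;  // halves from different writes
  }
  writer.join();
  std::printf("torn reads observed: %ld\n", torn);
  return 0;
}

The JLS explicitly permits such non-atomic behavior for non-volatile long and double fields (JLS 17.7); -XX:+AlwaysAtomicAccesses asks the VM to make these accesses atomic anyway, so the patching path above forces deoptimization when the resolved field turns out to be a long or double.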
@@ -916,17 +928,23 @@
     }
   } else {
     ShouldNotReachHere();
   }

-  if (deoptimize_for_volatile) {
-    // At compile time we assumed the field wasn't volatile but after
-    // loading it turns out it was volatile so we have to throw the
+  if (deoptimize_for_volatile || deoptimize_for_atomic) {
+    // At compile time we assumed the field wasn't volatile/atomic but after
+    // loading it turns out it was volatile/atomic so we have to throw the
     // compiled code out and let it be regenerated.
     if (TracePatching) {
-      tty->print_cr("Deoptimizing for patching volatile field reference");
+      if (deoptimize_for_volatile) {
+        tty->print_cr("Deoptimizing for patching volatile field reference");
+      }
+      if (deoptimize_for_atomic) {
+        tty->print_cr("Deoptimizing for patching atomic field reference");
+      }
     }
+
     // It's possible the nmethod was invalidated in the last
     // safepoint, but if it's still alive then make it not_entrant.
     nmethod* nm = CodeCache::find_nmethod(caller_frame.pc());
     if (nm != NULL) {
       nm->make_not_entrant();
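Taken together with the earlier hunk, the decision made on this path boils down to the following condensed, standalone sketch (hypothetical names such as FieldInfo and must_deoptimize_for_field_patch; in the real code the inputs come from the resolved field descriptor and the -XX:+AlwaysAtomicAccesses flag):

// Condensed standalone sketch of the deoptimization decision, not the
// HotSpot sources; the names below are illustrative only.
#include <cstdio>

enum BasicType { T_INT, T_LONG, T_DOUBLE, T_OBJECT };

struct FieldInfo {
  bool      is_volatile;
  BasicType type;
};

static bool AlwaysAtomicAccesses = true;  // stand-in for the experimental VM flag

static bool must_deoptimize_for_field_patch(const FieldInfo& f) {
  bool deoptimize_for_volatile = f.is_volatile;
  // New in this change: 64-bit primitive fields also force deoptimization when
  // the flag is on, since code compiled before the field was resolved may not
  // access them atomically.
  bool deoptimize_for_atomic =
      AlwaysAtomicAccesses && (f.type == T_DOUBLE || f.type == T_LONG);
  return deoptimize_for_volatile || deoptimize_for_atomic;
}

int main() {
  FieldInfo plain_long   = {false, T_LONG};
  FieldInfo plain_int    = {false, T_INT};
  FieldInfo volatile_obj = {true,  T_OBJECT};
  std::printf("plain long   -> %d\n", must_deoptimize_for_field_patch(plain_long));   // 1
  std::printf("plain int    -> %d\n", must_deoptimize_for_field_patch(plain_int));    // 0
  std::printf("volatile obj -> %d\n", must_deoptimize_for_field_patch(volatile_obj)); // 1
  return 0;
}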