diff src/share/vm/oops/cpCache.hpp @ 14443:3205e78d8193

8029396: PPC64 (part 212): Several memory ordering fixes in C-code.
Summary: Memory ordering fixes in GC and other runtime code showing on PPC64.
Reviewed-by: kvn, coleenp
author goetz
date Mon, 02 Dec 2013 10:26:14 +0100
parents 41cb10cbfb3c
children 4ca6dc0799b6 0b9500028980
--- a/src/share/vm/oops/cpCache.hpp	Thu Dec 05 15:13:12 2013 -0800
+++ b/src/share/vm/oops/cpCache.hpp	Mon Dec 02 10:26:14 2013 +0100
@@ -138,7 +138,7 @@
 
   void set_bytecode_1(Bytecodes::Code code);
   void set_bytecode_2(Bytecodes::Code code);
-  void set_f1(Metadata* f1)                            {
+  void set_f1(Metadata* f1) {
     Metadata* existing_f1 = (Metadata*)_f1; // read once
     assert(existing_f1 == NULL || existing_f1 == f1, "illegal field change");
     _f1 = f1;
@@ -325,14 +325,21 @@
 
   // Accessors
   int indices() const                            { return _indices; }
+  int indices_ord() const                        { return (intx)OrderAccess::load_ptr_acquire(&_indices); }
   int constant_pool_index() const                { return (indices() & cp_index_mask); }
-  Bytecodes::Code bytecode_1() const             { return Bytecodes::cast((indices() >> bytecode_1_shift) & bytecode_1_mask); }
-  Bytecodes::Code bytecode_2() const             { return Bytecodes::cast((indices() >> bytecode_2_shift) & bytecode_2_mask); }
-  Method* f1_as_method() const                   { Metadata* f1 = (Metadata*)_f1; assert(f1 == NULL || f1->is_method(), ""); return (Method*)f1; }
-  Klass*    f1_as_klass() const                  { Metadata* f1 = (Metadata*)_f1; assert(f1 == NULL || f1->is_klass(), ""); return (Klass*)f1; }
-  bool      is_f1_null() const                   { Metadata* f1 = (Metadata*)_f1; return f1 == NULL; }  // classifies a CPC entry as unbound
+  Bytecodes::Code bytecode_1() const             { return Bytecodes::cast((indices_ord() >> bytecode_1_shift) & bytecode_1_mask); }
+  Bytecodes::Code bytecode_2() const             { return Bytecodes::cast((indices_ord() >> bytecode_2_shift) & bytecode_2_mask); }
+  Metadata* f1_ord() const                       { return (Metadata *)OrderAccess::load_ptr_acquire(&_f1); }
+  Method*   f1_as_method() const                 { Metadata* f1 = f1_ord(); assert(f1 == NULL || f1->is_method(), ""); return (Method*)f1; }
+  Klass*    f1_as_klass() const                  { Metadata* f1 = f1_ord(); assert(f1 == NULL || f1->is_klass(), ""); return (Klass*)f1; }
+  // Use the accessor f1_ord() to acquire _f1's value. This is needed for
+  // example in BytecodeInterpreter::run(), where is_f1_null() is
+  // called to check if an invokedynamic call is resolved. This load
+  // of _f1 must be ordered with the loads performed by
+  // cache->main_entry_index().
+  bool      is_f1_null() const                   { Metadata* f1 = f1_ord(); return f1 == NULL; }  // classifies a CPC entry as unbound
   int       f2_as_index() const                  { assert(!is_vfinal(), ""); return (int) _f2; }
-  Method* f2_as_vfinal_method() const            { assert(is_vfinal(), ""); return (Method*)_f2; }
+  Method*   f2_as_vfinal_method() const          { assert(is_vfinal(), ""); return (Method*)_f2; }
   int  field_index() const                       { assert(is_field_entry(),  ""); return (_flags & field_index_mask); }
   int  parameter_size() const                    { assert(is_method_entry(), ""); return (_flags & parameter_size_mask); }
   bool is_volatile() const                       { return (_flags & (1 << is_volatile_shift))       != 0; }
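The acquire loads added above (indices_ord(), f1_ord()) only work because the resolving thread publishes the entry with matching release semantics (set_bytecode_1/set_bytecode_2 and set_f1, whose store side is not shown in this hunk). Below is a minimal standalone sketch, not HotSpot code, that models this publish/consume pattern with C++11 atomics in place of OrderAccess::release_store_ptr and OrderAccess::load_ptr_acquire; the Entry struct and field names are illustrative assumptions, not the real ConstantPoolCacheEntry layout.

```cpp
// Standalone sketch of the acquire/release pairing the cpCache fix relies on.
// Assumption: std::memory_order_release/acquire stand in for HotSpot's
// OrderAccess::release_store_ptr / load_ptr_acquire.
#include <atomic>
#include <cassert>
#include <cstdio>
#include <thread>

struct Entry {
  std::atomic<void*> f1{nullptr};  // resolved metadata, published last
  int                payload{0};   // other fields written before publication
};

// Resolver thread: write the dependent fields first, then publish f1 with
// release semantics so a reader that observes f1 != nullptr also observes
// the earlier writes.
void resolve(Entry& e) {
  e.payload = 42;
  e.f1.store(&e, std::memory_order_release);  // ~ OrderAccess::release_store_ptr
}

// Interpreter thread: acquire-load f1 (the analogue of f1_ord()/is_f1_null())
// so that, on weakly ordered hardware such as PPC64, the subsequent reads of
// the entry cannot be satisfied before the null check.
void use(Entry& e) {
  void* f1 = e.f1.load(std::memory_order_acquire);  // ~ load_ptr_acquire
  if (f1 != nullptr) {
    assert(e.payload == 42);  // guaranteed by the acquire/release pair
    std::printf("resolved, payload=%d\n", e.payload);
  }
}

int main() {
  Entry e;
  std::thread writer(resolve, std::ref(e));
  std::thread reader(use, std::ref(e));
  writer.join();
  reader.join();
}
```

Without the acquire on the reader side, a plain load of _f1 (or _indices) would let PPC64 reorder the dependent loads ahead of the null/bytecode check, which is exactly the failure mode this changeset addresses.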