comparison src/share/vm/opto/parse1.cpp @ 14429:2113136690bc

8024921: PPC64 (part 113): Extend Load and Store nodes to know about memory ordering. Summary: Add a field to C2 LoadNode and StoreNode classes which indicates whether the load/store should do an acquire/release on platforms which support it. Reviewed-by: kvn
author goetz
date Fri, 15 Nov 2013 11:05:32 -0800
parents b2ee5dc63353
children c6d7e7406136
comparison
equal deleted inserted replaced
14427:eb178e97560c 14429:2113136690bc
104 Node *ctl = control(); 104 Node *ctl = control();
105 105
106 // Very similar to LoadNode::make, except we handle un-aligned longs and 106 // Very similar to LoadNode::make, except we handle un-aligned longs and
107 // doubles on Sparc. Intel can handle them just fine directly. 107 // doubles on Sparc. Intel can handle them just fine directly.
108 Node *l; 108 Node *l;
109 switch( bt ) { // Signature is flattened 109 switch (bt) { // Signature is flattened
110 case T_INT: l = new (C) LoadINode( ctl, mem, adr, TypeRawPtr::BOTTOM ); break; 110 case T_INT: l = new (C) LoadINode(ctl, mem, adr, TypeRawPtr::BOTTOM, TypeInt::INT, MemNode::unordered); break;
111 case T_FLOAT: l = new (C) LoadFNode( ctl, mem, adr, TypeRawPtr::BOTTOM ); break; 111 case T_FLOAT: l = new (C) LoadFNode(ctl, mem, adr, TypeRawPtr::BOTTOM, Type::FLOAT, MemNode::unordered); break;
112 case T_ADDRESS: l = new (C) LoadPNode( ctl, mem, adr, TypeRawPtr::BOTTOM, TypeRawPtr::BOTTOM ); break; 112 case T_ADDRESS: l = new (C) LoadPNode(ctl, mem, adr, TypeRawPtr::BOTTOM, TypeRawPtr::BOTTOM, MemNode::unordered); break;
113 case T_OBJECT: l = new (C) LoadPNode( ctl, mem, adr, TypeRawPtr::BOTTOM, TypeInstPtr::BOTTOM ); break; 113 case T_OBJECT: l = new (C) LoadPNode(ctl, mem, adr, TypeRawPtr::BOTTOM, TypeInstPtr::BOTTOM, MemNode::unordered); break;
114 case T_LONG: 114 case T_LONG:
115 case T_DOUBLE: { 115 case T_DOUBLE: {
116 // Since arguments are in reverse order, the argument address 'adr' 116 // Since arguments are in reverse order, the argument address 'adr'
117 // refers to the back half of the long/double. Recompute adr. 117 // refers to the back half of the long/double. Recompute adr.
118 adr = basic_plus_adr( local_addrs_base, local_addrs, -(index+1)*wordSize ); 118 adr = basic_plus_adr(local_addrs_base, local_addrs, -(index+1)*wordSize);
119 if( Matcher::misaligned_doubles_ok ) { 119 if (Matcher::misaligned_doubles_ok) {
120 l = (bt == T_DOUBLE) 120 l = (bt == T_DOUBLE)
121 ? (Node*)new (C) LoadDNode( ctl, mem, adr, TypeRawPtr::BOTTOM ) 121 ? (Node*)new (C) LoadDNode(ctl, mem, adr, TypeRawPtr::BOTTOM, Type::DOUBLE, MemNode::unordered)
122 : (Node*)new (C) LoadLNode( ctl, mem, adr, TypeRawPtr::BOTTOM ); 122 : (Node*)new (C) LoadLNode(ctl, mem, adr, TypeRawPtr::BOTTOM, TypeLong::LONG, MemNode::unordered);
123 } else { 123 } else {
124 l = (bt == T_DOUBLE) 124 l = (bt == T_DOUBLE)
125 ? (Node*)new (C) LoadD_unalignedNode( ctl, mem, adr, TypeRawPtr::BOTTOM ) 125 ? (Node*)new (C) LoadD_unalignedNode(ctl, mem, adr, TypeRawPtr::BOTTOM, MemNode::unordered)
126 : (Node*)new (C) LoadL_unalignedNode( ctl, mem, adr, TypeRawPtr::BOTTOM ); 126 : (Node*)new (C) LoadL_unalignedNode(ctl, mem, adr, TypeRawPtr::BOTTOM, MemNode::unordered);
127 } 127 }
128 break; 128 break;
129 } 129 }
130 default: ShouldNotReachHere(); 130 default: ShouldNotReachHere();
131 } 131 }
227 Node *lock_object = fetch_interpreter_state(index*2, T_OBJECT, monitors_addr, osr_buf); 227 Node *lock_object = fetch_interpreter_state(index*2, T_OBJECT, monitors_addr, osr_buf);
228 // Try and copy the displaced header to the BoxNode 228 // Try and copy the displaced header to the BoxNode
229 Node *displaced_hdr = fetch_interpreter_state((index*2) + 1, T_ADDRESS, monitors_addr, osr_buf); 229 Node *displaced_hdr = fetch_interpreter_state((index*2) + 1, T_ADDRESS, monitors_addr, osr_buf);
230 230
231 231
232 store_to_memory(control(), box, displaced_hdr, T_ADDRESS, Compile::AliasIdxRaw); 232 store_to_memory(control(), box, displaced_hdr, T_ADDRESS, Compile::AliasIdxRaw, MemNode::unordered);
233 233
234 // Build a bogus FastLockNode (no code will be generated) and push the 234 // Build a bogus FastLockNode (no code will be generated) and push the
235 // monitor into our debug info. 235 // monitor into our debug info.
236 const FastLockNode *flock = _gvn.transform(new (C) FastLockNode( 0, lock_object, box ))->as_FastLock(); 236 const FastLockNode *flock = _gvn.transform(new (C) FastLockNode( 0, lock_object, box ))->as_FastLock();
237 map()->push_monitor(flock); 237 map()->push_monitor(flock);
1929 // class is often visible so the access flags are constant. 1929 // class is often visible so the access flags are constant.
1930 Node* klass_addr = basic_plus_adr( receiver, receiver, oopDesc::klass_offset_in_bytes() ); 1930 Node* klass_addr = basic_plus_adr( receiver, receiver, oopDesc::klass_offset_in_bytes() );
1931 Node* klass = _gvn.transform( LoadKlassNode::make(_gvn, immutable_memory(), klass_addr, TypeInstPtr::KLASS) ); 1931 Node* klass = _gvn.transform( LoadKlassNode::make(_gvn, immutable_memory(), klass_addr, TypeInstPtr::KLASS) );
1932 1932
1933 Node* access_flags_addr = basic_plus_adr(klass, klass, in_bytes(Klass::access_flags_offset())); 1933 Node* access_flags_addr = basic_plus_adr(klass, klass, in_bytes(Klass::access_flags_offset()));
1934 Node* access_flags = make_load(NULL, access_flags_addr, TypeInt::INT, T_INT); 1934 Node* access_flags = make_load(NULL, access_flags_addr, TypeInt::INT, T_INT, MemNode::unordered);
1935 1935
1936 Node* mask = _gvn.transform(new (C) AndINode(access_flags, intcon(JVM_ACC_HAS_FINALIZER))); 1936 Node* mask = _gvn.transform(new (C) AndINode(access_flags, intcon(JVM_ACC_HAS_FINALIZER)));
1937 Node* check = _gvn.transform(new (C) CmpINode(mask, intcon(0))); 1937 Node* check = _gvn.transform(new (C) CmpINode(mask, intcon(0)));
1938 Node* test = _gvn.transform(new (C) BoolNode(check, BoolTest::ne)); 1938 Node* test = _gvn.transform(new (C) BoolNode(check, BoolTest::ne));
1939 1939