comparison graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/replacements/MonitorSnippets.java @ 21735:11e37c289365

added counters for MonitorSnippets
author Doug Simon <doug.simon@oracle.com>
date Thu, 04 Jun 2015 23:04:52 +0200
parents f2a6088ddebc
children 1ab2c7bb6f0f
comparing 21734:c2e90b2b3fcc with 21735:11e37c289365
@@ -31,10 +31,12 @@
 import com.oracle.jvmci.meta.ResolvedJavaType;
 import com.oracle.jvmci.meta.LocationIdentity;
 import com.oracle.jvmci.meta.ResolvedJavaMethod;
 import com.oracle.jvmci.meta.DeoptimizationAction;
 import com.oracle.jvmci.meta.ForeignCallDescriptor;
+
+import static com.oracle.graal.compiler.common.GraalOptions.*;
 import static com.oracle.graal.hotspot.nodes.BeginLockScopeNode.*;
 import static com.oracle.graal.hotspot.nodes.DirectCompareAndSwapNode.*;
 import static com.oracle.graal.hotspot.nodes.EndLockScopeNode.*;
 import static com.oracle.graal.hotspot.nodes.VMErrorNode.*;
 import static com.oracle.graal.hotspot.replacements.HotSpotReplacementsUtil.*;
@@ -133,10 +135,11 @@
             final Word biasableLockBits = mark.and(biasedLockMaskInPlace());
 
             // First check to see whether biasing is enabled for this object
             if (probability(NOT_FREQUENT_PROBABILITY, biasableLockBits.notEqual(Word.unsigned(biasedLockPattern())))) {
                 // Biasing not enabled -> fall through to lightweight locking
+                unbiasable.inc();
             } else {
                 // The bias pattern is present in the object's mark word. Need to check
                 // whether the bias owner and the epoch are both still current.
                 KlassPointer hub = loadHubIntrinsic(object, anchorNode);
                 final Word prototypeMarkWord = hub.readWord(prototypeMarkWordOffset(), PROTOTYPE_MARK_WORD_LOCATION);
@@ -146,10 +149,11 @@
                 trace(trace, " thread: 0x%016lx\n", thread);
                 trace(trace, " tmp: 0x%016lx\n", tmp);
                 if (probability(FREQUENT_PROBABILITY, tmp.equal(0))) {
                     // Object is already biased to current thread -> done
                     traceObject(trace, "+lock{bias:existing}", object, true);
+                    lockBiasExisting.inc();
                     return;
                 }
 
                 // At this point we know that the mark word has the bias pattern and
                 // that we are not the bias owner in the current epoch. We need to
@@ -182,16 +186,18 @@
                         trace(trace, " unbiasedMark: 0x%016lx\n", unbiasedMark);
                         trace(trace, " biasedMark: 0x%016lx\n", biasedMark);
                         if (probability(VERY_FAST_PATH_PROBABILITY, compareAndSwap(object, markOffset(), unbiasedMark, biasedMark, MARK_WORD_LOCATION).equal(unbiasedMark))) {
                             // Object is now biased to current thread -> done
                             traceObject(trace, "+lock{bias:acquired}", object, true);
+                            lockBiasAcquired.inc();
                             return;
                         }
                         // If the biasing toward our thread failed, this means that another thread
                         // owns the bias and we need to revoke that bias. The revocation will occur
                         // in the interpreter runtime.
                         traceObject(trace, "+lock{stub:revoke}", object, true);
+                        lockStubRevoke.inc();
                     } else {
                         // At this point we know the epoch has expired, meaning that the
                         // current bias owner, if any, is actually invalid. Under these
                         // circumstances _only_, are we allowed to use the current mark word
                         // value as the comparison value when doing the CAS to acquire the
@@ -200,16 +206,18 @@
                         Word biasedMark = prototypeMarkWord.or(thread);
                         trace(trace, " biasedMark: 0x%016lx\n", biasedMark);
                         if (probability(VERY_FAST_PATH_PROBABILITY, compareAndSwap(object, markOffset(), mark, biasedMark, MARK_WORD_LOCATION).equal(mark))) {
                             // Object is now biased to current thread -> done
                             traceObject(trace, "+lock{bias:transfer}", object, true);
+                            lockBiasTransfer.inc();
                             return;
                         }
                         // If the biasing toward our thread failed, then another thread
                         // succeeded in biasing it toward itself and we need to revoke that
                         // bias. The revocation will occur in the runtime in the slow case.
                         traceObject(trace, "+lock{stub:epoch-expired}", object, true);
+                        lockStubEpochExpired.inc();
                     }
                     monitorenterStubC(MONITORENTER, object, lock);
                     return;
                 } else {
                     // The prototype mark word doesn't have the bias bit set any
@@ -227,10 +235,11 @@
                     // removing the bias bit from the object's header.
 
                     if (ENABLE_BREAKPOINT) {
                         bkpt(object, mark, tmp, result);
                     }
+                    revokeBias.inc();
                 }
             }
         }
 
         // Create the unlocked mark word pattern
@@ -263,19 +272,22 @@
             final Word alignedMask = Word.unsigned(wordSize() - 1);
             final Word stackPointer = registerAsWord(stackPointerRegister).add(config().stackBias);
             if (probability(VERY_SLOW_PATH_PROBABILITY, currentMark.subtract(stackPointer).and(alignedMask.subtract(pageSize())).notEqual(0))) {
                 // Most likely not a recursive lock, go into a slow runtime call
                 traceObject(trace, "+lock{stub:failed-cas}", object, true);
+                lockStubFailedCas.inc();
                 monitorenterStubC(MONITORENTER, object, lock);
                 return;
             } else {
                 // Recursively locked => write 0 to the lock slot
                 lock.writeWord(lockDisplacedMarkOffset(), Word.zero(), DISPLACED_MARK_WORD_LOCATION);
-                traceObject(trace, "+lock{recursive}", object, true);
+                traceObject(trace, "+lock{cas:recursive}", object, true);
+                lockCasRecursive.inc();
             }
         } else {
             traceObject(trace, "+lock{cas}", object, true);
+            lockCas.inc();
         }
     }
 
     /**
      * Calls straight out to the monitorenter stub.
@@ -308,10 +320,11 @@
         trace(trace, " mark: 0x%016lx\n", mark);
         if (probability(FREQUENT_PROBABILITY, mark.and(biasedLockMaskInPlace()).equal(Word.unsigned(biasedLockPattern())))) {
             endLockScope();
             decCounter();
             traceObject(trace, "-lock{bias}", object, false);
+            unlockBias.inc();
             return;
         }
     }
 
     final Word lock = CurrentLockNode.currentLock(lockDepth);
@@ -321,22 +334,25 @@
         trace(trace, " displacedMark: 0x%016lx\n", displacedMark);
 
         if (displacedMark.equal(0)) {
             // Recursive locking => done
             traceObject(trace, "-lock{recursive}", object, false);
+            unlockCasRecursive.inc();
         } else {
             verifyOop(object);
             // Test if object's mark word is pointing to the displaced mark word, and if so, restore
             // the displaced mark in the object - if the object's mark word is not pointing to
             // the displaced mark word, do unlocking via runtime call.
             if (probability(VERY_SLOW_PATH_PROBABILITY, DirectCompareAndSwapNode.compareAndSwap(object, markOffset(), lock, displacedMark, MARK_WORD_LOCATION).notEqual(lock))) {
                 // The object's mark word was not pointing to the displaced header,
                 // we do unlocking via runtime call.
                 traceObject(trace, "-lock{stub}", object, false);
+                unlockStub.inc();
                 monitorexitStubC(MONITOREXIT, object, lock);
             } else {
                 traceObject(trace, "-lock{cas}", object, false);
+                unlockCas.inc();
             }
         }
         endLockScope();
         decCounter();
     }
@@ -541,6 +557,33 @@
     private static native void monitorenterStubC(@ConstantNodeParameter ForeignCallDescriptor descriptor, Object object, Word lock);
 
     @NodeIntrinsic(ForeignCallNode.class)
     private static native void monitorexitStubC(@ConstantNodeParameter ForeignCallDescriptor descriptor, Object object, Word lock);
 
+    /**
+     * Counters for the various paths for acquiring a lock. The counters whose names start with
+     * {@code "lock"} are mutually exclusive. The other counters are for paths that may be shared.
+     */
+    private static final SnippetCounter.Group lockCounters = SnippetCounters.getValue() ? new SnippetCounter.Group("MonitorEnters") : null;
+    static final SnippetCounter lockBiasExisting = new SnippetCounter(lockCounters, "lock{bias:existing}", "bias-locked previously biased object");
+    static final SnippetCounter lockBiasAcquired = new SnippetCounter(lockCounters, "lock{bias:acquired}", "bias-locked newly biased object");
+    static final SnippetCounter lockBiasTransfer = new SnippetCounter(lockCounters, "lock{bias:transfer}", "bias-locked, biased transferred");
+    static final SnippetCounter lockCas = new SnippetCounter(lockCounters, "lock{cas}", "cas-locked an object");
+    static final SnippetCounter lockCasRecursive = new SnippetCounter(lockCounters, "lock{cas:recursive}", "cas-locked, recursive");
+    static final SnippetCounter lockStubEpochExpired = new SnippetCounter(lockCounters, "lock{stub:epoch-expired}", "stub-locked, epoch expired");
+    static final SnippetCounter lockStubRevoke = new SnippetCounter(lockCounters, "lock{stub:revoke}", "stub-locked, biased revoked");
+    static final SnippetCounter lockStubFailedCas = new SnippetCounter(lockCounters, "lock{stub:failed-cas}", "stub-locked, failed cas");
+
+    static final SnippetCounter unbiasable = new SnippetCounter(lockCounters, "unbiasable", "object with unbiasable type");
+    static final SnippetCounter revokeBias = new SnippetCounter(lockCounters, "revokeBias", "object had bias revoked");
+
+    /**
+     * Counters for the various paths for releasing a lock. The counters whose names start with
+     * {@code "unlock"} are mutually exclusive. The other counters are for paths that may be shared.
+     */
+    private static final SnippetCounter.Group unlockCounters = SnippetCounters.getValue() ? new SnippetCounter.Group("MonitorExits") : null;
+    static final SnippetCounter unlockBias = new SnippetCounter(unlockCounters, "unlock{bias}", "bias-unlocked an object");
+    static final SnippetCounter unlockCas = new SnippetCounter(unlockCounters, "unlock{cas}", "cas-unlocked an object");
+    static final SnippetCounter unlockCasRecursive = new SnippetCounter(unlockCounters, "unlock{cas:recursive}", "cas-unlocked an object, recursive");
+    static final SnippetCounter unlockStub = new SnippetCounter(unlockCounters, "unlock{stub}", "stub-unlocked an object");
+
 }
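
Editor's note: the change instruments every monitorenter/monitorexit path with exactly one counter increment, and the counters are only materialized when the SnippetCounters option is set; otherwise the group is null. To illustrate that enabled-or-null pattern outside of Graal, here is a minimal, self-contained sketch. PathCounter, PathCounterGroup, and the demo.snippetCounters system property are hypothetical stand-ins invented for this sketch; they are not Graal's SnippetCounter API, and real snippet counters are lowered by the compiler rather than incremented via AtomicLong.

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;

// Hypothetical stand-in for SnippetCounter.Group: a named group of
// counters that only exists when counting is enabled.
final class PathCounterGroup {
    final String name;
    final List<PathCounter> counters = new ArrayList<>();

    PathCounterGroup(String name) {
        this.name = name;
    }

    // Print one line per counter registered in this group.
    void report() {
        System.out.println(name + ":");
        for (PathCounter c : counters) {
            System.out.printf("  %-24s %8d  (%s)%n", c.name, c.value.get(), c.description);
        }
    }
}

// Hypothetical stand-in for SnippetCounter.
final class PathCounter {
    final String name;
    final String description;
    final boolean enabled;
    final AtomicLong value = new AtomicLong();

    // A null group means counting is disabled; inc() then does nothing.
    PathCounter(PathCounterGroup group, String name, String description) {
        this.name = name;
        this.description = description;
        this.enabled = group != null;
        if (enabled) {
            group.counters.add(this);
        }
    }

    void inc() {
        if (enabled) {
            value.incrementAndGet();
        }
    }
}

class MonitorCounterDemo {
    // Mirrors "SnippetCounters.getValue() ? new SnippetCounter.Group(...) : null";
    // the property name is made up for this demo.
    static final PathCounterGroup lockCounters =
            Boolean.getBoolean("demo.snippetCounters") ? new PathCounterGroup("MonitorEnters") : null;
    static final PathCounter lockCas = new PathCounter(lockCounters, "lock{cas}", "cas-locked an object");
    static final PathCounter lockCasRecursive = new PathCounter(lockCounters, "lock{cas:recursive}", "cas-locked, recursive");

    public static void main(String[] args) {
        lockCas.inc();          // fast path taken twice
        lockCas.inc();
        lockCasRecursive.inc(); // recursive path taken once
        if (lockCounters != null) {
            lockCounters.report();
        }
    }
}

Run with -Ddemo.snippetCounters=true to see the report; without the property the group is null and every inc() is a no-op, which mirrors why the change above guards group construction on the option value: the mutually exclusive lock{...}/unlock{...} counters then cost nothing on the fast paths when counting is disabled.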