changeset 21735:11e37c289365

added counters for MonitorSnippets
author Doug Simon <doug.simon@oracle.com>
date Thu, 04 Jun 2015 23:04:52 +0200
parents c2e90b2b3fcc
children 28943e5044b5
files graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/replacements/MonitorSnippets.java
diffstat 1 files changed, 44 insertions(+), 1 deletions(-)
--- a/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/replacements/MonitorSnippets.java	Thu Jun 04 12:50:21 2015 -0700
+++ b/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/replacements/MonitorSnippets.java	Thu Jun 04 23:04:52 2015 +0200
@@ -33,6 +33,8 @@
 import com.oracle.jvmci.meta.ResolvedJavaMethod;
 import com.oracle.jvmci.meta.DeoptimizationAction;
 import com.oracle.jvmci.meta.ForeignCallDescriptor;
+
+import static com.oracle.graal.compiler.common.GraalOptions.*;
 import static com.oracle.graal.hotspot.nodes.BeginLockScopeNode.*;
 import static com.oracle.graal.hotspot.nodes.DirectCompareAndSwapNode.*;
 import static com.oracle.graal.hotspot.nodes.EndLockScopeNode.*;
@@ -135,6 +137,7 @@
             // First check to see whether biasing is enabled for this object
             if (probability(NOT_FREQUENT_PROBABILITY, biasableLockBits.notEqual(Word.unsigned(biasedLockPattern())))) {
                 // Biasing not enabled -> fall through to lightweight locking
+                unbiasable.inc();
             } else {
                 // The bias pattern is present in the object's mark word. Need to check
                 // whether the bias owner and the epoch are both still current.
@@ -148,6 +151,7 @@
                 if (probability(FREQUENT_PROBABILITY, tmp.equal(0))) {
                     // Object is already biased to current thread -> done
                     traceObject(trace, "+lock{bias:existing}", object, true);
+                    lockBiasExisting.inc();
                     return;
                 }
 
@@ -184,12 +188,14 @@
                         if (probability(VERY_FAST_PATH_PROBABILITY, compareAndSwap(object, markOffset(), unbiasedMark, biasedMark, MARK_WORD_LOCATION).equal(unbiasedMark))) {
                             // Object is now biased to current thread -> done
                             traceObject(trace, "+lock{bias:acquired}", object, true);
+                            lockBiasAcquired.inc();
                             return;
                         }
                         // If the biasing toward our thread failed, this means that another thread
                         // owns the bias and we need to revoke that bias. The revocation will occur
                         // in the interpreter runtime.
                         traceObject(trace, "+lock{stub:revoke}", object, true);
+                        lockStubRevoke.inc();
                     } else {
                         // At this point we know the epoch has expired, meaning that the
                         // current bias owner, if any, is actually invalid. Under these
@@ -202,12 +208,14 @@
                         if (probability(VERY_FAST_PATH_PROBABILITY, compareAndSwap(object, markOffset(), mark, biasedMark, MARK_WORD_LOCATION).equal(mark))) {
                             // Object is now biased to current thread -> done
                             traceObject(trace, "+lock{bias:transfer}", object, true);
+                            lockBiasTransfer.inc();
                             return;
                         }
                         // If the biasing toward our thread failed, then another thread
                         // succeeded in biasing it toward itself and we need to revoke that
                         // bias. The revocation will occur in the runtime in the slow case.
                         traceObject(trace, "+lock{stub:epoch-expired}", object, true);
+                        lockStubEpochExpired.inc();
                     }
                     monitorenterStubC(MONITORENTER, object, lock);
                     return;
@@ -229,6 +237,7 @@
                     if (ENABLE_BREAKPOINT) {
                         bkpt(object, mark, tmp, result);
                     }
+                    revokeBias.inc();
                 }
             }
         }
@@ -265,15 +274,18 @@
             if (probability(VERY_SLOW_PATH_PROBABILITY, currentMark.subtract(stackPointer).and(alignedMask.subtract(pageSize())).notEqual(0))) {
                 // Most likely not a recursive lock, go into a slow runtime call
                 traceObject(trace, "+lock{stub:failed-cas}", object, true);
+                lockStubFailedCas.inc();
                 monitorenterStubC(MONITORENTER, object, lock);
                 return;
             } else {
                 // Recursively locked => write 0 to the lock slot
                 lock.writeWord(lockDisplacedMarkOffset(), Word.zero(), DISPLACED_MARK_WORD_LOCATION);
-                traceObject(trace, "+lock{recursive}", object, true);
+                traceObject(trace, "+lock{cas:recursive}", object, true);
+                lockCasRecursive.inc();
             }
         } else {
             traceObject(trace, "+lock{cas}", object, true);
+            lockCas.inc();
         }
     }
 
@@ -310,6 +322,7 @@
                 endLockScope();
                 decCounter();
                 traceObject(trace, "-lock{bias}", object, false);
+                unlockBias.inc();
                 return;
             }
         }
@@ -323,6 +336,7 @@
         if (displacedMark.equal(0)) {
             // Recursive locking => done
             traceObject(trace, "-lock{recursive}", object, false);
+            unlockCasRecursive.inc();
         } else {
             verifyOop(object);
             // Test if object's mark word is pointing to the displaced mark word, and if so, restore
@@ -332,9 +346,11 @@
                 // The object's mark word was not pointing to the displaced header,
                 // we do unlocking via runtime call.
                 traceObject(trace, "-lock{stub}", object, false);
+                unlockStub.inc();
                 monitorexitStubC(MONITOREXIT, object, lock);
             } else {
                 traceObject(trace, "-lock{cas}", object, false);
+                unlockCas.inc();
             }
         }
         endLockScope();
@@ -543,4 +559,31 @@
     @NodeIntrinsic(ForeignCallNode.class)
     private static native void monitorexitStubC(@ConstantNodeParameter ForeignCallDescriptor descriptor, Object object, Word lock);
 
+    /**
+     * Counters for the various paths for acquiring a lock. The counters whose names start with
+     * {@code "lock"} are mutually exclusive. The other counters are for paths that may be shared.
+     */
+    private static final SnippetCounter.Group lockCounters = SnippetCounters.getValue() ? new SnippetCounter.Group("MonitorEnters") : null;
+    static final SnippetCounter lockBiasExisting = new SnippetCounter(lockCounters, "lock{bias:existing}", "bias-locked previously biased object");
+    static final SnippetCounter lockBiasAcquired = new SnippetCounter(lockCounters, "lock{bias:acquired}", "bias-locked newly biased object");
+    static final SnippetCounter lockBiasTransfer = new SnippetCounter(lockCounters, "lock{bias:transfer}", "bias-locked, biased transferred");
+    static final SnippetCounter lockCas = new SnippetCounter(lockCounters, "lock{cas}", "cas-locked an object");
+    static final SnippetCounter lockCasRecursive = new SnippetCounter(lockCounters, "lock{cas:recursive}", "cas-locked, recursive");
+    static final SnippetCounter lockStubEpochExpired = new SnippetCounter(lockCounters, "lock{stub:epoch-expired}", "stub-locked, epoch expired");
+    static final SnippetCounter lockStubRevoke = new SnippetCounter(lockCounters, "lock{stub:revoke}", "stub-locked, biased revoked");
+    static final SnippetCounter lockStubFailedCas = new SnippetCounter(lockCounters, "lock{stub:failed-cas}", "stub-locked, failed cas");
+
+    static final SnippetCounter unbiasable = new SnippetCounter(lockCounters, "unbiasable", "object with unbiasable type");
+    static final SnippetCounter revokeBias = new SnippetCounter(lockCounters, "revokeBias", "object had bias revoked");
+
+    /**
+     * Counters for the various paths for releasing a lock. The counters whose names start with
+     * {@code "unlock"} are mutually exclusive. The other counters are for paths that may be shared.
+     */
+    private static final SnippetCounter.Group unlockCounters = SnippetCounters.getValue() ? new SnippetCounter.Group("MonitorExits") : null;
+    static final SnippetCounter unlockBias = new SnippetCounter(unlockCounters, "unlock{bias}", "bias-unlocked an object");
+    static final SnippetCounter unlockCas = new SnippetCounter(unlockCounters, "unlock{cas}", "cas-unlocked an object");
+    static final SnippetCounter unlockCasRecursive = new SnippetCounter(unlockCounters, "unlock{cas:recursive}", "cas-unlocked an object, recursive");
+    static final SnippetCounter unlockStub = new SnippetCounter(unlockCounters, "unlock{stub}", "stub-unlocked an object");
+
 }
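
For context, the pattern this patch introduces works roughly as follows: a SnippetCounter.Group is only created when the SnippetCounters option is enabled, each SnippetCounter is constructed against that (possibly null) group, and every mutually exclusive lock/unlock path bumps exactly one counter via inc(). Below is a minimal sketch of the same pattern outside MonitorSnippets; the class name PathCounters, the method record, and the group/counter names are hypothetical, and the import path for SnippetCounter is assumed from the Graal source layout of this era.

    import static com.oracle.graal.compiler.common.GraalOptions.*;

    import com.oracle.graal.replacements.SnippetCounter;

    public class PathCounters {

        // The group is only allocated when the SnippetCounters option is enabled;
        // otherwise the counters below are created with a null group and stay inert,
        // mirroring the lockCounters/unlockCounters fields added by this change.
        private static final SnippetCounter.Group counters =
                        SnippetCounters.getValue() ? new SnippetCounter.Group("ExamplePaths") : null;

        static final SnippetCounter fastPath = new SnippetCounter(counters, "fast", "took the fast path");
        static final SnippetCounter slowPath = new SnippetCounter(counters, "slow", "fell back to the slow path");

        static void record(boolean fast) {
            // Mutually exclusive paths each increment exactly one counter,
            // just as each lock/unlock outcome in MonitorSnippets does.
            if (fast) {
                fastPath.inc();
            } else {
                slowPath.inc();
            }
        }
    }

Keeping the enter and exit counters in separate named groups ("MonitorEnters" and "MonitorExits") is what makes the reported values easy to read when counter output is produced with SnippetCounters enabled: the mutually exclusive "lock"/"unlock" counters within a group should sum to the total number of monitor operations taken through these snippets.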