/*
 * Copyright (c) 2012, 2015, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */
package com.oracle.graal.hotspot.replacements;

import static com.oracle.graal.compiler.common.GraalOptions.*;
import static com.oracle.graal.hotspot.nodes.BeginLockScopeNode.*;
import static com.oracle.graal.hotspot.nodes.DirectCompareAndSwapNode.*;
import static com.oracle.graal.hotspot.nodes.EndLockScopeNode.*;
import static com.oracle.graal.hotspot.nodes.VMErrorNode.*;
import static com.oracle.graal.hotspot.replacements.HotSpotReplacementsUtil.*;
import static com.oracle.graal.nodes.extended.BranchProbabilityNode.*;
import static com.oracle.graal.replacements.SnippetTemplate.*;

import java.util.*;

import jdk.internal.jvmci.code.*;
import jdk.internal.jvmci.meta.*;
import jdk.internal.jvmci.options.*;

import com.oracle.graal.api.replacements.*;
import com.oracle.graal.compiler.common.spi.*;
import com.oracle.graal.compiler.common.type.*;
import com.oracle.graal.graph.Node.ConstantNodeParameter;
import com.oracle.graal.graph.Node.NodeIntrinsic;
import com.oracle.graal.graph.iterators.*;
import com.oracle.graal.hotspot.meta.*;
import com.oracle.graal.hotspot.nodes.*;
import com.oracle.graal.hotspot.word.*;
import com.oracle.graal.nodes.*;
import com.oracle.graal.nodes.CallTargetNode.InvokeKind;
import com.oracle.graal.nodes.debug.*;
import com.oracle.graal.nodes.extended.*;
import com.oracle.graal.nodes.java.*;
import com.oracle.graal.nodes.memory.address.*;
import com.oracle.graal.nodes.spi.*;
import com.oracle.graal.nodes.type.*;
import com.oracle.graal.phases.common.inlining.*;
import com.oracle.graal.replacements.*;
import com.oracle.graal.replacements.Snippet.ConstantParameter;
import com.oracle.graal.replacements.SnippetTemplate.AbstractTemplates;
import com.oracle.graal.replacements.SnippetTemplate.Arguments;
import com.oracle.graal.replacements.SnippetTemplate.SnippetInfo;
import com.oracle.graal.word.*;

/**
 * Snippets used for implementing the monitorenter and monitorexit instructions.
 *
 * The locking algorithm used is described in the paper <a
 * href="http://dl.acm.org/citation.cfm?id=1167515.1167496">Eliminating synchronization-related
 * atomic operations with biased locking and bulk rebiasing</a> by Kenneth Russell and David
 * Detlefs.
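 *
 * In brief, {@link #monitorenter} first attempts to take the lock via biased locking, then via a
 * CAS-based stack lock, and finally falls back to the {@code monitorenter} runtime stub for the
 * contended case; {@link #monitorexit} releases the lock along the corresponding paths.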
 */
public class MonitorSnippets implements Snippets {

    public static class Options {

        //@formatter:off
        @Option(help = "", type = OptionType.Debug)
        private static final OptionValue<Boolean> ProfileMonitors = new OptionValue<>(false);
        //@formatter:on
    }

    private static final boolean PROFILE_CONTEXT = false;

    @Fold
    private static boolean doProfile() {
        return Options.ProfileMonitors.getValue();
    }

    /**
     * Monitor operations on objects whose type contains this substring will be traced.
     */
    private static final String TRACE_TYPE_FILTER = System.getProperty("graal.monitors.trace.typeFilter");

    /**
     * Monitor operations in methods whose fully qualified name contains this substring will be
     * traced.
     */
    private static final String TRACE_METHOD_FILTER = System.getProperty("graal.monitors.trace.methodFilter");

    public static final boolean CHECK_BALANCED_MONITORS = Boolean.getBoolean("graal.monitors.checkBalanced");

    @Snippet
    public static void monitorenter(Object object, KlassPointer hub, @ConstantParameter int lockDepth, @ConstantParameter Register threadRegister, @ConstantParameter Register stackPointerRegister,
                    @ConstantParameter boolean trace) {
        verifyOop(object);

        // Load the mark word - this includes a null-check on object
        final Word mark = loadWordFromObject(object, markOffset());

        final Word lock = beginLockScope(lockDepth);

        trace(trace, "           object: 0x%016lx\n", Word.fromObject(object));
        trace(trace, "             lock: 0x%016lx\n", lock);
        trace(trace, "             mark: 0x%016lx\n", mark);

        incCounter();

        if (useBiasedLocking()) {
            // See whether the lock is currently biased toward our thread and
            // whether the epoch is still valid.
            // Note that the runtime guarantees sufficient alignment of JavaThread
            // pointers to allow age to be placed into low bits.
            final Word biasableLockBits = mark.and(biasedLockMaskInPlace());

            // First check to see whether biasing is enabled for this object
            if (probability(NOT_FREQUENT_PROBABILITY, biasableLockBits.notEqual(Word.unsigned(biasedLockPattern())))) {
                // Biasing not enabled -> fall through to lightweight locking
                unbiasable.inc();
            } else {
                // The bias pattern is present in the object's mark word. Need to check
                // whether the bias owner and the epoch are both still current.
                final Word prototypeMarkWord = hub.readWord(prototypeMarkWordOffset(), PROTOTYPE_MARK_WORD_LOCATION);
                final Word thread = registerAsWord(threadRegister);
                final Word tmp = prototypeMarkWord.or(thread).xor(mark).and(~ageMaskInPlace());
                trace(trace, "prototypeMarkWord: 0x%016lx\n", prototypeMarkWord);
                trace(trace, "           thread: 0x%016lx\n", thread);
                trace(trace, "              tmp: 0x%016lx\n", tmp);
                if (probability(FREQUENT_PROBABILITY, tmp.equal(0))) {
                    // Object is already biased to current thread -> done
                    traceObject(trace, "+lock{bias:existing}", object, true);
                    lockBiasExisting.inc();
                    return;
                }

                // At this point we know that the mark word has the bias pattern and
                // that we are not the bias owner in the current epoch. We need to
                // figure out more details about the state of the mark word in order to
                // know what operations can be legally performed on the object's
                // mark word.
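                // (The xor above sets a bit wherever mark disagrees with
                // prototypeMarkWord | thread, with the age bits masked out, so the
                // tests below can examine the biasable and epoch bits in isolation.)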

                // If the low three bits in the xor result aren't clear, that means
                // the prototype header is no longer biasable and we have to revoke
                // the bias on this object.
                if (probability(FREQUENT_PROBABILITY, tmp.and(biasedLockMaskInPlace()).equal(0))) {
                    // Biasing is still enabled for object's type. See whether the
                    // epoch of the current bias is still valid, meaning that the epoch
                    // bits of the mark word are equal to the epoch bits of the
                    // prototype mark word. (Note that the prototype mark word's epoch bits
                    // only change at a safepoint.) If not, attempt to rebias the object
                    // toward the current thread. Note that we must be absolutely sure
                    // that the current epoch is invalid in order to do this because
                    // otherwise the manipulations it performs on the mark word are
                    // illegal.
                    if (probability(FREQUENT_PROBABILITY, tmp.and(epochMaskInPlace()).equal(0))) {
                        // The epoch of the current bias is still valid but we know nothing
                        // about the owner; it might be set or it might be clear. Try to
                        // acquire the bias of the object using an atomic operation. If this
                        // fails we will go into the runtime to revoke the object's bias.
                        // Note that we first construct the presumed unbiased header so we
                        // don't accidentally blow away another thread's valid bias.
                        Word unbiasedMark = mark.and(biasedLockMaskInPlace() | ageMaskInPlace() | epochMaskInPlace());
                        Word biasedMark = unbiasedMark.or(thread);
                        trace(trace, "     unbiasedMark: 0x%016lx\n", unbiasedMark);
                        trace(trace, "       biasedMark: 0x%016lx\n", biasedMark);
                        if (probability(VERY_FAST_PATH_PROBABILITY, compareAndSwap(OffsetAddressNode.address(object, markOffset()), unbiasedMark, biasedMark, MARK_WORD_LOCATION).equal(unbiasedMark))) {
                            // Object is now biased to current thread -> done
                            traceObject(trace, "+lock{bias:acquired}", object, true);
                            lockBiasAcquired.inc();
                            return;
                        }
                        // If the biasing toward our thread failed, this means that another thread
                        // owns the bias and we need to revoke that bias. The revocation will occur
                        // in the interpreter runtime.
                        traceObject(trace, "+lock{stub:revoke}", object, true);
                        lockStubRevoke.inc();
                    } else {
                        // At this point we know the epoch has expired, meaning that the
                        // current bias owner, if any, is actually invalid. Under these
                        // circumstances _only_, are we allowed to use the current mark word
                        // value as the comparison value when doing the CAS to acquire the
                        // bias in the current epoch. In other words, we allow transfer of
                        // the bias from one thread to another directly in this situation.
                        Word biasedMark = prototypeMarkWord.or(thread);
                        trace(trace, "       biasedMark: 0x%016lx\n", biasedMark);
                        if (probability(VERY_FAST_PATH_PROBABILITY, compareAndSwap(OffsetAddressNode.address(object, markOffset()), mark, biasedMark, MARK_WORD_LOCATION).equal(mark))) {
                            // Object is now biased to current thread -> done
                            traceObject(trace, "+lock{bias:transfer}", object, true);
                            lockBiasTransfer.inc();
                            return;
                        }
                        // If the biasing toward our thread failed, then another thread
                        // succeeded in biasing it toward itself and we need to revoke that
                        // bias. The revocation will occur in the runtime in the slow case.
                        traceObject(trace, "+lock{stub:epoch-expired}", object, true);
                        lockStubEpochExpired.inc();
                    }
                    monitorenterStubC(MONITORENTER, object, lock);
                    return;
                } else {
                    // The prototype mark word doesn't have the bias bit set any
                    // more, indicating that objects of this data type are not supposed
                    // to be biased any more. We are going to try to reset the mark of
                    // this object to the prototype value and fall through to the
                    // CAS-based locking scheme. Note that if our CAS fails, it means
                    // that another thread raced us for the privilege of revoking the
                    // bias of this particular object, so it's okay to continue in the
                    // normal locking code.
                    Word result = compareAndSwap(OffsetAddressNode.address(object, markOffset()), mark, prototypeMarkWord, MARK_WORD_LOCATION);

                    // Fall through to the normal CAS-based lock, because no matter what
                    // the result of the above CAS, some thread must have succeeded in
                    // removing the bias bit from the object's header.

                    if (ENABLE_BREAKPOINT) {
                        bkpt(object, mark, tmp, result);
                    }
                    revokeBias.inc();
                }
            }
        }

        // Create the unlocked mark word pattern
        Word unlockedMark = mark.or(unlockedMask());
        trace(trace, "     unlockedMark: 0x%016lx\n", unlockedMark);

        // Copy this unlocked mark word into the lock slot on the stack
        lock.writeWord(lockDisplacedMarkOffset(), unlockedMark, DISPLACED_MARK_WORD_LOCATION);

        // Test if the object's mark word is unlocked, and if so, store the
        // (address of) the lock slot into the object's mark word.
        Word currentMark = compareAndSwap(OffsetAddressNode.address(object, markOffset()), unlockedMark, lock, MARK_WORD_LOCATION);
        if (currentMark.notEqual(unlockedMark)) {
            trace(trace, "      currentMark: 0x%016lx\n", currentMark);
            // The mark word in the object header was not the same.
            // Either the object is locked by another thread or is already locked
            // by the current thread. The latter is true if the mark word
            // is a stack pointer into the current thread's stack, i.e.:
            //
            // 1) (currentMark & aligned_mask) == 0
            // 2) rsp <= currentMark
            // 3) currentMark <= rsp + page_size
            //
            // These 3 tests can be done by evaluating the following expression:
            //
            // (currentMark - rsp) & (aligned_mask - page_size)
            //
            // assuming both the stack pointer and page_size have their least
            // significant 2 bits cleared and page_size is a power of 2
            final Word alignedMask = Word.unsigned(wordSize() - 1);
            final Word stackPointer = registerAsWord(stackPointerRegister).add(config().stackBias);
            if (probability(VERY_SLOW_PATH_PROBABILITY, currentMark.subtract(stackPointer).and(alignedMask.subtract(pageSize())).notEqual(0))) {
                // Most likely not a recursive lock, go into a slow runtime call
                traceObject(trace, "+lock{stub:failed-cas}", object, true);
                lockStubFailedCas.inc();
                monitorenterStubC(MONITORENTER, object, lock);
                return;
            } else {
                // Recursively locked => write 0 to the lock slot
                lock.writeWord(lockDisplacedMarkOffset(), Word.zero(), DISPLACED_MARK_WORD_LOCATION);
                traceObject(trace, "+lock{cas:recursive}", object, true);
                lockCasRecursive.inc();
            }
        } else {
            traceObject(trace, "+lock{cas}", object, true);
            lockCas.inc();
        }
    }

    /**
     * Calls straight out to the monitorenter stub.
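     *
     * This snippet is used in place of {@link #monitorenter} when fast locking is disabled (see
     * {@code useFastLocking} in {@link Templates}).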
     */
    @Snippet
    public static void monitorenterStub(Object object, @ConstantParameter int lockDepth, @ConstantParameter boolean trace) {
        verifyOop(object);
        incCounter();
        if (object == null) {
            DeoptimizeNode.deopt(DeoptimizationAction.InvalidateReprofile, DeoptimizationReason.NullCheckException);
        }
        // BeginLockScope nodes do not read from object so a use of object
        // cannot float above the null check performed above
        final Word lock = beginLockScope(lockDepth);
        traceObject(trace, "+lock{stub}", object, true);
        monitorenterStubC(MONITORENTER, object, lock);
    }

    @Snippet
    public static void monitorexit(Object object, @ConstantParameter int lockDepth, @ConstantParameter boolean trace) {
        trace(trace, "           object: 0x%016lx\n", Word.fromObject(object));
        if (useBiasedLocking()) {
            // Check for biased locking unlock case, which is a no-op
            // Note: we do not have to check the thread ID for two reasons.
            // First, the interpreter checks for IllegalMonitorStateException at
            // a higher level. Second, if the bias was revoked while we held the
            // lock, the object could not be rebiased toward another thread, so
            // the bias bit would be clear.
            final Word mark = loadWordFromObject(object, markOffset());
            trace(trace, "             mark: 0x%016lx\n", mark);
            if (probability(FREQUENT_PROBABILITY, mark.and(biasedLockMaskInPlace()).equal(Word.unsigned(biasedLockPattern())))) {
                endLockScope();
                decCounter();
                traceObject(trace, "-lock{bias}", object, false);
                unlockBias.inc();
                return;
            }
        }

        final Word lock = CurrentLockNode.currentLock(lockDepth);

        // Load displaced mark
        final Word displacedMark = lock.readWord(lockDisplacedMarkOffset(), DISPLACED_MARK_WORD_LOCATION);
        trace(trace, "    displacedMark: 0x%016lx\n", displacedMark);

        if (displacedMark.equal(0)) {
            // Recursive locking => done
            traceObject(trace, "-lock{recursive}", object, false);
            unlockCasRecursive.inc();
        } else {
            verifyOop(object);
            // Test if object's mark word is pointing to the displaced mark word, and if so, restore
            // the displaced mark in the object - if the object's mark word is not pointing to
            // the displaced mark word, do unlocking via runtime call.
            if (probability(VERY_SLOW_PATH_PROBABILITY,
                            DirectCompareAndSwapNode.compareAndSwap(OffsetAddressNode.address(object, markOffset()), lock, displacedMark, MARK_WORD_LOCATION).notEqual(lock))) {
                // The object's mark word was not pointing to the displaced header,
                // we do unlocking via runtime call.
                traceObject(trace, "-lock{stub}", object, false);
                unlockStub.inc();
                monitorexitStubC(MONITOREXIT, object, lock);
            } else {
                traceObject(trace, "-lock{cas}", object, false);
                unlockCas.inc();
            }
        }
        endLockScope();
        decCounter();
    }

    /**
     * Calls straight out to the monitorexit stub.
     */
    @Snippet
    public static void monitorexitStub(Object object, @ConstantParameter int lockDepth, @ConstantParameter boolean trace) {
        verifyOop(object);
        traceObject(trace, "-lock{stub}", object, false);
        final Word lock = CurrentLockNode.currentLock(lockDepth);
        monitorexitStubC(MONITOREXIT, object, lock);
        endLockScope();
        decCounter();
    }

    private static void traceObject(boolean enabled, String action, Object object, boolean enter) {
        if (doProfile()) {
            DynamicCounterNode.counter(action, enter ?
"number of monitor enters" : "number of monitor exits", 1, PROFILE_CONTEXT); 365 } 366 if (enabled) { 367 Log.print(action); 368 Log.print(' '); 369 Log.printlnObject(object); 370 } 371 } 372 373 private static void trace(boolean enabled, String format, WordBase value) { 374 if (enabled) { 375 Log.printf(format, value.rawValue()); 376 } 377 } 378 379 /** 380 * Leaving the breakpoint code in to provide an example of how to use the {@link BreakpointNode} 381 * intrinsic. 382 */ 383 private static final boolean ENABLE_BREAKPOINT = false; 384 385 private static final LocationIdentity MONITOR_COUNTER_LOCATION = NamedLocationIdentity.mutable("MonitorCounter"); 386 387 @NodeIntrinsic(BreakpointNode.class) 388 static native void bkpt(Object object, Word mark, Word tmp, Word value); 389 390 private static void incCounter() { 391 if (CHECK_BALANCED_MONITORS) { 392 final Word counter = MonitorCounterNode.counter(); 393 final int count = counter.readInt(0, MONITOR_COUNTER_LOCATION); 394 counter.writeInt(0, count + 1, MONITOR_COUNTER_LOCATION); 395 } 396 } 397 398 private static void decCounter() { 399 if (CHECK_BALANCED_MONITORS) { 400 final Word counter = MonitorCounterNode.counter(); 401 final int count = counter.readInt(0, MONITOR_COUNTER_LOCATION); 402 counter.writeInt(0, count - 1, MONITOR_COUNTER_LOCATION); 403 } 404 } 405 406 @Snippet 407 private static void initCounter() { 408 final Word counter = MonitorCounterNode.counter(); 409 counter.writeInt(0, 0, MONITOR_COUNTER_LOCATION); 410 } 411 412 @Snippet 413 private static void checkCounter(@ConstantParameter String errMsg) { 414 final Word counter = MonitorCounterNode.counter(); 415 final int count = counter.readInt(0, MONITOR_COUNTER_LOCATION); 416 if (count != 0) { 417 vmError(errMsg, count); 418 } 419 } 420 421 public static class Templates extends AbstractTemplates { 422 423 private final SnippetInfo monitorenter = snippet(MonitorSnippets.class, "monitorenter"); 424 private final SnippetInfo monitorexit = snippet(MonitorSnippets.class, "monitorexit"); 425 private final SnippetInfo monitorenterStub = snippet(MonitorSnippets.class, "monitorenterStub"); 426 private final SnippetInfo monitorexitStub = snippet(MonitorSnippets.class, "monitorexitStub"); 427 private final SnippetInfo initCounter = snippet(MonitorSnippets.class, "initCounter"); 428 private final SnippetInfo checkCounter = snippet(MonitorSnippets.class, "checkCounter"); 429 430 private final boolean useFastLocking; 431 432 public Templates(HotSpotProviders providers, TargetDescription target, boolean useFastLocking) { 433 super(providers, providers.getSnippetReflection(), target); 434 this.useFastLocking = useFastLocking; 435 } 436 437 public void lower(RawMonitorEnterNode monitorenterNode, HotSpotRegistersProvider registers, LoweringTool tool) { 438 StructuredGraph graph = monitorenterNode.graph(); 439 checkBalancedMonitors(graph, tool); 440 441 assert ((ObjectStamp) monitorenterNode.object().stamp()).nonNull(); 442 443 Arguments args; 444 if (useFastLocking) { 445 args = new Arguments(monitorenter, graph.getGuardsStage(), tool.getLoweringStage()); 446 } else { 447 args = new Arguments(monitorenterStub, graph.getGuardsStage(), tool.getLoweringStage()); 448 } 449 args.add("object", monitorenterNode.object()); 450 args.add("hub", monitorenterNode.getHub()); 451 args.addConst("lockDepth", monitorenterNode.getMonitorId().getLockDepth()); 452 args.addConst("threadRegister", registers.getThreadRegister()); 453 args.addConst("stackPointerRegister", registers.getStackPointerRegister()); 
            args.addConst("trace", isTracingEnabledForType(monitorenterNode.object()) || isTracingEnabledForMethod(graph.method()));

            template(args).instantiate(providers.getMetaAccess(), monitorenterNode, DEFAULT_REPLACER, args);
        }

        public void lower(MonitorExitNode monitorexitNode, LoweringTool tool) {
            StructuredGraph graph = monitorexitNode.graph();

            Arguments args;
            if (useFastLocking) {
                args = new Arguments(monitorexit, graph.getGuardsStage(), tool.getLoweringStage());
            } else {
                args = new Arguments(monitorexitStub, graph.getGuardsStage(), tool.getLoweringStage());
            }
            args.add("object", monitorexitNode.object());
            args.addConst("lockDepth", monitorexitNode.getMonitorId().getLockDepth());
            args.addConst("trace", isTracingEnabledForType(monitorexitNode.object()) || isTracingEnabledForMethod(graph.method()));

            template(args).instantiate(providers.getMetaAccess(), monitorexitNode, DEFAULT_REPLACER, args);
        }

        static boolean isTracingEnabledForType(ValueNode object) {
            ResolvedJavaType type = StampTool.typeOrNull(object.stamp());
            if (TRACE_TYPE_FILTER == null) {
                return false;
            } else {
                if (TRACE_TYPE_FILTER.length() == 0) {
                    return true;
                }
                if (type == null) {
                    return false;
                }
                return (type.getName().contains(TRACE_TYPE_FILTER));
            }
        }

        static boolean isTracingEnabledForMethod(ResolvedJavaMethod method) {
            if (TRACE_METHOD_FILTER == null) {
                return false;
            } else {
                if (TRACE_METHOD_FILTER.length() == 0) {
                    return true;
                }
                if (method == null) {
                    return false;
                }
                return (method.format("%H.%n").contains(TRACE_METHOD_FILTER));
            }
        }

        /**
         * If balanced monitor checking is enabled then nodes are inserted at the start and all
         * return points of the graph to initialize and check the monitor counter respectively.
         */
        private void checkBalancedMonitors(StructuredGraph graph, LoweringTool tool) {
            if (CHECK_BALANCED_MONITORS) {
                NodeIterable<MonitorCounterNode> nodes = graph.getNodes().filter(MonitorCounterNode.class);
                if (nodes.isEmpty()) {
                    // Only insert the nodes if this is the first monitorenter being lowered.
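                    // initCounter is inlined right after the graph's start node and
                    // checkCounter before every return, so a non-zero count at any exit
                    // reports unbalanced monitors via vmError.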
                    JavaType returnType = initCounter.getMethod().getSignature().getReturnType(initCounter.getMethod().getDeclaringClass());
                    MethodCallTargetNode callTarget = graph.add(new MethodCallTargetNode(InvokeKind.Static, initCounter.getMethod(), new ValueNode[0], returnType, null));
                    InvokeNode invoke = graph.add(new InvokeNode(callTarget, 0));
                    invoke.setStateAfter(graph.start().stateAfter());
                    graph.addAfterFixed(graph.start(), invoke);

                    StructuredGraph inlineeGraph = providers.getReplacements().getSnippet(initCounter.getMethod(), null);
                    InliningUtil.inline(invoke, inlineeGraph, false, null);

                    List<ReturnNode> rets = graph.getNodes(ReturnNode.TYPE).snapshot();
                    for (ReturnNode ret : rets) {
                        returnType = checkCounter.getMethod().getSignature().getReturnType(checkCounter.getMethod().getDeclaringClass());
                        String msg = "unbalanced monitors in " + graph.method().format("%H.%n(%p)") + ", count = %d";
                        ConstantNode errMsg = ConstantNode.forConstant(tool.getConstantReflection().forString(msg), providers.getMetaAccess(), graph);
                        callTarget = graph.add(new MethodCallTargetNode(InvokeKind.Static, checkCounter.getMethod(), new ValueNode[]{errMsg}, returnType, null));
                        invoke = graph.add(new InvokeNode(callTarget, 0));
                        FrameState stateAfter = new FrameState(null, graph.method(), BytecodeFrame.AFTER_BCI, new ValueNode[0], new ValueNode[0], 0, new ValueNode[0], null, false, false);
                        invoke.setStateAfter(graph.add(stateAfter));
                        graph.addBeforeFixed(ret, invoke);

                        Arguments args = new Arguments(checkCounter, graph.getGuardsStage(), tool.getLoweringStage());
                        args.addConst("errMsg", msg);
                        inlineeGraph = template(args).copySpecializedGraph();

                        // inlineeGraph = replacements.getSnippet(checkCounter.getMethod());
                        InliningUtil.inline(invoke, inlineeGraph, false, null);
                    }
                }
            }
        }
    }

    public static final ForeignCallDescriptor MONITORENTER = new ForeignCallDescriptor("monitorenter", void.class, Object.class, Word.class);
    public static final ForeignCallDescriptor MONITOREXIT = new ForeignCallDescriptor("monitorexit", void.class, Object.class, Word.class);

    @NodeIntrinsic(ForeignCallNode.class)
    private static native void monitorenterStubC(@ConstantNodeParameter ForeignCallDescriptor descriptor, Object object, Word lock);

    @NodeIntrinsic(ForeignCallNode.class)
    private static native void monitorexitStubC(@ConstantNodeParameter ForeignCallDescriptor descriptor, Object object, Word lock);

    /**
     * Counters for the various paths for acquiring a lock. The counters whose names start with
     * {@code "lock"} are mutually exclusive. The other counters are for paths that may be shared.
     */
    private static final SnippetCounter.Group lockCounters = SnippetCounters.getValue() ?
                    new SnippetCounter.Group("MonitorEnters") : null;
    static final SnippetCounter lockBiasExisting = new SnippetCounter(lockCounters, "lock{bias:existing}", "bias-locked previously biased object");
    static final SnippetCounter lockBiasAcquired = new SnippetCounter(lockCounters, "lock{bias:acquired}", "bias-locked newly biased object");
    static final SnippetCounter lockBiasTransfer = new SnippetCounter(lockCounters, "lock{bias:transfer}", "bias-locked, biased transferred");
    static final SnippetCounter lockCas = new SnippetCounter(lockCounters, "lock{cas}", "cas-locked an object");
    static final SnippetCounter lockCasRecursive = new SnippetCounter(lockCounters, "lock{cas:recursive}", "cas-locked, recursive");
    static final SnippetCounter lockStubEpochExpired = new SnippetCounter(lockCounters, "lock{stub:epoch-expired}", "stub-locked, epoch expired");
    static final SnippetCounter lockStubRevoke = new SnippetCounter(lockCounters, "lock{stub:revoke}", "stub-locked, biased revoked");
    static final SnippetCounter lockStubFailedCas = new SnippetCounter(lockCounters, "lock{stub:failed-cas}", "stub-locked, failed cas");

    static final SnippetCounter unbiasable = new SnippetCounter(lockCounters, "unbiasable", "object with unbiasable type");
    static final SnippetCounter revokeBias = new SnippetCounter(lockCounters, "revokeBias", "object had bias revoked");

    /**
     * Counters for the various paths for releasing a lock. The counters whose names start with
     * {@code "unlock"} are mutually exclusive. The other counters are for paths that may be
     * shared.
     */
    private static final SnippetCounter.Group unlockCounters = SnippetCounters.getValue() ? new SnippetCounter.Group("MonitorExits") : null;
    static final SnippetCounter unlockBias = new SnippetCounter(unlockCounters, "unlock{bias}", "bias-unlocked an object");
    static final SnippetCounter unlockCas = new SnippetCounter(unlockCounters, "unlock{cas}", "cas-unlocked an object");
    static final SnippetCounter unlockCasRecursive = new SnippetCounter(unlockCounters, "unlock{cas:recursive}", "cas-unlocked an object, recursive");
    static final SnippetCounter unlockStub = new SnippetCounter(unlockCounters, "unlock{stub}", "stub-unlocked an object");

}