# HG changeset patch
# User Christian Haeubl
# Date 1355486569 -3600
# Node ID 8a3efb8c831d022c0405763400e8ed8a4efdb21b
# Parent 31c4d9f9e9221c7adb95264f9911cb7cf7df5a81
# Parent 2ed8d74e5984fc7796e9164206aa31f8f7925292
Merge.

diff -r 2ed8d74e5984 -r 8a3efb8c831d graal/com.oracle.graal.api.code/src/com/oracle/graal/api/code/Assumptions.java
--- a/graal/com.oracle.graal.api.code/src/com/oracle/graal/api/code/Assumptions.java	Fri Dec 14 10:20:54 2012 +0100
+++ b/graal/com.oracle.graal.api.code/src/com/oracle/graal/api/code/Assumptions.java	Fri Dec 14 13:02:49 2012 +0100
@@ -64,7 +64,7 @@
         public ConcreteSubtype(ResolvedJavaType context, ResolvedJavaType subtype) {
             this.context = context;
             this.subtype = subtype;
-            assert !subtype.isInterface();
+            assert !subtype.isInterface() : subtype.toString() + " : " + context.toString();
         }
 
         @Override
@@ -267,7 +267,7 @@
         record(new MethodContents(method));
     }
 
-    private void record(Assumption assumption) {
+    public void record(Assumption assumption) {
         if (list == null) {
             list = new Assumption[4];
         } else {
diff -r 2ed8d74e5984 -r 8a3efb8c831d graal/com.oracle.graal.asm.amd64/src/com/oracle/graal/asm/amd64/AMD64AsmOptions.java
--- a/graal/com.oracle.graal.asm.amd64/src/com/oracle/graal/asm/amd64/AMD64AsmOptions.java	Fri Dec 14 10:20:54 2012 +0100
+++ b/graal/com.oracle.graal.asm.amd64/src/com/oracle/graal/asm/amd64/AMD64AsmOptions.java	Fri Dec 14 13:02:49 2012 +0100
@@ -24,7 +24,7 @@
 public class AMD64AsmOptions {
 
     public static int Atomics = 0;
-    public static boolean UseNormalNop = true;
+    public static boolean UseNormalNop = false;
     public static boolean UseAddressNop = true;
     public static boolean UseIncDec = false;
     public static boolean UseXmmLoadAndClearUpper = true;
diff -r 2ed8d74e5984 -r 8a3efb8c831d graal/com.oracle.graal.compiler.test/src/com/oracle/graal/compiler/test/inlining/InliningTest.java
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/graal/com.oracle.graal.compiler.test/src/com/oracle/graal/compiler/test/inlining/InliningTest.java	Fri Dec 14 13:02:49 2012 +0100
@@ -0,0 +1,276 @@
+/*
+ * Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */ +package com.oracle.graal.compiler.test.inlining; + +import static org.junit.Assert.*; + +import java.util.concurrent.*; + +import org.junit.*; + +import com.oracle.graal.api.code.*; +import com.oracle.graal.compiler.test.*; +import com.oracle.graal.debug.*; +import com.oracle.graal.graph.*; +import com.oracle.graal.nodes.*; +import com.oracle.graal.phases.*; +import com.oracle.graal.phases.common.*; + +// TODO (chaeubl): add more test cases +@SuppressWarnings("unused") +public class InliningTest extends GraalCompilerTest { + @Test + public void testInvokeStaticInlining() { + assertInlined(getGraph("invokeStaticSnippet")); + assertInlined(getGraph("invokeStaticOnInstanceSnippet")); + } + + @SuppressWarnings("all") + public static Boolean invokeStaticSnippet(boolean value) { + return Boolean.valueOf(value); + } + @SuppressWarnings("all") + public static Boolean invokeStaticOnInstanceSnippet(Boolean obj, boolean value) { + return obj.valueOf(value); + } + + + @Test + public void testStaticBindableInlining() { + assertInlined(getGraph("invokeConstructorSnippet")); + assertInlined(getGraph("invokeFinalMethodSnippet")); + assertInlined(getGraph("invokeMethodOnFinalClassSnippet")); + } + + @SuppressWarnings("all") + public static Object invokeConstructorSnippet(int value) { + return new SuperClass(value); + } + @SuppressWarnings("all") + public static int invokeFinalMethodSnippet(SuperClass superClass, SubClassA subClassA, FinalSubClass finalSubClass) { + return superClass.publicFinalMethod() + + subClassA.publicFinalMethod() + + finalSubClass.publicFinalMethod() + + superClass.protectedFinalMethod() + + subClassA.protectedFinalMethod() + + finalSubClass.protectedFinalMethod(); + } + @SuppressWarnings("all") + public static int invokeMethodOnFinalClassSnippet(FinalSubClass finalSubClass) { + return finalSubClass.publicFinalMethod() + + finalSubClass.publicNotOverriddenMethod() + + finalSubClass.publicOverriddenMethod() + + finalSubClass.protectedFinalMethod() + + finalSubClass.protectedNotOverriddenMethod() + + finalSubClass.protectedOverriddenMethod(); + } + + + @Test + public void testClassHierarchyAnalysis() { + assertInlined(getGraph("invokeLeafClassMethodSnippet")); + assertInlined(getGraph("invokeConcreteMethodSnippet")); + assertInlined(getGraph("invokeSingleImplementorInterfaceSnippet")); + assertInlined(getGraph("invokeConcreteInterfaceMethodSnippet")); + + assertNotInlined(getGraph("invokeOverriddenInterfaceMethodSnippet")); + } + + @SuppressWarnings("all") + public static int invokeLeafClassMethodSnippet(SubClassA subClassA) { + return subClassA.publicFinalMethod() + + subClassA.publicNotOverriddenMethod() + + subClassA.publicOverriddenMethod(); + } + @SuppressWarnings("all") + public static int invokeConcreteMethodSnippet(SuperClass superClass) { + return superClass.publicNotOverriddenMethod() + + superClass.protectedNotOverriddenMethod(); + } + @SuppressWarnings("all") + public static int invokeSingleImplementorInterfaceSnippet(SingleImplementorInterface testInterface) { + return testInterface.publicNotOverriddenMethod() + + testInterface.publicOverriddenMethod(); + } + @SuppressWarnings("all") + public static int invokeConcreteInterfaceMethodSnippet(MultipleImplementorsInterface testInterface) { + return testInterface.publicNotOverriddenMethod(); + } + @SuppressWarnings("all") + public static int invokeOverriddenInterfaceMethodSnippet(MultipleImplementorsInterface testInterface) { + return testInterface.publicOverriddenMethod(); + } + + private StructuredGraph getGraph(final 
String snippet) { + return Debug.scope("InliningTest", new DebugDumpScope(snippet), new Callable() { + @Override + public StructuredGraph call() { + StructuredGraph graph = parse(snippet); + PhasePlan phasePlan = getDefaultPhasePlan(); + Assumptions assumptions = new Assumptions(true); + new ComputeProbabilityPhase().apply(graph); + Debug.dump(graph, "Graph"); + new InliningPhase(null, runtime(), null, assumptions, null, phasePlan, OptimisticOptimizations.ALL).apply(graph); + Debug.dump(graph, "Graph"); + new CanonicalizerPhase(null, runtime(), assumptions).apply(graph); + new DeadCodeEliminationPhase().apply(graph); + return graph; + } + }); + } + + private static StructuredGraph assertInlined(StructuredGraph graph) { + return assertNotInGraph(graph, Invoke.class); + } + + private static StructuredGraph assertNotInlined(StructuredGraph graph) { + return assertInGraph(graph, Invoke.class); + } + + private static StructuredGraph assertNotInGraph(StructuredGraph graph, Class clazz) { + for (Node node: graph.getNodes()) { + if (clazz.isInstance(node)) { + fail(node.toString()); + } + } + return graph; + } + + private static StructuredGraph assertInGraph(StructuredGraph graph, Class clazz) { + for (Node node: graph.getNodes()) { + if (clazz.isInstance(node)) { + return graph; + } + } + fail("Graph does not contain a node of class " + clazz.getName()); + return graph; + } + + + // some interfaces and classes for testing + private interface MultipleImplementorsInterface { + int publicNotOverriddenMethod(); + int publicOverriddenMethod(); + } + + private interface SingleImplementorInterface { + int publicNotOverriddenMethod(); + int publicOverriddenMethod(); + } + + private static class SuperClass implements MultipleImplementorsInterface { + protected int value; + + public SuperClass(int value) { + this.value = value; + } + + public int publicNotOverriddenMethod() { + return value; + } + + public int publicOverriddenMethod() { + return value; + } + + protected int protectedNotOverriddenMethod() { + return value; + } + + protected int protectedOverriddenMethod() { + return value; + } + + public final int publicFinalMethod() { + return value + 255; + } + + protected final int protectedFinalMethod() { + return value + 255; + } + } + + private static class SubClassA extends SuperClass implements SingleImplementorInterface { + public SubClassA(int value) { + super(value); + } + + @Override + public int publicOverriddenMethod() { + return value + 2; + } + + @Override + protected int protectedOverriddenMethod() { + return value * 2; + } + } + + private static class SubClassB extends SuperClass { + public SubClassB(int value) { + super(value); + } + + @Override + public int publicOverriddenMethod() { + return value + 3; + } + + @Override + protected int protectedOverriddenMethod() { + return value * 3; + } + } + + private static class SubClassC extends SuperClass { + public SubClassC(int value) { + super(value); + } + + @Override + public int publicOverriddenMethod() { + return value + 4; + } + + @Override + protected int protectedOverriddenMethod() { + return value * 4; + } + } + + private static final class FinalSubClass extends SuperClass { + public FinalSubClass(int value) { + super(value); + } + + @Override + public int publicOverriddenMethod() { + return value + 5; + } + + @Override + protected int protectedOverriddenMethod() { + return value * 5; + } + } +} diff -r 2ed8d74e5984 -r 8a3efb8c831d graal/com.oracle.graal.compiler/src/com/oracle/graal/compiler/gen/LIRGenerator.java --- 
a/graal/com.oracle.graal.compiler/src/com/oracle/graal/compiler/gen/LIRGenerator.java Fri Dec 14 10:20:54 2012 +0100 +++ b/graal/com.oracle.graal.compiler/src/com/oracle/graal/compiler/gen/LIRGenerator.java Fri Dec 14 13:02:49 2012 +0100 @@ -173,7 +173,7 @@ (isConstant(operand) && x.kind() == operand.getKind().getStackKind()) : operand.getKind() + " for node " + x; assert operand(x) == null : "operand cannot be set twice"; assert operand != null && isLegal(operand) : "operand must be legal"; - assert operand.getKind().getStackKind() == x.kind(); + assert operand.getKind().getStackKind() == x.kind() : operand.getKind().getStackKind() + " must match " + x.kind(); assert !(x instanceof VirtualObjectNode); nodeOperands.set(x, operand); return operand; diff -r 2ed8d74e5984 -r 8a3efb8c831d graal/com.oracle.graal.hotspot.amd64/src/com/oracle/graal/hotspot/amd64/AMD64HotSpotRuntime.java --- a/graal/com.oracle.graal.hotspot.amd64/src/com/oracle/graal/hotspot/amd64/AMD64HotSpotRuntime.java Fri Dec 14 10:20:54 2012 +0100 +++ b/graal/com.oracle.graal.hotspot.amd64/src/com/oracle/graal/hotspot/amd64/AMD64HotSpotRuntime.java Fri Dec 14 13:02:49 2012 +0100 @@ -34,6 +34,8 @@ import static com.oracle.graal.hotspot.nodes.NewMultiArrayStubCall.*; import static com.oracle.graal.hotspot.nodes.VMErrorNode.*; import static com.oracle.graal.hotspot.nodes.VerifyOopStubCall.*; +import static com.oracle.graal.hotspot.nodes.IdentityHashCodeStubCall.*; +import static com.oracle.graal.hotspot.nodes.ThreadIsInterruptedStubCall.*; import static com.oracle.graal.lir.amd64.AMD64Call.*; import com.oracle.graal.api.code.*; @@ -46,7 +48,6 @@ public AMD64HotSpotRuntime(HotSpotVMConfig config, HotSpotGraalRuntime graalRuntime) { super(config, graalRuntime); - HotSpotVMConfig c = config; Kind word = graalRuntime.getTarget().wordKind; addRuntimeCall(DEOPTIMIZE, config.deoptimizeStub, @@ -80,7 +81,7 @@ /* arg0: object */ arg(0, Kind.Object), /* arg1: lock */ arg(1, word)); - addRuntimeCall(MONITOREXIT, c.monitorExitStub, + addRuntimeCall(MONITOREXIT, config.monitorExitStub, /* temps */ null, /* ret */ ret(Kind.Void), /* arg0: object */ arg(0, Kind.Object), @@ -92,7 +93,7 @@ /* arg0: hub */ rdx.asValue(word), /* arg1: length */ rbx.asValue(Kind.Int)); - addRuntimeCall(NEW_ARRAY_SLOW, c.newArrayStub, + addRuntimeCall(NEW_ARRAY_SLOW, config.newArrayStub, /* temps */ null, /* ret */ rax.asValue(Kind.Object), /* arg0: hub */ rdx.asValue(word), @@ -103,30 +104,40 @@ /* ret */ rax.asValue(Kind.Object), /* arg0: hub */ rdx.asValue(word)); - addRuntimeCall(NEW_INSTANCE_SLOW, c.newInstanceStub, + addRuntimeCall(NEW_INSTANCE_SLOW, config.newInstanceStub, /* temps */ null, /* ret */ rax.asValue(Kind.Object), /* arg0: hub */ rdx.asValue(word)); - addRuntimeCall(NEW_MULTI_ARRAY, c.newMultiArrayStub, + addRuntimeCall(NEW_MULTI_ARRAY, config.newMultiArrayStub, /* temps */ null, /* ret */ rax.asValue(Kind.Object), /* arg0: hub */ rax.asValue(word), /* arg1: rank */ rbx.asValue(Kind.Int), /* arg2: dims */ rcx.asValue(word)); - addRuntimeCall(VERIFY_OOP, c.verifyOopStub, + addRuntimeCall(VERIFY_OOP, config.verifyOopStub, /* temps */ null, /* ret */ ret(Kind.Void), /* arg0: object */ r13.asValue(Kind.Object)); - addRuntimeCall(VM_ERROR, c.vmErrorStub, + addRuntimeCall(VM_ERROR, config.vmErrorStub, /* temps */ null, /* ret */ ret(Kind.Void), /* arg0: where */ arg(0, Kind.Object), /* arg1: format */ arg(1, Kind.Object), /* arg2: value */ arg(2, Kind.Long)); + addRuntimeCall(IDENTITY_HASHCODE, config.identityHashCodeStub, + /* temps */ null, + /* ret */ 
rax.asValue(Kind.Int), + /* arg0: obj */ rdx.asValue(Kind.Object)); + + addRuntimeCall(THREAD_IS_INTERRUPTED, config.threadIsInterruptedStub, + /* temps */ null, + /* ret */ rax.asValue(Kind.Int), + /* arg0: thread */ arg(0, Kind.Object), + /* arg1: clearInterrupted */ arg(1, Kind.Boolean)); } @Override diff -r 2ed8d74e5984 -r 8a3efb8c831d graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/HotSpotGraalRuntime.java --- a/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/HotSpotGraalRuntime.java Fri Dec 14 10:20:54 2012 +0100 +++ b/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/HotSpotGraalRuntime.java Fri Dec 14 13:02:49 2012 +0100 @@ -37,6 +37,7 @@ import com.oracle.graal.hotspot.meta.*; import com.oracle.graal.nodes.spi.*; import com.oracle.graal.phases.*; +import com.oracle.graal.snippets.*; /** * Singleton class holding the instance of the {@link GraalRuntime}. @@ -257,15 +258,12 @@ @SuppressWarnings("unchecked") @Override public T getCapability(Class clazz) { - if (clazz == GraalCodeCacheProvider.class || clazz == MetaAccessProvider.class) { + if (clazz == GraalCodeCacheProvider.class || clazz == MetaAccessProvider.class || clazz == SnippetProvider.class) { return (T) getRuntime(); } if (clazz == GraalCompiler.class) { return (T) getCompiler(); } - if (clazz == MetaAccessProvider.class) { - return (T) getRuntime(); - } if (clazz == RuntimeInterpreterInterface.class) { return (T) getRuntimeInterpreterInterface(); } diff -r 2ed8d74e5984 -r 8a3efb8c831d graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/HotSpotVMConfig.java --- a/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/HotSpotVMConfig.java Fri Dec 14 10:20:54 2012 +0100 +++ b/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/HotSpotVMConfig.java Fri Dec 14 13:02:49 2012 +0100 @@ -128,6 +128,16 @@ public int threadObjectOffset; /** + * The value of JavaThread::osthread_offset(). + */ + public int osThreadOffset; + + /** + * The value of OSThread::interrupted_offset(). + */ + public int osThreadInterruptedOffset; + + /** * The value of markOopDesc::unlocked_value. */ public int unlockedMask; @@ -153,6 +163,16 @@ public int biasedLockPattern; /** + * Identity hash code value when uninitialized. + */ + public int uninitializedIdentityHashCodeValue; + + /** + * Mark word right shift to get identity hash code. + */ + public int identityHashCodeShift; + + /** * Offset of _access_flags in metaspace Method object. */ public int methodAccessFlagsOffset; @@ -202,6 +222,26 @@ public int klassAccessFlagsOffset; /** + * The offset of the _layout_helper field in a Klass. + */ + public int klassLayoutHelperOffset; + + /** + * Bit pattern in the klass layout helper that can be used to identify arrays. + */ + public int arrayKlassLayoutHelperIdentifier; + + /** + * The offset of the _componentMirror field in an ArrayKlass. + */ + public int arrayKlassComponentMirrorOffset; + + /** + * The offset of the _super field in a Klass. + */ + public int klassSuperKlassOffset; + + /** * The offset of the injected klass field in a {@link Class}. 
*/ public int klassOffset; @@ -286,8 +326,10 @@ public long logPrimitiveStub; public long logObjectStub; public long logPrintfStub; - public int deoptReasonNone; + public long threadIsInterruptedStub; + public long identityHashCodeStub; + public int deoptReasonNullCheck; public int deoptReasonRangeCheck; public int deoptReasonClassCheck; diff -r 2ed8d74e5984 -r 8a3efb8c831d graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/bridge/CompilerToVM.java --- a/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/bridge/CompilerToVM.java Fri Dec 14 10:20:54 2012 +0100 +++ b/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/bridge/CompilerToVM.java Fri Dec 14 13:02:49 2012 +0100 @@ -81,6 +81,14 @@ long getUniqueConcreteMethod(long metaspaceMethod, HotSpotResolvedObjectType[] resultHolder); /** + * Used to determine if an interface has exactly one implementor. + * + * @param interfaceType interface for which the implementor should be returned + * @return the unique implementor of the interface or null if the interface has 0 or more than 1 implementor + */ + ResolvedJavaType getUniqueImplementor(HotSpotResolvedObjectType interfaceType); + + /** * Gets the invocation count for a method. * * @param metaspaceMethod the metaspace Method object to query diff -r 2ed8d74e5984 -r 8a3efb8c831d graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/bridge/CompilerToVMImpl.java --- a/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/bridge/CompilerToVMImpl.java Fri Dec 14 10:20:54 2012 +0100 +++ b/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/bridge/CompilerToVMImpl.java Fri Dec 14 13:02:49 2012 +0100 @@ -69,6 +69,9 @@ public native long getUniqueConcreteMethod(long metaspaceMethod, HotSpotResolvedObjectType[] resultHolder); @Override + public native ResolvedJavaType getUniqueImplementor(HotSpotResolvedObjectType interfaceType); + + @Override public native int getInvocationCount(long metaspaceMethod); @Override diff -r 2ed8d74e5984 -r 8a3efb8c831d graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/bridge/VMToCompilerImpl.java --- a/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/bridge/VMToCompilerImpl.java Fri Dec 14 10:20:54 2012 +0100 +++ b/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/bridge/VMToCompilerImpl.java Fri Dec 14 13:02:49 2012 +0100 @@ -574,7 +574,7 @@ if (onStackReplacement) { phasePlan.addPhase(PhasePosition.AFTER_PARSING, new OnStackReplacementPhase()); } - if (GraalOptions.Intrinsify) { + if (GraalOptions.Intrinsify && GraalOptions.IntrinsifyArrayCopy) { phasePlan.addPhase(PhasePosition.HIGH_LEVEL, intrinsifyArrayCopy); } return phasePlan; diff -r 2ed8d74e5984 -r 8a3efb8c831d graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/meta/HotSpotResolvedObjectType.java --- a/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/meta/HotSpotResolvedObjectType.java Fri Dec 14 10:20:54 2012 +0100 +++ b/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/meta/HotSpotResolvedObjectType.java Fri Dec 14 13:02:49 2012 +0100 @@ -166,6 +166,8 @@ HotSpotVMConfig config = HotSpotGraalRuntime.getInstance().getConfig(); if (isArray()) { return isFinal(getElementalType(this).getModifiers()) ? 
this : null; + } else if (isInterface()) { + return HotSpotGraalRuntime.getInstance().getCompilerToVM().getUniqueImplementor(this); } else { HotSpotResolvedObjectType type = this; while (isAbstract(type.getModifiers())) { @@ -175,7 +177,7 @@ } type = (HotSpotResolvedObjectType) fromMetaspaceKlass(subklass); } - if (unsafeReadWord(type.metaspaceKlass + config.subklassOffset) != 0) { + if (isAbstract(type.getModifiers()) || type.isInterface() || unsafeReadWord(type.metaspaceKlass + config.subklassOffset) != 0) { return null; } return type; diff -r 2ed8d74e5984 -r 8a3efb8c831d graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/meta/HotSpotRuntime.java --- a/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/meta/HotSpotRuntime.java Fri Dec 14 10:20:54 2012 +0100 +++ b/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/meta/HotSpotRuntime.java Fri Dec 14 13:02:49 2012 +0100 @@ -70,7 +70,7 @@ /** * HotSpot implementation of {@link GraalCodeCacheProvider}. */ -public abstract class HotSpotRuntime implements GraalCodeCacheProvider { +public abstract class HotSpotRuntime implements GraalCodeCacheProvider, SnippetProvider { public final HotSpotVMConfig config; protected final RegisterConfig regConfig; @@ -292,12 +292,24 @@ protected abstract RegisterConfig createRegisterConfig(boolean globalStubConfig); public void installSnippets(SnippetInstaller installer, Assumptions assumptions) { - installer.install(ObjectSnippets.class); - installer.install(ClassSnippets.class); - installer.install(ThreadSnippets.class); - installer.install(SystemSnippets.class); - installer.install(UnsafeSnippets.class); - installer.install(ArrayCopySnippets.class); + if (GraalOptions.IntrinsifyObjectMethods) { + installer.install(ObjectSnippets.class); + } + if (GraalOptions.IntrinsifySystemMethods) { + installer.install(SystemSnippets.class); + } + if (GraalOptions.IntrinsifyThreadMethods) { + installer.install(ThreadSnippets.class); + } + if (GraalOptions.IntrinsifyUnsafeMethods) { + installer.install(UnsafeSnippets.class); + } + if (GraalOptions.IntrinsifyClassMethods) { + installer.install(ClassSnippets.class); + } + if (GraalOptions.IntrinsifyArrayCopy) { + installer.install(ArrayCopySnippets.class); + } installer.install(CheckCastSnippets.class); installer.install(InstanceOfSnippets.class); diff -r 2ed8d74e5984 -r 8a3efb8c831d graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/nodes/CurrentThread.java diff -r 2ed8d74e5984 -r 8a3efb8c831d graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/nodes/DirectStoreNode.java --- a/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/nodes/DirectStoreNode.java Fri Dec 14 10:20:54 2012 +0100 +++ b/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/nodes/DirectStoreNode.java Fri Dec 14 13:02:49 2012 +0100 @@ -44,13 +44,28 @@ } @NodeIntrinsic - public static native void store(long address, long value); + public static native void store(long address, boolean value); + + @NodeIntrinsic + public static native void store(long address, byte value); + + @NodeIntrinsic + public static native void store(long address, short value); + + @NodeIntrinsic + public static native void store(long address, char value); @NodeIntrinsic public static native void store(long address, int value); @NodeIntrinsic - public static native void store(long address, boolean value); + public static native void store(long address, long value); + + @NodeIntrinsic + public static native void store(long address, float value); + + @NodeIntrinsic + public 
static native void store(long address, double value); @Override public void generate(LIRGeneratorTool gen) { diff -r 2ed8d74e5984 -r 8a3efb8c831d graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/nodes/HotSpotCurrentRawThreadNode.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/nodes/HotSpotCurrentRawThreadNode.java Fri Dec 14 13:02:49 2012 +0100 @@ -0,0 +1,46 @@ +/* + * Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved. + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. + * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). + * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. + * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. + */ +package com.oracle.graal.hotspot.nodes; + +import com.oracle.graal.api.code.*; +import com.oracle.graal.hotspot.*; +import com.oracle.graal.nodes.calc.*; +import com.oracle.graal.nodes.spi.*; +import com.oracle.graal.nodes.type.*; +import com.oracle.graal.snippets.*; + + +public class HotSpotCurrentRawThreadNode extends FloatingNode implements LIRLowerable { + public HotSpotCurrentRawThreadNode() { + super(StampFactory.forWord()); + } + + @Override + public void generate(LIRGeneratorTool gen) { + Register rawThread = HotSpotGraalRuntime.getInstance().getRuntime().threadRegister(); + gen.setResult(this, rawThread.asValue(this.kind())); + } + + @NodeIntrinsic + public static native Word get(); +} diff -r 2ed8d74e5984 -r 8a3efb8c831d graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/nodes/IdentityHashCodeStubCall.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/nodes/IdentityHashCodeStubCall.java Fri Dec 14 13:02:49 2012 +0100 @@ -0,0 +1,55 @@ +/* + * Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved. + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. + * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). + * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 
+ * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. + */ +package com.oracle.graal.hotspot.nodes; + +import com.oracle.graal.api.code.*; +import com.oracle.graal.api.code.RuntimeCallTarget.Descriptor; +import com.oracle.graal.api.meta.*; +import com.oracle.graal.compiler.gen.*; +import com.oracle.graal.compiler.target.*; +import com.oracle.graal.lir.*; +import com.oracle.graal.nodes.*; +import com.oracle.graal.nodes.type.*; + +/** + * Node implementing a call to HotSpot's {@code graal_identityhashcode} stub. + */ +public class IdentityHashCodeStubCall extends FixedWithNextNode implements LIRGenLowerable { + @Input private final ValueNode object; + public static final Descriptor IDENTITY_HASHCODE = new Descriptor("identity_hashcode", false, Kind.Int, Kind.Object); + + public IdentityHashCodeStubCall(ValueNode object) { + super(StampFactory.forKind(Kind.Int)); + this.object = object; + } + + @Override + public void generate(LIRGenerator gen) { + RuntimeCallTarget stub = gen.getRuntime().lookupRuntimeCall(IdentityHashCodeStubCall.IDENTITY_HASHCODE); + Variable result = gen.emitCall(stub, stub.getCallingConvention(), true, gen.operand(object)); + gen.setResult(this, result); + } + + @NodeIntrinsic + public static native int call(Object object); +} diff -r 2ed8d74e5984 -r 8a3efb8c831d graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/nodes/ThreadIsInterruptedStubCall.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/nodes/ThreadIsInterruptedStubCall.java Fri Dec 14 13:02:49 2012 +0100 @@ -0,0 +1,57 @@ +/* + * Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved. + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. + * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). + * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. + * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. + */ +package com.oracle.graal.hotspot.nodes; + +import com.oracle.graal.api.code.*; +import com.oracle.graal.api.code.RuntimeCallTarget.Descriptor; +import com.oracle.graal.api.meta.*; +import com.oracle.graal.compiler.gen.*; +import com.oracle.graal.compiler.target.*; +import com.oracle.graal.lir.*; +import com.oracle.graal.nodes.*; +import com.oracle.graal.nodes.type.*; + +/** + * Node implementing a call to HotSpot's ThreadIsInterrupted stub. 
+ */ +public class ThreadIsInterruptedStubCall extends FixedWithNextNode implements LIRGenLowerable { + @Input private final ValueNode thread; + @Input private final ValueNode clearIsInterrupted; + public static final Descriptor THREAD_IS_INTERRUPTED = new Descriptor("thread_is_interrupted", false, Kind.Int, Kind.Object, Kind.Boolean); + + public ThreadIsInterruptedStubCall(ValueNode thread, ValueNode clearIsInterrupted) { + super(StampFactory.forInteger(Kind.Int, 0, 1)); + this.thread = thread; + this.clearIsInterrupted = clearIsInterrupted; + } + + @Override + public void generate(LIRGenerator gen) { + RuntimeCallTarget stub = gen.getRuntime().lookupRuntimeCall(ThreadIsInterruptedStubCall.THREAD_IS_INTERRUPTED); + Variable result = gen.emitCall(stub, stub.getCallingConvention(), true, gen.operand(thread), gen.operand(clearIsInterrupted)); + gen.setResult(this, result); + } + + @NodeIntrinsic + public static native int call(Thread thread, boolean clearIsInterrupted); +} diff -r 2ed8d74e5984 -r 8a3efb8c831d graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/snippets/ClassSnippets.java --- a/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/snippets/ClassSnippets.java Fri Dec 14 10:20:54 2012 +0100 +++ b/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/snippets/ClassSnippets.java Fri Dec 14 13:02:49 2012 +0100 @@ -1,5 +1,5 @@ /* - * Copyright (c) 2011, Oracle and/or its affiliates. All rights reserved. + * Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it @@ -33,9 +33,9 @@ */ @ClassSubstitution(java.lang.Class.class) public class ClassSnippets implements SnippetsInterface { - - public int getModifiers() { - Word klass = loadWordFromObject(this, klassOffset()); + @InstanceMethodSubstitution + public static int getModifiers(final Class thisObj) { + Word klass = loadWordFromObject(thisObj, klassOffset()); if (klass == Word.zero()) { // Class for primitive type return Modifier.ABSTRACT | Modifier.FINAL | Modifier.PUBLIC; @@ -43,4 +43,66 @@ return loadIntFromWord(klass, klassModifierFlagsOffset()); } } + + @InstanceMethodSubstitution + public static boolean isInterface(final Class thisObj) { + Word klass = loadWordFromObject(thisObj, klassOffset()); + if (klass == Word.zero()) { + return false; + } else { + int accessFlags = loadIntFromWord(klass, klassAccessFlagsOffset()); + return (accessFlags & Modifier.INTERFACE) != 0; + } + } + + @InstanceMethodSubstitution + public static boolean isArray(final Class thisObj) { + Word klass = loadWordFromObject(thisObj, klassOffset()); + if (klass == Word.zero()) { + return false; + } else { + int layoutHelper = loadIntFromWord(klass, klassLayoutHelperOffset()); + return (layoutHelper & arrayKlassLayoutHelperIdentifier()) != 0; + } + } + + @InstanceMethodSubstitution + public static boolean isPrimitive(final Class thisObj) { + Word klass = loadWordFromObject(thisObj, klassOffset()); + return klass == Word.zero(); + } + + @InstanceMethodSubstitution + public static Class getSuperclass(final Class thisObj) { + Word klass = loadWordFromObject(thisObj, klassOffset()); + if (klass != Word.zero()) { + int accessFlags = loadIntFromWord(klass, klassAccessFlagsOffset()); + if ((accessFlags & Modifier.INTERFACE) == 0) { + int layoutHelper = loadIntFromWord(klass, klassLayoutHelperOffset()); + if ((layoutHelper & arrayKlassLayoutHelperIdentifier()) != 0) { + return Object.class; + } 
else { + Word superKlass = loadWordFromWord(klass, klassSuperKlassOffset()); + if (superKlass == Word.zero()) { + return null; + } else { + return (Class) loadObjectFromWord(superKlass, classMirrorOffset()); + } + } + } + } + return null; + } + + @InstanceMethodSubstitution + public static Class getComponentType(final Class thisObj) { + Word klass = loadWordFromObject(thisObj, klassOffset()); + if (klass != Word.zero()) { + int layoutHelper = loadIntFromWord(klass, klassLayoutHelperOffset()); + if ((layoutHelper & arrayKlassLayoutHelperIdentifier()) != 0) { + return (Class) loadObjectFromWord(klass, arrayKlassComponentMirrorOffset()); + } + } + return null; + } } diff -r 2ed8d74e5984 -r 8a3efb8c831d graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/snippets/HotSpotSnippetUtils.java --- a/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/snippets/HotSpotSnippetUtils.java Fri Dec 14 10:20:54 2012 +0100 +++ b/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/snippets/HotSpotSnippetUtils.java Fri Dec 14 13:02:49 2012 +0100 @@ -60,6 +60,21 @@ } @Fold + public static int threadObjectOffset() { + return HotSpotGraalRuntime.getInstance().getConfig().threadObjectOffset; + } + + @Fold + public static int osThreadOffset() { + return HotSpotGraalRuntime.getInstance().getConfig().osThreadOffset; + } + + @Fold + public static int osThreadInterruptedOffset() { + return HotSpotGraalRuntime.getInstance().getConfig().osThreadInterruptedOffset; + } + + @Fold public static Kind wordKind() { return HotSpotGraalRuntime.getInstance().getTarget().wordKind; } @@ -95,6 +110,46 @@ } @Fold + public static int klassOffset() { + return HotSpotGraalRuntime.getInstance().getConfig().klassOffset; + } + + @Fold + public static int klassModifierFlagsOffset() { + return HotSpotGraalRuntime.getInstance().getConfig().klassModifierFlagsOffset; + } + + @Fold + public static int klassAccessFlagsOffset() { + return HotSpotGraalRuntime.getInstance().getConfig().klassAccessFlagsOffset; + } + + @Fold + public static int klassLayoutHelperOffset() { + return HotSpotGraalRuntime.getInstance().getConfig().klassLayoutHelperOffset; + } + + @Fold + public static int arrayKlassLayoutHelperIdentifier() { + return HotSpotGraalRuntime.getInstance().getConfig().arrayKlassLayoutHelperIdentifier; + } + + @Fold + public static int arrayKlassComponentMirrorOffset() { + return HotSpotGraalRuntime.getInstance().getConfig().arrayKlassComponentMirrorOffset; + } + + @Fold + public static int klassSuperKlassOffset() { + return HotSpotGraalRuntime.getInstance().getConfig().klassSuperKlassOffset; + } + + @Fold + public static int classMirrorOffset() { + return HotSpotGraalRuntime.getInstance().getConfig().classMirrorOffset; + } + + @Fold public static int markOffset() { return config().markOffset; } @@ -207,6 +262,16 @@ return config().useBiasedLocking; } + @Fold + static int uninitializedIdentityHashCodeValue() { + return HotSpotGraalRuntime.getInstance().getConfig().uninitializedIdentityHashCodeValue; + } + + @Fold + static int identityHashCodeShift() { + return HotSpotGraalRuntime.getInstance().getConfig().identityHashCodeShift; + } + /** * Loads the hub from a object, null checking it first. 
*/ @@ -244,6 +309,10 @@ return loadWordFromWordIntrinsic(address, 0, offset, wordKind()); } + static Object loadObjectFromWord(Word address, int offset) { + return UnsafeLoadNode.load(address, 0, offset, Kind.Object); + } + public static Word loadWordFromObject(Object object, int offset) { return loadWordFromObjectIntrinsic(object, 0, offset, wordKind()); } diff -r 2ed8d74e5984 -r 8a3efb8c831d graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/snippets/ObjectSnippets.java --- a/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/snippets/ObjectSnippets.java Fri Dec 14 10:20:54 2012 +0100 +++ b/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/snippets/ObjectSnippets.java Fri Dec 14 13:02:49 2012 +0100 @@ -1,5 +1,5 @@ /* - * Copyright (c) 2011, Oracle and/or its affiliates. All rights reserved. + * Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it @@ -24,6 +24,7 @@ import static com.oracle.graal.hotspot.snippets.HotSpotSnippetUtils.*; +import com.oracle.graal.hotspot.nodes.*; import com.oracle.graal.snippets.*; import com.oracle.graal.snippets.ClassSubstitution.MethodSubstitution; @@ -32,11 +33,25 @@ */ @ClassSubstitution(java.lang.Object.class) public class ObjectSnippets implements SnippetsInterface { + @InstanceMethodSubstitution("getClass") + public static Class getClassSnippet(final Object thisObj) { + Word hub = loadHub(thisObj); + return (Class) readFinalObject(hub, classMirrorOffset()); + } - @MethodSubstitution("getClass") - public Class getClass_() { - Word hub = loadHub(this); - Object mirror = readFinalObject(hub, classMirrorOffset()); - return (Class) mirror; + @InstanceMethodSubstitution + public static int hashCode(final Object thisObj) { + Word mark = loadWordFromObject(thisObj, markOffset()); + + // this code is independent from biased locking (although it does not look that way) + final Word biasedLock = mark.and(biasedLockMaskInPlace()); + if (biasedLock.toLong() == unlockedMask()) { + int hash = (int) (mark.toLong() >>> identityHashCodeShift()); + if (hash != uninitializedIdentityHashCodeValue()) { + return hash; + } + } + + return IdentityHashCodeStubCall.call(thisObj); } } diff -r 2ed8d74e5984 -r 8a3efb8c831d graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/snippets/SystemSnippets.java --- a/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/snippets/SystemSnippets.java Fri Dec 14 10:20:54 2012 +0100 +++ b/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/snippets/SystemSnippets.java Fri Dec 14 13:02:49 2012 +0100 @@ -23,9 +23,12 @@ package com.oracle.graal.hotspot.snippets; import com.oracle.graal.api.code.RuntimeCallTarget.Descriptor; +import static com.oracle.graal.hotspot.snippets.HotSpotSnippetUtils.*; + import com.oracle.graal.api.meta.*; import com.oracle.graal.graph.Node.ConstantNodeParameter; import com.oracle.graal.graph.Node.NodeIntrinsic; +import com.oracle.graal.hotspot.nodes.*; import com.oracle.graal.nodes.extended.*; import com.oracle.graal.snippets.*; @@ -46,7 +49,25 @@ return callLong(JAVA_TIME_NANOS); } + public static int identityHashCode(Object x) { + if (x == null) { + return 0; + } + + Word mark = loadWordFromObject(x, markOffset()); + + // this code is independent from biased locking (although it does not look that way) + final Word biasedLock = mark.and(biasedLockMaskInPlace()); + if (biasedLock.toLong() == unlockedMask()) { + int 
hash = (int) (mark.toLong() >>> identityHashCodeShift()); + if (hash != uninitializedIdentityHashCodeValue()) { + return hash; + } + } + + return IdentityHashCodeStubCall.call(x); + } + @NodeIntrinsic(value = RuntimeCallNode.class, setStampFromReturnType = true) public static native long callLong(@ConstantNodeParameter Descriptor descriptor); - } diff -r 2ed8d74e5984 -r 8a3efb8c831d graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/snippets/ThreadSnippets.java --- a/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/snippets/ThreadSnippets.java Fri Dec 14 10:20:54 2012 +0100 +++ b/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/snippets/ThreadSnippets.java Fri Dec 14 13:02:49 2012 +0100 @@ -1,5 +1,5 @@ /* - * Copyright (c) 2011, Oracle and/or its affiliates. All rights reserved. + * Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it @@ -22,16 +22,33 @@ */ package com.oracle.graal.hotspot.snippets; +import static com.oracle.graal.hotspot.snippets.HotSpotSnippetUtils.*; + import com.oracle.graal.hotspot.nodes.*; import com.oracle.graal.snippets.*; -/** + * Snippets for {@link java.lang.Thread} methods. */ @ClassSubstitution(java.lang.Thread.class) public class ThreadSnippets implements SnippetsInterface { - public static Thread currentThread() { return CurrentThread.get(); } + + @InstanceMethodSubstitution + @SuppressWarnings("unused") + private static boolean isInterrupted(final Thread thisObject, boolean clearInterrupted) { + Word rawThread = HotSpotCurrentRawThreadNode.get(); + Thread thread = (Thread) loadObjectFromWord(rawThread, threadObjectOffset()); + if (thisObject == thread) { + Word osThread = loadWordFromWord(rawThread, osThreadOffset()); + boolean interrupted = loadIntFromWord(osThread, osThreadInterruptedOffset()) != 0; + if (!interrupted || !clearInterrupted) { + return interrupted; + } + } + + return ThreadIsInterruptedStubCall.call(thisObject, clearInterrupted) != 0; + } } diff -r 2ed8d74e5984 -r 8a3efb8c831d graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/snippets/UnsafeSnippets.java --- a/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/snippets/UnsafeSnippets.java Fri Dec 14 10:20:54 2012 +0100 +++ b/graal/com.oracle.graal.hotspot/src/com/oracle/graal/hotspot/snippets/UnsafeSnippets.java Fri Dec 14 13:02:49 2012 +0100 @@ -68,6 +68,12 @@ MembarNode.memoryBarrier(MemoryBarriers.JMM_POST_VOLATILE_WRITE); } + public void putOrderedObject(Object o, long offset, Object x) { + MembarNode.memoryBarrier(MemoryBarriers.JMM_PRE_VOLATILE_WRITE); + putObject(o, offset, x); + MembarNode.memoryBarrier(MemoryBarriers.JMM_POST_VOLATILE_WRITE); + } + public int getInt(Object o, long offset) { Integer value = UnsafeLoadNode.load(o, 0, offset, Kind.Int); return value; @@ -90,6 +96,12 @@ MembarNode.memoryBarrier(MemoryBarriers.JMM_POST_VOLATILE_WRITE); } + public void putOrderedInt(Object o, long offset, int x) { + MembarNode.memoryBarrier(MemoryBarriers.JMM_PRE_VOLATILE_WRITE); + putInt(o, offset, x); + MembarNode.memoryBarrier(MemoryBarriers.JMM_POST_VOLATILE_WRITE); + } + public boolean getBoolean(Object o, long offset) { @JavacBug(id = 6995200) Boolean result = UnsafeLoadNode.load(o, 0, offset, Kind.Boolean); @@ -205,6 +217,12 @@ MembarNode.memoryBarrier(MemoryBarriers.JMM_POST_VOLATILE_WRITE); } + public void putOrderedLong(Object o, long offset, long x) { + 
MembarNode.memoryBarrier(MemoryBarriers.JMM_PRE_VOLATILE_WRITE); + putLong(o, offset, x); + MembarNode.memoryBarrier(MemoryBarriers.JMM_POST_VOLATILE_WRITE); + } + public float getFloat(Object o, long offset) { @JavacBug(id = 6995200) Float result = UnsafeLoadNode.load(o, 0, offset, Kind.Float); @@ -251,6 +269,18 @@ MembarNode.memoryBarrier(MemoryBarriers.JMM_POST_VOLATILE_WRITE); } + public void putByte(long address, byte value) { + DirectStoreNode.store(address, value); + } + + public void putShort(long address, short value) { + DirectStoreNode.store(address, value); + } + + public void putChar(long address, char value) { + DirectStoreNode.store(address, value); + } + public void putInt(long address, int value) { DirectStoreNode.store(address, value); } @@ -259,6 +289,26 @@ DirectStoreNode.store(address, value); } + public void putFloat(long address, float value) { + DirectStoreNode.store(address, value); + } + + public void putDouble(long address, double value) { + DirectStoreNode.store(address, value); + } + + public byte getByte(long address) { + return DirectReadNode.read(address, Kind.Byte); + } + + public short getShort(long address) { + return DirectReadNode.read(address, Kind.Short); + } + + public char getChar(long address) { + return DirectReadNode.read(address, Kind.Char); + } + public int getInt(long address) { return DirectReadNode.read(address, Kind.Int); } @@ -266,4 +316,12 @@ public long getLong(long address) { return DirectReadNode.read(address, Kind.Long); } + + public float getFloat(long address) { + return DirectReadNode.read(address, Kind.Float); + } + + public double getDouble(long address) { + return DirectReadNode.read(address, Kind.Double); + } } diff -r 2ed8d74e5984 -r 8a3efb8c831d graal/com.oracle.graal.lir.amd64/src/com/oracle/graal/lir/amd64/AMD64Call.java --- a/graal/com.oracle.graal.lir.amd64/src/com/oracle/graal/lir/amd64/AMD64Call.java Fri Dec 14 10:20:54 2012 +0100 +++ b/graal/com.oracle.graal.lir.amd64/src/com/oracle/graal/lir/amd64/AMD64Call.java Fri Dec 14 13:02:49 2012 +0100 @@ -65,9 +65,7 @@ // make sure that the displacement word of the call ends up word aligned int offset = masm.codeBuffer.position(); offset += tasm.target.arch.getMachineCodeCallDisplacementOffset(); - while (offset++ % tasm.target.wordSize != 0) { - masm.nop(); - } + masm.nop(tasm.target.wordSize - offset % tasm.target.wordSize); } } diff -r 2ed8d74e5984 -r 8a3efb8c831d graal/com.oracle.graal.lir.amd64/src/com/oracle/graal/lir/amd64/AMD64MathIntrinsicOp.java --- a/graal/com.oracle.graal.lir.amd64/src/com/oracle/graal/lir/amd64/AMD64MathIntrinsicOp.java Fri Dec 14 10:20:54 2012 +0100 +++ b/graal/com.oracle.graal.lir.amd64/src/com/oracle/graal/lir/amd64/AMD64MathIntrinsicOp.java Fri Dec 14 13:02:49 2012 +0100 @@ -33,7 +33,7 @@ public enum IntrinsicOpcode { SQRT, SIN, COS, TAN, - LOG, LOG10; + LOG, LOG10 } @Opcode private final IntrinsicOpcode opcode; diff -r 2ed8d74e5984 -r 8a3efb8c831d graal/com.oracle.graal.nodes/src/com/oracle/graal/nodes/extended/LoadHubNode.java --- a/graal/com.oracle.graal.nodes/src/com/oracle/graal/nodes/extended/LoadHubNode.java Fri Dec 14 10:20:54 2012 +0100 +++ b/graal/com.oracle.graal.nodes/src/com/oracle/graal/nodes/extended/LoadHubNode.java Fri Dec 14 13:02:49 2012 +0100 @@ -22,8 +22,8 @@ */ package com.oracle.graal.nodes.extended; +import com.oracle.graal.api.meta.*; import com.oracle.graal.api.meta.ResolvedJavaType.Representation; -import com.oracle.graal.api.meta.*; import com.oracle.graal.nodes.*; import com.oracle.graal.nodes.spi.*; import 
com.oracle.graal.nodes.type.*; diff -r 2ed8d74e5984 -r 8a3efb8c831d graal/com.oracle.graal.phases.common/src/com/oracle/graal/phases/common/InliningUtil.java --- a/graal/com.oracle.graal.phases.common/src/com/oracle/graal/phases/common/InliningUtil.java Fri Dec 14 10:20:54 2012 +0100 +++ b/graal/com.oracle.graal.phases.common/src/com/oracle/graal/phases/common/InliningUtil.java Fri Dec 14 13:02:49 2012 +0100 @@ -27,6 +27,7 @@ import java.util.concurrent.*; import com.oracle.graal.api.code.*; +import com.oracle.graal.api.code.Assumptions.Assumption; import com.oracle.graal.api.meta.*; import com.oracle.graal.api.meta.JavaTypeProfile.ProfiledType; import com.oracle.graal.api.meta.ResolvedJavaType.Representation; @@ -90,9 +91,13 @@ } private static InlineInfo logNotInlinedMethodAndReturnNull(Invoke invoke, ResolvedJavaMethod method, String msg) { + return logNotInlinedMethodAndReturnNull(invoke, method, msg, new Object[0]); + } + + private static InlineInfo logNotInlinedMethodAndReturnNull(Invoke invoke, ResolvedJavaMethod method, String msg, Object... args) { if (shouldLogInliningDecision()) { String methodString = methodName(method, invoke); - logInliningDecision(methodString, false, msg, new Object[0]); + logInliningDecision(methodString, false, msg, args); } return null; } @@ -285,7 +290,7 @@ @Override public String toString() { - return "type-checked " + MetaUtil.format("%H.%n(%p):%r", concrete); + return "type-checked with type " + type.getName() + " and method " + MetaUtil.format("%H.%n(%p):%r", concrete); } } @@ -295,15 +300,15 @@ */ private static class MultiTypeGuardInlineInfo extends AbstractInlineInfo { public final List concretes; - public final ProfiledType[] ptypes; + public final ArrayList ptypes; public final int[] typesToConcretes; public final double notRecordedTypeProbability; - public MultiTypeGuardInlineInfo(Invoke invoke, double weight, List concretes, ProfiledType[] ptypes, + public MultiTypeGuardInlineInfo(Invoke invoke, double weight, ArrayList concretes, ArrayList ptypes, int[] typesToConcretes, double notRecordedTypeProbability) { super(invoke, weight); - assert concretes.size() > 0 && concretes.size() <= ptypes.length : "must have at least one method but no more than types methods"; - assert ptypes.length == typesToConcretes.length : "array lengths must match"; + assert concretes.size() > 0 && concretes.size() <= ptypes.size() : "must have at least one method but no more than types methods"; + assert ptypes.size() == typesToConcretes.length : "array lengths must match"; this.concretes = concretes; this.ptypes = ptypes; @@ -374,7 +379,7 @@ double probability = 0; for (int j = 0; j < typesToConcretes.length; j++) { if (typesToConcretes[j] == i) { - probability += ptypes[j].getProbability(); + probability += ptypes.get(j).getProbability(); } } @@ -479,9 +484,9 @@ for (int i = 0; i < typesToConcretes.length; i++) { if (typesToConcretes[i] == concreteMethodIndex) { if (commonType == null) { - commonType = ptypes[i].getType(); + commonType = ptypes.get(i).getType(); } else { - commonType = commonType.findLeastCommonAncestor(ptypes[i].getType()); + commonType = commonType.findLeastCommonAncestor(ptypes.get(i).getType()); } } } @@ -490,7 +495,7 @@ } private void inlineSingleMethod(StructuredGraph graph, InliningCallback callback, Assumptions assumptions) { - assert concretes.size() == 1 && ptypes.length > 1 && !shouldFallbackToInvoke() && notRecordedTypeProbability == 0; + assert concretes.size() == 1 && ptypes.size() > 1 && !shouldFallbackToInvoke() && 
notRecordedTypeProbability == 0; BeginNode calleeEntryNode = graph.add(new BeginNode()); calleeEntryNode.setProbability(invoke.probability()); @@ -513,14 +518,14 @@ } private FixedNode createDispatchOnType(StructuredGraph graph, LoadHubNode hub, BeginNode[] successors) { - assert ptypes.length > 1; + assert ptypes.size() > 1; - ResolvedJavaType[] keys = new ResolvedJavaType[ptypes.length]; - double[] keyProbabilities = new double[ptypes.length + 1]; - int[] keySuccessors = new int[ptypes.length + 1]; - for (int i = 0; i < ptypes.length; i++) { - keys[i] = ptypes[i].getType(); - keyProbabilities[i] = ptypes[i].getProbability(); + ResolvedJavaType[] keys = new ResolvedJavaType[ptypes.size()]; + double[] keyProbabilities = new double[ptypes.size() + 1]; + int[] keySuccessors = new int[ptypes.size() + 1]; + for (int i = 0; i < ptypes.size(); i++) { + keys[i] = ptypes.get(i).getType(); + keyProbabilities[i] = ptypes.get(i).getProbability(); keySuccessors[i] = typesToConcretes[i]; assert keySuccessors[i] < successors.length - 1 : "last successor is the unknownTypeSux"; } @@ -594,10 +599,21 @@ @Override public String toString() { StringBuilder builder = new StringBuilder(shouldFallbackToInvoke() ? "megamorphic" : "polymorphic"); - builder.append(String.format(", %d methods with %d type checks:", concretes.size(), ptypes.length)); + builder.append(", "); + builder.append(concretes.size()); + builder.append(" methods [ "); for (int i = 0; i < concretes.size(); i++) { builder.append(MetaUtil.format(" %H.%n(%p):%r", concretes.get(i))); } + builder.append(" ], "); + builder.append(ptypes.size()); + builder.append(" type checks [ "); + for (int i = 0; i < ptypes.size(); i++) { + builder.append(" "); + builder.append(ptypes.get(i).getType().getName()); + builder.append(ptypes.get(i).getProbability()); + } + builder.append(" ]"); return builder.toString(); } } @@ -608,21 +624,17 @@ * but for which an assumption has to be registered because of non-final classes. 
*/ private static class AssumptionInlineInfo extends ExactInlineInfo { - public final ResolvedJavaType context; + private final Assumption takenAssumption; - public AssumptionInlineInfo(Invoke invoke, double weight, ResolvedJavaType context, ResolvedJavaMethod concrete) { + public AssumptionInlineInfo(Invoke invoke, double weight, ResolvedJavaMethod concrete, Assumption takenAssumption) { super(invoke, weight, concrete); - this.context = context; + this.takenAssumption = takenAssumption; } @Override public void inline(StructuredGraph graph, GraalCodeCacheProvider runtime, InliningCallback callback, Assumptions assumptions) { - if (Debug.isLogEnabled()) { - String targetName = MetaUtil.format("%H.%n(%p):%r", invoke.methodCallTarget().targetMethod()); - String concreteName = MetaUtil.format("%H.%n(%p):%r", concrete); - Debug.log("recording concrete method assumption: %s on receiver type %s -> %s", targetName, context, concreteName); - } - assumptions.recordConcreteMethod(invoke.methodCallTarget().targetMethod(), context, concrete); + assumptions.record(takenAssumption); + Debug.log("recording assumption: %s", takenAssumption); super.inline(graph, runtime, callback, assumptions); } @@ -649,68 +661,90 @@ ResolvedJavaMethod targetMethod = callTarget.targetMethod(); if (callTarget.invokeKind() == InvokeKind.Special || targetMethod.canBeStaticallyBound()) { - if (!checkTargetConditions(invoke, targetMethod, optimisticOpts)) { - return null; - } - double weight = inliningPolicy.inliningWeight(caller, targetMethod, invoke); - return new ExactInlineInfo(invoke, weight, targetMethod); + return getExactInlineInfo(invoke, runtime, inliningPolicy, optimisticOpts, caller, targetMethod); } + + assert callTarget.invokeKind() == InvokeKind.Virtual || callTarget.invokeKind() == InvokeKind.Interface; + + ResolvedJavaType holder = targetMethod.getDeclaringClass(); ObjectStamp receiverStamp = callTarget.receiver().objectStamp(); - ResolvedJavaType receiverType = receiverStamp.type(); - if (receiverStamp.isExactType()) { - assert targetMethod.getDeclaringClass().isAssignableFrom(receiverType) : receiverType + " subtype of " + targetMethod.getDeclaringClass() + " for " + targetMethod; - ResolvedJavaMethod resolved = receiverType.resolveMethod(targetMethod); - if (!checkTargetConditions(invoke, resolved, optimisticOpts)) { - return null; - } - double weight = inliningPolicy.inliningWeight(caller, resolved, invoke); - return new ExactInlineInfo(invoke, weight, resolved); - } - ResolvedJavaType holder = targetMethod.getDeclaringClass(); - if (receiverStamp.type() != null) { // the invoke target might be more specific than the holder (happens after inlining: locals lose their declared type...) 
- // TODO (lstadler) fix this + ResolvedJavaType receiverType = receiverStamp.type(); if (receiverType != null && holder.isAssignableFrom(receiverType)) { holder = receiverType; - } - } - // TODO (thomaswue) fix this - if (assumptions.useOptimisticAssumptions()) { - ResolvedJavaMethod concrete = holder.findUniqueConcreteMethod(targetMethod); - if (concrete != null) { - if (!checkTargetConditions(invoke, concrete, optimisticOpts)) { - return null; + if (receiverStamp.isExactType()) { + assert targetMethod.getDeclaringClass().isAssignableFrom(holder) : holder + " subtype of " + targetMethod.getDeclaringClass() + " for " + targetMethod; + return getExactInlineInfo(invoke, runtime, inliningPolicy, optimisticOpts, caller, holder.resolveMethod(targetMethod)); } - double weight = inliningPolicy.inliningWeight(caller, concrete, invoke); - return new AssumptionInlineInfo(invoke, weight, holder, concrete); } } + if (holder.isArray()) { + // arrays can be treated as Objects + return getExactInlineInfo(invoke, runtime, inliningPolicy, optimisticOpts, caller, holder.resolveMethod(targetMethod)); + } + + // TODO (chaeubl): we could also use the type determined after assumptions for the type-checked inlining case as it might have an effect on type filtering + if (assumptions.useOptimisticAssumptions()) { + ResolvedJavaType uniqueSubtype = holder.findUniqueConcreteSubtype(); + if (uniqueSubtype != null) { + return getAssumptionInlineInfo(invoke, runtime, inliningPolicy, optimisticOpts, caller, uniqueSubtype.resolveMethod(targetMethod), new Assumptions.ConcreteSubtype(holder, uniqueSubtype)); + } + + ResolvedJavaMethod concrete = holder.findUniqueConcreteMethod(targetMethod); + if (concrete != null) { + return getAssumptionInlineInfo(invoke, runtime, inliningPolicy, optimisticOpts, caller, concrete, new Assumptions.ConcreteMethod(targetMethod, holder, concrete)); + } + + // TODO (chaeubl): C1 has one more assumption in the case of interfaces + } + // type check based inlining - return getTypeCheckedInlineInfo(invoke, inliningPolicy, caller, targetMethod, optimisticOpts); + return getTypeCheckedInlineInfo(invoke, inliningPolicy, caller, holder, targetMethod, optimisticOpts, runtime); + } + + private static InlineInfo getAssumptionInlineInfo(Invoke invoke, InliningPolicy inliningPolicy, OptimisticOptimizations optimisticOpts, + ResolvedJavaMethod caller, ResolvedJavaMethod concrete, Assumption takenAssumption) { + assert !Modifier.isAbstract(concrete.getModifiers()); + if (!checkTargetConditions(invoke, concrete, optimisticOpts)) { + return null; + } + double weight = inliningPolicy.inliningWeight(caller, concrete, invoke); + return new AssumptionInlineInfo(invoke, weight, concrete, takenAssumption); + } + + private static InlineInfo getExactInlineInfo(Invoke invoke, GraalCodeCacheProvider runtime, InliningPolicy inliningPolicy, OptimisticOptimizations optimisticOpts, + ResolvedJavaMethod caller, ResolvedJavaMethod targetMethod) { + assert !Modifier.isAbstract(targetMethod.getModifiers()); + if (!checkTargetConditions(invoke, targetMethod, optimisticOpts, runtime)) { + return null; + } + double weight = inliningPolicy.inliningWeight(caller, targetMethod, invoke); + return new ExactInlineInfo(invoke, weight, targetMethod); } private static InlineInfo getTypeCheckedInlineInfo(Invoke invoke, InliningPolicy inliningPolicy, ResolvedJavaMethod caller, - ResolvedJavaMethod targetMethod, OptimisticOptimizations optimisticOpts) { + ResolvedJavaType holder, ResolvedJavaMethod targetMethod, OptimisticOptimizations 
optimisticOpts) { ProfilingInfo profilingInfo = caller.getProfilingInfo(); JavaTypeProfile typeProfile = profilingInfo.getTypeProfile(invoke.bci()); if (typeProfile == null) { return logNotInlinedMethodAndReturnNull(invoke, targetMethod, "no type profile exists"); } - ProfiledType[] ptypes = typeProfile.getTypes(); - if (ptypes == null || ptypes.length <= 0) { - return logNotInlinedMethodAndReturnNull(invoke, targetMethod, "no types/probabilities were recorded"); + ProfiledType[] rawProfiledTypes = typeProfile.getTypes(); + ArrayList ptypes = getCompatibleTypes(rawProfiledTypes, holder); + if (ptypes == null || ptypes.size() <= 0) { + return logNotInlinedMethodAndReturnNull(invoke, targetMethod, "no types remained after filtering (%d types were recorded)", rawProfiledTypes.length); } double notRecordedTypeProbability = typeProfile.getNotRecordedProbability(); - if (ptypes.length == 1 && notRecordedTypeProbability == 0) { + if (ptypes.size() == 1 && notRecordedTypeProbability == 0) { if (!optimisticOpts.inlineMonomorphicCalls()) { return logNotInlinedMethodAndReturnNull(invoke, targetMethod, "inlining monomorphic calls is disabled"); } - ResolvedJavaType type = ptypes[0].getType(); + ResolvedJavaType type = ptypes.get(0).getType(); ResolvedJavaMethod concrete = type.resolveMethod(targetMethod); if (!checkTargetConditions(invoke, concrete, optimisticOpts)) { return null; @@ -720,12 +754,11 @@ } else { invoke.setPolymorphic(true); - if (!optimisticOpts.inlinePolymorphicCalls() && notRecordedTypeProbability == 0) { - return logNotInlinedMethodAndReturnNull(invoke, targetMethod, "inlining polymorphic calls is disabled"); + return logNotInlinedMethodAndReturnNull(invoke, targetMethod, "inlining polymorphic calls is disabled (%d types)", ptypes.size()); } if (!optimisticOpts.inlineMegamorphicCalls() && notRecordedTypeProbability > 0) { - return logNotInlinedMethodAndReturnNull(invoke, targetMethod, "inlining megamorphic calls is disabled"); + return logNotInlinedMethodAndReturnNull(invoke, targetMethod, "inlining megamorphic calls is disabled (%d types, %f %% not recorded types)", ptypes.size(), notRecordedTypeProbability * 100); } // TODO (chaeubl) inlining of multiple methods should work differently @@ -737,9 +770,9 @@ // determine concrete methods and map type to specific method ArrayList concreteMethods = new ArrayList<>(); - int[] typesToConcretes = new int[ptypes.length]; - for (int i = 0; i < ptypes.length; i++) { - ResolvedJavaMethod concrete = ptypes[i].getType().resolveMethod(targetMethod); + int[] typesToConcretes = new int[ptypes.size()]; + for (int i = 0; i < ptypes.size(); i++) { + ResolvedJavaMethod concrete = ptypes.get(i).getType().resolveMethod(targetMethod); int index = concreteMethods.indexOf(concrete); if (index < 0) { @@ -761,6 +794,19 @@ } + private static ArrayList getCompatibleTypes(ProfiledType[] types, ResolvedJavaType holder) { + ArrayList result = new ArrayList<>(); + for (int i = 0; i < types.length; i++) { + ProfiledType ptype = types[i]; + ResolvedJavaType type = ptype.getType(); + assert !type.isInterface() && !Modifier.isAbstract(type.getModifiers()); + if (holder.isAssignableFrom(type)) { + result.add(ptype); + } + } + return result; + } + private static ResolvedJavaMethod getCaller(Invoke invoke) { return invoke.stateAfter().method(); } diff -r 2ed8d74e5984 -r 8a3efb8c831d graal/com.oracle.graal.phases/src/com/oracle/graal/phases/GraalOptions.java --- a/graal/com.oracle.graal.phases/src/com/oracle/graal/phases/GraalOptions.java Fri Dec 14 10:20:54 2012 +0100 
+++ b/graal/com.oracle.graal.phases/src/com/oracle/graal/phases/GraalOptions.java Fri Dec 14 13:02:49 2012 +0100 @@ -199,6 +199,15 @@ public static boolean OptTailDuplication = true; public static boolean OptEliminatePartiallyRedundantGuards = true; + // Intrinsification settings + public static boolean IntrinsifyArrayCopy = true; + public static boolean IntrinsifyObjectMethods = true; + public static boolean IntrinsifySystemMethods = true; + public static boolean IntrinsifyClassMethods = true; + public static boolean IntrinsifyThreadMethods = true; + public static boolean IntrinsifyUnsafeMethods = true; + public static boolean IntrinsifyMathMethods = true; + /** * Counts the various paths taken through snippets. */ diff -r 2ed8d74e5984 -r 8a3efb8c831d graal/com.oracle.graal.snippets.test/src/com/oracle/graal/snippets/IntrinsificationTest.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/graal/com.oracle.graal.snippets.test/src/com/oracle/graal/snippets/IntrinsificationTest.java Fri Dec 14 13:02:49 2012 +0100 @@ -0,0 +1,443 @@ +/* + * Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved. + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. + * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). + * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. + * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. + */ +package com.oracle.graal.snippets; + +import static org.junit.Assert.*; + +import java.util.concurrent.*; + +import org.junit.*; + +import sun.misc.*; + +import com.oracle.graal.api.code.*; +import com.oracle.graal.compiler.test.*; +import com.oracle.graal.debug.*; +import com.oracle.graal.graph.*; +import com.oracle.graal.nodes.*; +import com.oracle.graal.nodes.calc.*; +import com.oracle.graal.phases.*; +import com.oracle.graal.phases.common.*; +import com.oracle.graal.snippets.nodes.*; + +/** + * Tests if compiler intrinsics are inlined correctly. Most test cases only assert that there are no remaining + * invocations in the graph. This is sufficient if the method that is being intrinsified is a native method. + * For Java methods, additional checks are necessary. 
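As an illustrative aside (not part of the changeset): the "no remaining invocations" check described above reduces to the pattern below, reusing the test() and assertNotInGraph() helpers defined near the end of this file; the test method name is made up for the sketch.

    // Sketch only: test() parses the snippet, runs inlining and canonicalization,
    // and already asserts that no Invoke node survives; the explicit check is
    // repeated here just to spell the pattern out.
    @Test
    public void intrinsificationCheckSketch() {
        StructuredGraph graph = test("objectHashCodeSnippet");
        assertNotInGraph(graph, Invoke.class);
    }
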
+ */ +public class IntrinsificationTest extends GraalCompilerTest { + @Test + public void testObjectIntrinsics() { + test("getClassSnippet"); + test("objectHashCodeSnippet"); + } + + @SuppressWarnings("all") + public static boolean getClassSnippet(Object obj, Class clazz) { + return obj.getClass() == clazz; + } + @SuppressWarnings("all") + public static int objectHashCodeSnippet(TestClassA obj) { + return obj.hashCode(); + } + + + @Test + public void testClassIntrinsics() { + test("getModifiersSnippet"); +// test("isInstanceSnippet"); + test("isInterfaceSnippet"); + test("isArraySnippet"); + test("isPrimitiveSnippet"); + test("getSuperClassSnippet"); + test("getComponentTypeSnippet"); + } + + @SuppressWarnings("all") + public static int getModifiersSnippet(Class clazz) { + return clazz.getModifiers(); + } + @SuppressWarnings("all") + public static boolean isInstanceSnippet(Class clazz) { + return clazz.isInstance(Number.class); + } + @SuppressWarnings("all") + public static boolean isInterfaceSnippet(Class clazz) { + return clazz.isInterface(); + } + @SuppressWarnings("all") + public static boolean isArraySnippet(Class clazz) { + return clazz.isArray(); + } + @SuppressWarnings("all") + public static boolean isPrimitiveSnippet(Class clazz) { + return clazz.isPrimitive(); + } + @SuppressWarnings("all") + public static Class getSuperClassSnippet(Class clazz) { + return clazz.getSuperclass(); + } + @SuppressWarnings("all") + public static Class getComponentTypeSnippet(Class clazz) { + return clazz.getComponentType(); + } + + + @Test + public void testThreadIntrinsics() { + test("currentThreadSnippet"); + test("threadIsInterruptedSnippet"); + test("threadInterruptedSnippet"); + } + + @SuppressWarnings("all") + public static Thread currentThreadSnippet() { + return Thread.currentThread(); + } + @SuppressWarnings("all") + public static boolean threadIsInterruptedSnippet(Thread thread) { + return thread.isInterrupted(); + } + @SuppressWarnings("all") + public static boolean threadInterruptedSnippet() { + return Thread.interrupted(); + } + + + @Test + public void testSystemIntrinsics() { + test("systemTimeSnippet"); + test("systemIdentityHashCode"); +// test("arraycopySnippet"); + } + + @SuppressWarnings("all") + public static long systemTimeSnippet() { + return System.currentTimeMillis() + + System.nanoTime(); + } + @SuppressWarnings("all") + public static int systemIdentityHashCode(Object obj) { + return System.identityHashCode(obj); + } + @SuppressWarnings("all") + public static void arraycopySnippet(int[] src, int srcPos, int[] dest, int destPos, int length) { + System.arraycopy(src, srcPos, dest, destPos, length); + } + + + @Test + public void testUnsafeIntrinsics() { + test("unsafeCompareAndSwapIntSnippet"); + test("unsafeCompareAndSwapLongSnippet"); + test("unsafeCompareAndSwapObjectSnippet"); + + test("unsafeGetBooleanSnippet"); + test("unsafeGetByteSnippet"); + test("unsafeGetShortSnippet"); + test("unsafeGetCharSnippet"); + test("unsafeGetIntSnippet"); + test("unsafeGetFloatSnippet"); + test("unsafeGetDoubleSnippet"); + test("unsafeGetObjectSnippet"); + + test("unsafePutBooleanSnippet"); + test("unsafePutByteSnippet"); + test("unsafePutShortSnippet"); + test("unsafePutCharSnippet"); + test("unsafePutIntSnippet"); + test("unsafePutFloatSnippet"); + test("unsafePutDoubleSnippet"); + test("unsafePutObjectSnippet"); + + test("unsafeDirectMemoryReadSnippet"); + test("unsafeDirectMemoryWriteSnippet"); + } + + @SuppressWarnings("all") + public static boolean unsafeCompareAndSwapIntSnippet(Unsafe 
unsafe, Object obj, long offset) { + return unsafe.compareAndSwapInt(obj, offset, 0, 1); + } + @SuppressWarnings("all") + public static boolean unsafeCompareAndSwapLongSnippet(Unsafe unsafe, Object obj, long offset) { + return unsafe.compareAndSwapLong(obj, offset, 0, 1); + } + @SuppressWarnings("all") + public static boolean unsafeCompareAndSwapObjectSnippet(Unsafe unsafe, Object obj, long offset) { + return unsafe.compareAndSwapObject(obj, offset, null, new Object()); + } + @SuppressWarnings("all") + public static boolean unsafeGetBooleanSnippet(Unsafe unsafe, Object obj, long offset) { + return unsafe.getBoolean(obj, offset) && + unsafe.getBooleanVolatile(obj, offset); + } + @SuppressWarnings("all") + public static int unsafeGetByteSnippet(Unsafe unsafe, Object obj, long offset) { + return unsafe.getByte(obj, offset) + + unsafe.getByteVolatile(obj, offset); + } + @SuppressWarnings("all") + public static int unsafeGetShortSnippet(Unsafe unsafe, Object obj, long offset) { + return unsafe.getShort(obj, offset) + + unsafe.getShortVolatile(obj, offset); + } + @SuppressWarnings("all") + public static int unsafeGetCharSnippet(Unsafe unsafe, Object obj, long offset) { + return unsafe.getChar(obj, offset) + + unsafe.getCharVolatile(obj, offset); + } + @SuppressWarnings("all") + public static int unsafeGetIntSnippet(Unsafe unsafe, Object obj, long offset) { + return unsafe.getInt(obj, offset) + + unsafe.getIntVolatile(obj, offset); + } + @SuppressWarnings("all") + public static long unsafeGetLongSnippet(Unsafe unsafe, Object obj, long offset) { + return unsafe.getLong(obj, offset) + + unsafe.getLongVolatile(obj, offset); + } + @SuppressWarnings("all") + public static float unsafeGetFloatSnippet(Unsafe unsafe, Object obj, long offset) { + return unsafe.getFloat(obj, offset) + + unsafe.getFloatVolatile(obj, offset); + } + @SuppressWarnings("all") + public static double unsafeGetDoubleSnippet(Unsafe unsafe, Object obj, long offset) { + return unsafe.getDouble(obj, offset) + + unsafe.getDoubleVolatile(obj, offset); + } + @SuppressWarnings("all") + public static boolean unsafeGetObjectSnippet(Unsafe unsafe, Object obj, long offset) { + return unsafe.getObject(obj, offset) == unsafe.getObjectVolatile(obj, offset); + } + @SuppressWarnings("all") + public static void unsafePutBooleanSnippet(Unsafe unsafe, Object obj, long offset, boolean value) { + unsafe.putBoolean(obj, offset, value); + unsafe.putBooleanVolatile(obj, offset, value); + } + @SuppressWarnings("all") + public static void unsafePutByteSnippet(Unsafe unsafe, Object obj, long offset, byte value) { + unsafe.putByte(obj, offset, value); + unsafe.putByteVolatile(obj, offset, value); + } + @SuppressWarnings("all") + public static void unsafePutShortSnippet(Unsafe unsafe, Object obj, long offset, short value) { + unsafe.putShort(obj, offset, value); + unsafe.putShortVolatile(obj, offset, value); + } + @SuppressWarnings("all") + public static void unsafePutCharSnippet(Unsafe unsafe, Object obj, long offset, char value) { + unsafe.putChar(obj, offset, value); + unsafe.putCharVolatile(obj, offset, value); + } + @SuppressWarnings("all") + public static void unsafePutIntSnippet(Unsafe unsafe, Object obj, long offset, int value) { + unsafe.putInt(obj, offset, value); + unsafe.putIntVolatile(obj, offset, value); + unsafe.putOrderedInt(obj, offset, value); + } + @SuppressWarnings("all") + public static void unsafePutLongSnippet(Unsafe unsafe, Object obj, long offset, long value) { + unsafe.putLong(obj, offset, value); + unsafe.putLongVolatile(obj, offset, 
value); + unsafe.putOrderedLong(obj, offset, value); + } + @SuppressWarnings("all") + public static void unsafePutFloatSnippet(Unsafe unsafe, Object obj, long offset, float value) { + unsafe.putFloat(obj, offset, value); + unsafe.putFloatVolatile(obj, offset, value); + } + @SuppressWarnings("all") + public static void unsafePutDoubleSnippet(Unsafe unsafe, Object obj, long offset, double value) { + unsafe.putDouble(obj, offset, value); + unsafe.putDoubleVolatile(obj, offset, value); + } + @SuppressWarnings("all") + public static void unsafePutObjectSnippet(Unsafe unsafe, Object obj, long offset, Object value) { + unsafe.putObject(obj, offset, value); + unsafe.putObjectVolatile(obj, offset, value); + unsafe.putOrderedObject(obj, offset, value); + } + @SuppressWarnings("all") + public static double unsafeDirectMemoryReadSnippet(Unsafe unsafe, long address) { + // Unsafe.getBoolean(long) and Unsafe.getObject(long) do not exist + return unsafe.getByte(address) + + unsafe.getShort(address) + + unsafe.getChar(address) + + unsafe.getInt(address) + + unsafe.getLong(address) + + unsafe.getFloat(address) + + unsafe.getDouble(address); + } + @SuppressWarnings("all") + public static void unsafeDirectMemoryWriteSnippet(Unsafe unsafe, long address, byte value) { + // Unsafe.putBoolean(long) and Unsafe.putObject(long) do not exist + unsafe.putByte(address, value); + unsafe.putShort(address, value); + unsafe.putChar(address, (char) value); + unsafe.putInt(address, value); + unsafe.putLong(address, value); + unsafe.putFloat(address, value); + unsafe.putDouble(address, value); + } + + + @Test + public void testMathIntrinsics() { + assertInGraph(assertNotInGraph(test("mathAbsSnippet"), IfNode.class), MathIntrinsicNode.class); // Java + test("mathSnippet"); + } + + @SuppressWarnings("all") + public static double mathAbsSnippet(double value) { + return Math.abs(value); + } + @SuppressWarnings("all") + public static double mathSnippet(double value) { + return Math.sqrt(value) + + Math.log(value) + + Math.log10(value) + + Math.sin(value) + + Math.cos(value) + + Math.tan(value); +// Math.exp(value) + +// Math.pow(value, 13); + } + + + @Test + public void testIntegerIntrinsics() { + assertInGraph(test("integerReverseBytesSnippet"), ReverseBytesNode.class); // Java + assertInGraph(test("integerNumberOfLeadingZerosSnippet"), BitScanReverseNode.class); // Java + assertInGraph(test("integerNumberOfTrailingZerosSnippet"), BitScanForwardNode.class); // Java + } + + @SuppressWarnings("all") + public static int integerReverseBytesSnippet(int value) { + return Integer.reverseBytes(value); + } + @SuppressWarnings("all") + public static int integerNumberOfLeadingZerosSnippet(int value) { + return Integer.numberOfLeadingZeros(value); + } + @SuppressWarnings("all") + public static int integerNumberOfTrailingZerosSnippet(int value) { + return Integer.numberOfTrailingZeros(value); + } + + + @Test + public void testLongIntrinsics() { + assertInGraph(test("longReverseBytesSnippet"), ReverseBytesNode.class); // Java + assertInGraph(test("longNumberOfLeadingZerosSnippet"), BitScanReverseNode.class); // Java + assertInGraph(test("longNumberOfTrailingZerosSnippet"), BitScanForwardNode.class); // Java + } + + @SuppressWarnings("all") + public static long longReverseBytesSnippet(long value) { + return Long.reverseBytes(value); + } + @SuppressWarnings("all") + public static long longNumberOfLeadingZerosSnippet(long value) { + return Long.numberOfLeadingZeros(value); + } + @SuppressWarnings("all") + public static long 
longNumberOfTrailingZerosSnippet(long value) { + return Long.numberOfTrailingZeros(value); + } + + + @Test + public void testFloatIntrinsics() { + assertInGraph(test("floatToIntBitsSnippet"), ConvertNode.class); // Java + test("intBitsToFloatSnippet"); + } + + @SuppressWarnings("all") + public static int floatToIntBitsSnippet(float value) { + return Float.floatToIntBits(value); + } + @SuppressWarnings("all") + public static float intBitsToFloatSnippet(int value) { + return Float.intBitsToFloat(value); + } + + + @Test + public void testDoubleIntrinsics() { + assertInGraph(test("doubleToLongBitsSnippet"), ConvertNode.class); // Java + test("longBitsToDoubleSnippet"); + } + + @SuppressWarnings("all") + public static long doubleToLongBitsSnippet(double value) { + return Double.doubleToLongBits(value); + } + @SuppressWarnings("all") + public static double longBitsToDoubleSnippet(long value) { + return Double.longBitsToDouble(value); + } + + + private StructuredGraph test(final String snippet) { + return Debug.scope("IntrinsificationTest", new DebugDumpScope(snippet), new Callable() { + @Override + public StructuredGraph call() { + StructuredGraph graph = parse(snippet); + PhasePlan phasePlan = getDefaultPhasePlan(); + Assumptions assumptions = new Assumptions(true); + new ComputeProbabilityPhase().apply(graph); + Debug.dump(graph, "Graph"); + new InliningPhase(null, runtime(), null, assumptions, null, phasePlan, OptimisticOptimizations.ALL).apply(graph); + Debug.dump(graph, "Graph"); + new CanonicalizerPhase(null, runtime(), assumptions).apply(graph); + new DeadCodeEliminationPhase().apply(graph); + + assertNotInGraph(graph, Invoke.class); + return graph; + } + }); + } + + private static StructuredGraph assertNotInGraph(StructuredGraph graph, Class clazz) { + for (Node node: graph.getNodes()) { + if (clazz.isInstance(node)) { + fail(node.toString()); + } + } + return graph; + } + + private static StructuredGraph assertInGraph(StructuredGraph graph, Class clazz) { + for (Node node: graph.getNodes()) { + if (clazz.isInstance(node)) { + return graph; + } + } + fail("Graph does not contain a node of class " + clazz.getName()); + return graph; + } + + private static class TestClassA { + } +} diff -r 2ed8d74e5984 -r 8a3efb8c831d graal/com.oracle.graal.snippets/src/com/oracle/graal/snippets/InstanceMethodSubstitution.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/graal/com.oracle.graal.snippets/src/com/oracle/graal/snippets/InstanceMethodSubstitution.java Fri Dec 14 13:02:49 2012 +0100 @@ -0,0 +1,34 @@ +/* + * Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved. + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. + * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). + * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 
+ * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. + */ +package com.oracle.graal.snippets; + +import java.lang.annotation.*; + +/** + * Denotes a method that substitutes a method of another class. + */ +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.METHOD) +public @interface InstanceMethodSubstitution { + String value() default ""; +} diff -r 2ed8d74e5984 -r 8a3efb8c831d graal/com.oracle.graal.snippets/src/com/oracle/graal/snippets/SnippetInstaller.java --- a/graal/com.oracle.graal.snippets/src/com/oracle/graal/snippets/SnippetInstaller.java Fri Dec 14 10:20:54 2012 +0100 +++ b/graal/com.oracle.graal.snippets/src/com/oracle/graal/snippets/SnippetInstaller.java Fri Dec 14 13:02:49 2012 +0100 @@ -92,7 +92,7 @@ throw new RuntimeException("Snippet must not be abstract or native"); } ResolvedJavaMethod snippet = runtime.lookupJavaMethod(method); - assert snippet.getCompilerStorage().get(Graph.class) == null; + assert snippet.getCompilerStorage().get(Graph.class) == null : method; StructuredGraph graph = makeGraph(snippet, inliningPolicy(snippet)); //System.out.println("snippet: " + graph); snippet.getCompilerStorage().put(Graph.class, graph); @@ -106,14 +106,19 @@ continue; } try { - String name = method.getName(); - MethodSubstitution a = method.getAnnotation(MethodSubstitution.class); - if (a != null) { - if (!a.value().equals("")) { - name = a.value(); + InstanceMethodSubstitution methodSubstitution = method.getAnnotation(InstanceMethodSubstitution.class); + String originalName = method.getName(); + Class[] originalParameters = method.getParameterTypes(); + if (methodSubstitution != null) { + if (!methodSubstitution.value().isEmpty()) { + originalName = methodSubstitution.value(); } + assert originalParameters.length >= 1 : "must be a static method with the this object as its first parameter"; + Class[] newParameters = new Class[originalParameters.length - 1]; + System.arraycopy(originalParameters, 1, newParameters, 0, newParameters.length); + originalParameters = newParameters; } - Method originalMethod = originalClazz.getDeclaredMethod(name, method.getParameterTypes()); + Method originalMethod = originalClazz.getDeclaredMethod(originalName, originalParameters); if (!originalMethod.getReturnType().isAssignableFrom(method.getReturnType())) { throw new RuntimeException("Snippet has incompatible return type"); } diff -r 2ed8d74e5984 -r 8a3efb8c831d graal/com.oracle.graal.snippets/src/com/oracle/graal/snippets/SnippetProvider.java --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/graal/com.oracle.graal.snippets/src/com/oracle/graal/snippets/SnippetProvider.java Fri Dec 14 13:02:49 2012 +0100 @@ -0,0 +1,30 @@ +/* + * Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved. + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. + * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). 
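A hypothetical usage sketch of the new annotation (this class is not part of the changeset): per the SnippetInstaller change above, the substitute must be a static method whose first parameter stands in for the receiver, and that parameter is dropped when the original instance method is resolved on the target class.

    // Hypothetical example only: a snippet class substituting Thread.isInterrupted().
    public class ThreadSubstitutionSketch {
        @InstanceMethodSubstitution("isInterrupted")
        public static boolean isInterrupted(Thread thisObject) {
            // Placeholder body for the sketch; SnippetInstaller resolves the original
            // method via Thread.class.getDeclaredMethod("isInterrupted") after
            // dropping the receiver parameter.
            return false;
        }
    }
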
+ * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. + * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. + */ +package com.oracle.graal.snippets; + +import com.oracle.graal.api.code.*; + + +public interface SnippetProvider { + void installSnippets(SnippetInstaller installer, Assumptions assumptions); +} diff -r 2ed8d74e5984 -r 8a3efb8c831d graal/com.oracle.graal.snippets/src/com/oracle/graal/snippets/nodes/MathIntrinsicNode.java --- a/graal/com.oracle.graal.snippets/src/com/oracle/graal/snippets/nodes/MathIntrinsicNode.java Fri Dec 14 10:20:54 2012 +0100 +++ b/graal/com.oracle.graal.snippets/src/com/oracle/graal/snippets/nodes/MathIntrinsicNode.java Fri Dec 14 13:02:49 2012 +0100 @@ -38,7 +38,7 @@ private final Operation operation; public enum Operation { - ABS, SQRT, LOG, LOG10, SIN, COS, TAN, + ABS, SQRT, LOG, LOG10, SIN, COS, TAN } public ValueNode x() { diff -r 2ed8d74e5984 -r 8a3efb8c831d make/windows/makefiles/projectcreator.make --- a/make/windows/makefiles/projectcreator.make Fri Dec 14 10:20:54 2012 +0100 +++ b/make/windows/makefiles/projectcreator.make Fri Dec 14 13:02:49 2012 +0100 @@ -144,6 +144,9 @@ -ignorePath_TARGET tiered \ -ignorePath_TARGET c1_ +ProjectCreatorIDEOptionsIgnoreGraal=\ + -ignorePath_TARGET graal + ProjectCreatorIDEOptionsIgnoreCompiler2=\ -ignorePath_TARGET compiler2 \ -ignorePath_TARGET tiered \ @@ -230,15 +233,18 @@ -define_compiler1 COMPILER1 \ -ignorePath_compiler1 core \ -ignorePath_compiler1 src/share/vm/graal \ -$(ProjectCreatorIDEOptionsIgnoreCompiler2:TARGET=compiler1) + $(ProjectCreatorIDEOptionsIgnoreGraal:TARGET=compiler1) \ + $(ProjectCreatorIDEOptionsIgnoreCompiler2:TARGET=compiler1) ################################################## # Graal compiler specific options ################################################## ProjectCreatorIDEOptions=$(ProjectCreatorIDEOptions) \ - -define_graal COMPILER1 \ -define_graal GRAAL \ -$(ProjectCreatorIDEOptionsIgnoreCompiler2:TARGET=graal) + -ignorePath_graal core \ + -ignorePath_graal src/share/vm/c1 \ + $(ProjectCreatorIDEOptionsIgnoreCompiler1:TARGET=graal) \ + $(ProjectCreatorIDEOptionsIgnoreCompiler2:TARGET=graal) ################################################## # Server(C2) compiler specific options @@ -260,6 +266,7 @@ -additionalFile_compiler2 ad_$(Platform_arch_model)_pipeline.cpp \ -additionalFile_compiler2 adGlobals_$(Platform_arch_model).hpp \ -additionalFile_compiler2 dfa_$(Platform_arch_model).cpp \ + $(ProjectCreatorIDEOptionsIgnoreGraal:TARGET=compiler2) \ $(ProjectCreatorIDEOptionsIgnoreCompiler1:TARGET=compiler2) # Add in the jvmti (JSR-163) options diff -r 2ed8d74e5984 -r 8a3efb8c831d make/windows/makefiles/vm.make --- a/make/windows/makefiles/vm.make Fri Dec 14 10:20:54 2012 +0100 +++ b/make/windows/makefiles/vm.make Fri Dec 14 13:02:49 2012 +0100 @@ -61,7 +61,7 @@ !endif !if "$(Variant)" == "graal" -CPP_FLAGS=$(CPP_FLAGS) /D "COMPILER1" /D "GRAAL" +CPP_FLAGS=$(CPP_FLAGS) /D "GRAAL" !endif !if "$(BUILDARCH)" == "i486" diff -r 2ed8d74e5984 -r 8a3efb8c831d src/cpu/x86/vm/c1_globals_x86.hpp --- a/src/cpu/x86/vm/c1_globals_x86.hpp Fri Dec 14 10:20:54 2012 +0100 +++ b/src/cpu/x86/vm/c1_globals_x86.hpp Fri Dec 14 13:02:49 2012 +0100 @@ -57,6 +57,7 @@ define_pd_global(bool, CICompileOSR, true ); #endif // 
!TIERED define_pd_global(bool, UseTypeProfile, false); +define_pd_global(intx, TypeProfileWidth, 0); define_pd_global(bool, RoundFPResults, true ); define_pd_global(bool, LIRFillDelaySlots, false); diff -r 2ed8d74e5984 -r 8a3efb8c831d src/cpu/x86/vm/graalGlobals_x86.hpp --- a/src/cpu/x86/vm/graalGlobals_x86.hpp Fri Dec 14 10:20:54 2012 +0100 +++ b/src/cpu/x86/vm/graalGlobals_x86.hpp Fri Dec 14 13:02:49 2012 +0100 @@ -50,7 +50,6 @@ define_pd_global(intx, FreqInlineSize, 325 ); define_pd_global(intx, NewSizeThreadIncrease, 4*K ); define_pd_global(uintx,MetaspaceSize, 12*M ); -define_pd_global(uintx,MaxPermSize, 64*M ); define_pd_global(bool, NeverActAsServerClassMachine, false); define_pd_global(uint64_t,MaxRAM, 1ULL*G); define_pd_global(bool, CICompileOSR, true ); diff -r 2ed8d74e5984 -r 8a3efb8c831d src/cpu/x86/vm/graalRuntime_x86.cpp --- a/src/cpu/x86/vm/graalRuntime_x86.cpp Fri Dec 14 10:20:54 2012 +0100 +++ b/src/cpu/x86/vm/graalRuntime_x86.cpp Fri Dec 14 13:02:49 2012 +0100 @@ -1142,6 +1142,35 @@ break; } + case graal_identity_hash_code_id: { + Register obj = j_rarg0; // Incoming + __ set_info("identity_hash_code", dont_gc_arguments); + __ enter(); + OopMap* map = save_live_registers(sasm, 1); + int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, graal_identity_hash_code), obj); + oop_maps = new OopMapSet(); + oop_maps->add_gc_map(call_offset, map); + restore_live_registers_except_rax(sasm); + __ leave(); + __ ret(0); + break; + } + case graal_thread_is_interrupted_id: { + Register thread = j_rarg0; + Register clear_interrupted = j_rarg1; + + __ set_info("identity_hash_code", dont_gc_arguments); + __ enter(); + OopMap* map = save_live_registers(sasm, 1); + int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, graal_thread_is_interrupted), thread, clear_interrupted); + oop_maps = new OopMapSet(); + oop_maps->add_gc_map(call_offset, map); + restore_live_registers_except_rax(sasm); + __ leave(); + __ ret(0); + break; + } + default: { GraalStubFrame f(sasm, "unimplemented entry", dont_gc_arguments); __ movptr(rax, (int)id); diff -r 2ed8d74e5984 -r 8a3efb8c831d src/share/vm/graal/graalCompiler.cpp --- a/src/share/vm/graal/graalCompiler.cpp Fri Dec 14 10:20:54 2012 +0100 +++ b/src/share/vm/graal/graalCompiler.cpp Fri Dec 14 13:02:49 2012 +0100 @@ -49,8 +49,8 @@ JavaThread* THREAD = JavaThread::current(); TRACE_graal_1("GraalCompiler::initialize"); - unsigned long heap_end = (long) Universe::heap()->reserved_region().end(); - unsigned long allocation_end = heap_end + 16l * 1024 * 1024 * 1024; + uintptr_t heap_end = (uintptr_t) Universe::heap()->reserved_region().end(); + uintptr_t allocation_end = heap_end + ((uintptr_t)16) * 1024 * 1024 * 1024; guarantee(heap_end < allocation_end, "heap end too close to end of address space (might lead to erroneous TLAB allocations)"); NOT_LP64(error("check TLAB allocation code for address space conflicts")); diff -r 2ed8d74e5984 -r 8a3efb8c831d src/share/vm/graal/graalCompilerToVM.cpp --- a/src/share/vm/graal/graalCompilerToVM.cpp Fri Dec 14 10:20:54 2012 +0100 +++ b/src/share/vm/graal/graalCompilerToVM.cpp Fri Dec 14 13:02:49 2012 +0100 @@ -224,16 +224,17 @@ C2V_VMENTRY(jlong, getUniqueConcreteMethod, (JNIEnv *, jobject, jlong metaspace_method, jobject resultHolder)) methodHandle method = asMethod(metaspace_method); KlassHandle holder = method->method_holder(); - if (holder->is_interface()) { - // Cannot trust interfaces. 
Because of: - // interface I { void foo(); } - // class A { public void foo() {} } - // class B extends A implements I { } - // class C extends B { public void foo() { } } - // class D extends B { } - // Would lead to identify C.foo() as the unique concrete method for I.foo() without seeing A.foo(). - return 0L; - } + // TODO (chaeubl): check if the following is necessary + //if (holder->is_interface()) { + // // Cannot trust interfaces. Because of: + // // interface I { void foo(); } + // // class A { public void foo() {} } + // // class B extends A implements I { } + // // class C extends B { public void foo() { } } + // // class D extends B { } + // // Would lead to identify C.foo() as the unique concrete method for I.foo() without seeing A.foo(). + // return 0L; + //} methodHandle ucm; { ResourceMark rm; @@ -250,6 +251,19 @@ return (jlong) (address) ucm(); C2V_END +C2V_VMENTRY(jobject, getUniqueImplementor, (JNIEnv *, jobject, jobject interface_type)) + InstanceKlass* klass = (InstanceKlass*) asKlass(HotSpotResolvedObjectType::metaspaceKlass(interface_type)); + assert(klass->is_interface(), "must be"); + if (klass->nof_implementors() == 1) { + InstanceKlass* implementor = (InstanceKlass*) klass->implementor(); + if (!implementor->is_abstract() && !implementor->is_interface() && implementor->is_leaf_class()) { + Handle type = GraalCompiler::get_JavaType(implementor, CHECK_NULL); + return JNIHandles::make_local(THREAD, type()); + } + } + return NULL; +C2V_END + C2V_ENTRY(jint, getInvocationCount, (JNIEnv *, jobject, jlong metaspace_method)) Method* method = asMethod(metaspace_method); return method->invocation_count(); @@ -614,6 +628,8 @@ set_int("threadTlabTopOffset", in_bytes(JavaThread::tlab_top_offset())); set_int("threadTlabEndOffset", in_bytes(JavaThread::tlab_end_offset())); set_int("threadObjectOffset", in_bytes(JavaThread::threadObj_offset())); + set_int("osThreadOffset", in_bytes(JavaThread::osthread_offset())); + set_int("osThreadInterruptedOffset", in_bytes(OSThread::interrupted_offset())); set_int("unlockedMask", (int) markOopDesc::unlocked_value); set_int("biasedLockMaskInPlace", (int) markOopDesc::biased_lock_mask_in_place); set_int("ageMaskInPlace", (int) markOopDesc::age_mask_in_place); @@ -633,11 +649,19 @@ set_int("klassAccessFlagsOffset", in_bytes(Klass::access_flags_offset())); set_int("klassOffset", java_lang_Class::klass_offset_in_bytes()); set_int("graalMirrorInClassOffset", java_lang_Class::graal_mirror_offset_in_bytes()); + set_int("klassLayoutHelperOffset", in_bytes(Klass::layout_helper_offset())); + set_int("klassSuperKlassOffset", in_bytes(Klass::super_offset())); set_int("methodDataOffset", in_bytes(Method::method_data_offset())); set_int("nmethodEntryOffset", nmethod::verified_entry_point_offset()); set_int("methodCompiledEntryOffset", in_bytes(Method::from_compiled_offset())); set_int("basicLockSize", sizeof(BasicLock)); set_int("basicLockDisplacedHeaderOffset", BasicLock::displaced_header_offset_in_bytes()); + set_int("uninitializedIdentityHashCodeValue", markOopDesc::no_hash); + set_int("identityHashCodeShift", markOopDesc::hash_shift); + + set_int("arrayKlassLayoutHelperIdentifier", 0x80000000); + assert((Klass::_lh_array_tag_obj_value & Klass::_lh_array_tag_type_value & 0x80000000) != 0, "obj_array and type_array must have first bit set"); + set_int("arrayKlassComponentMirrorOffset", in_bytes(ArrayKlass::component_mirror_offset())); set_int("metaspaceArrayLengthOffset", Array::length_offset_in_bytes()); set_int("metaspaceArrayBaseOffset", 
Array::base_offset_in_bytes()); @@ -682,6 +706,8 @@ set_long("newInstanceStub", VmIds::addStub(GraalRuntime::entry_for(GraalRuntime::graal_new_instance_id))); set_long("newArrayStub", VmIds::addStub(GraalRuntime::entry_for(GraalRuntime::graal_new_array_id))); set_long("newMultiArrayStub", VmIds::addStub(GraalRuntime::entry_for(GraalRuntime::graal_new_multi_array_id))); + set_long("identityHashCodeStub", VmIds::addStub(GraalRuntime::entry_for(GraalRuntime::graal_identity_hash_code_id))); + set_long("threadIsInterruptedStub", VmIds::addStub(GraalRuntime::entry_for(GraalRuntime::graal_thread_is_interrupted_id))); set_long("inlineCacheMissStub", VmIds::addStub(SharedRuntime::get_ic_miss_stub())); set_long("handleExceptionStub", VmIds::addStub(GraalRuntime::entry_for(GraalRuntime::graal_handle_exception_nofpu_id))); set_long("handleDeoptStub", VmIds::addStub(SharedRuntime::deopt_blob()->unpack())); @@ -952,6 +978,7 @@ {CC"initializeExceptionHandlers", CC"("METASPACE_METHOD EXCEPTION_HANDLERS")"EXCEPTION_HANDLERS, FN_PTR(initializeExceptionHandlers)}, {CC"hasBalancedMonitors", CC"("METASPACE_METHOD")Z", FN_PTR(hasBalancedMonitors)}, {CC"getUniqueConcreteMethod", CC"("METASPACE_METHOD"["HS_RESOLVED_TYPE")"METASPACE_METHOD, FN_PTR(getUniqueConcreteMethod)}, + {CC"getUniqueImplementor", CC"("HS_RESOLVED_TYPE")"RESOLVED_TYPE, FN_PTR(getUniqueImplementor)}, {CC"getStackTraceElement", CC"("METASPACE_METHOD"I)"STACK_TRACE_ELEMENT, FN_PTR(getStackTraceElement)}, {CC"initializeMethod", CC"("METASPACE_METHOD HS_RESOLVED_METHOD")V", FN_PTR(initializeMethod)}, {CC"initializeMethodData", CC"("METASPACE_METHOD_DATA METHOD_DATA")V", FN_PTR(initializeMethodData)}, diff -r 2ed8d74e5984 -r 8a3efb8c831d src/share/vm/graal/graalGlobals.hpp --- a/src/share/vm/graal/graalGlobals.hpp Fri Dec 14 10:20:54 2012 +0100 +++ b/src/share/vm/graal/graalGlobals.hpp Fri Dec 14 13:02:49 2012 +0100 @@ -53,7 +53,7 @@ "Bootstrap Graal before running Java main method") \ \ product(ccstr, GraalClassPath, NULL, \ - "Use the class path for Graal classes") \ + "Use this path, zip, or jar, to locate Graal-specific classes") \ \ product(intx, TraceGraal, 0, \ "Trace level for Graal") \ diff -r 2ed8d74e5984 -r 8a3efb8c831d src/share/vm/graal/graalRuntime.cpp --- a/src/share/vm/graal/graalRuntime.cpp Fri Dec 14 10:20:54 2012 +0100 +++ b/src/share/vm/graal/graalRuntime.cpp Fri Dec 14 13:02:49 2012 +0100 @@ -1,584 +1,601 @@ -/* - * Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved. - * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. - * - * This code is free software; you can redistribute it and/or modify it - * under the terms of the GNU General Public License version 2 only, as - * published by the Free Software Foundation. - * - * This code is distributed in the hope that it will be useful, but WITHOUT - * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or - * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License - * version 2 for more details (a copy is included in the LICENSE file that - * accompanied this code). - * - * You should have received a copy of the GNU General Public License version - * 2 along with this work; if not, write to the Free Software Foundation, - * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. - * - * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA - * or visit www.oracle.com if you need additional information or have any - * questions. 
- */ - -#include "precompiled.hpp" -#include "runtime/interfaceSupport.hpp" -#include "prims/jvm.h" -#include "graal/graalRuntime.hpp" -#include "graal/graalVMToCompiler.hpp" -#include "asm/codeBuffer.hpp" -#include "runtime/biasedLocking.hpp" - -// Implementation of GraalStubAssembler - -GraalStubAssembler::GraalStubAssembler(CodeBuffer* code, const char * name, int stub_id) : MacroAssembler(code) { - _name = name; - _must_gc_arguments = false; - _frame_size = no_frame_size; - _num_rt_args = 0; - _stub_id = stub_id; -} - - -void GraalStubAssembler::set_info(const char* name, bool must_gc_arguments) { - _name = name; - _must_gc_arguments = must_gc_arguments; -} - - -void GraalStubAssembler::set_frame_size(int size) { - if (_frame_size == no_frame_size) { - _frame_size = size; - } - assert(_frame_size == size, "can't change the frame size"); -} - - -void GraalStubAssembler::set_num_rt_args(int args) { - if (_num_rt_args == 0) { - _num_rt_args = args; - } - assert(_num_rt_args == args, "can't change the number of args"); -} - -// Implementation of GraalRuntime - -CodeBlob* GraalRuntime::_blobs[GraalRuntime::number_of_ids]; -const char *GraalRuntime::_blob_names[] = { - GRAAL_STUBS(STUB_NAME, LAST_STUB_NAME) -}; - -// Simple helper to see if the caller of a runtime stub which -// entered the VM has been deoptimized - -static bool caller_is_deopted() { - JavaThread* thread = JavaThread::current(); - RegisterMap reg_map(thread, false); - frame runtime_frame = thread->last_frame(); - frame caller_frame = runtime_frame.sender(®_map); - assert(caller_frame.is_compiled_frame(), "must be compiled"); - return caller_frame.is_deoptimized_frame(); -} - -// Stress deoptimization -static void deopt_caller() { - if ( !caller_is_deopted()) { - JavaThread* thread = JavaThread::current(); - RegisterMap reg_map(thread, false); - frame runtime_frame = thread->last_frame(); - frame caller_frame = runtime_frame.sender(®_map); - Deoptimization::deoptimize_frame(thread, caller_frame.id(), Deoptimization::Reason_constraint); - assert(caller_is_deopted(), "Must be deoptimized"); - } -} - -static bool setup_code_buffer(CodeBuffer* code) { - // Preinitialize the consts section to some large size: - int locs_buffer_size = 1 * (relocInfo::length_limit + sizeof(relocInfo)); - char* locs_buffer = NEW_RESOURCE_ARRAY(char, locs_buffer_size); - code->insts()->initialize_shared_locs((relocInfo*)locs_buffer, - locs_buffer_size / sizeof(relocInfo)); - - // Global stubs have neither constants nor local stubs - code->initialize_consts_size(0); - code->initialize_stubs_size(0); - - return true; -} - -void GraalRuntime::generate_blob_for(BufferBlob* buffer_blob, StubID id) { - assert(0 <= id && id < number_of_ids, "illegal stub id"); - ResourceMark rm; - // create code buffer for code storage - CodeBuffer code(buffer_blob); - - setup_code_buffer(&code); - - // create assembler for code generation - GraalStubAssembler* sasm = new GraalStubAssembler(&code, name_for(id), id); - // generate code for runtime stub - OopMapSet* oop_maps; - oop_maps = generate_code_for(id, sasm); - assert(oop_maps == NULL || sasm->frame_size() != GraalStubAssembler::no_frame_size, - "if stub has an oop map it must have a valid frame size"); - -#ifdef ASSERT - // Make sure that stubs that need oopmaps have them - switch (id) { - // These stubs don't need to have an oopmap - case graal_slow_subtype_check_id: -#if defined(SPARC) || defined(PPC) - case handle_exception_nofpu_id: // Unused on sparc -#endif - case graal_verify_oop_id: - case 
graal_unwind_exception_call_id: - case graal_OSR_migration_end_id: - case graal_arithmetic_frem_id: - case graal_arithmetic_drem_id: - case graal_set_deopt_info_id: - break; - - // All other stubs should have oopmaps - default: - assert(oop_maps != NULL, "must have an oopmap"); - } -#endif - - // align so printing shows nop's instead of random code at the end (SimpleStubs are aligned) - sasm->align(BytesPerWord); - // make sure all code is in code buffer - sasm->flush(); - // create blob - distinguish a few special cases - CodeBlob* blob = RuntimeStub::new_runtime_stub(name_for(id), - &code, - CodeOffsets::frame_never_safe, - sasm->frame_size(), - oop_maps, - sasm->must_gc_arguments()); - // install blob - assert(blob != NULL, "blob must exist"); - _blobs[id] = blob; -} - - -void GraalRuntime::initialize(BufferBlob* blob) { - // generate stubs - for (int id = 0; id < number_of_ids; id++) generate_blob_for(blob, (StubID)id); - // printing -#ifndef PRODUCT - if (GraalPrintSimpleStubs) { - ResourceMark rm; - for (int id = 0; id < number_of_ids; id++) { - _blobs[id]->print(); - if (_blobs[id]->oop_maps() != NULL) { - _blobs[id]->oop_maps()->print(); - } - } - } -#endif -} - - -CodeBlob* GraalRuntime::blob_for(StubID id) { - assert(0 <= id && id < number_of_ids, "illegal stub id"); - return _blobs[id]; -} - - -const char* GraalRuntime::name_for(StubID id) { - assert(0 <= id && id < number_of_ids, "illegal stub id"); - return _blob_names[id]; -} - -const char* GraalRuntime::name_for_address(address entry) { - for (int id = 0; id < number_of_ids; id++) { - if (entry == entry_for((StubID)id)) return name_for((StubID)id); - } - -#define FUNCTION_CASE(a, f) \ - if ((intptr_t)a == CAST_FROM_FN_PTR(intptr_t, f)) return #f - - FUNCTION_CASE(entry, os::javaTimeMillis); - FUNCTION_CASE(entry, os::javaTimeNanos); - FUNCTION_CASE(entry, SharedRuntime::OSR_migration_end); - FUNCTION_CASE(entry, SharedRuntime::d2f); - FUNCTION_CASE(entry, SharedRuntime::d2i); - FUNCTION_CASE(entry, SharedRuntime::d2l); - FUNCTION_CASE(entry, SharedRuntime::dcos); - FUNCTION_CASE(entry, SharedRuntime::dexp); - FUNCTION_CASE(entry, SharedRuntime::dlog); - FUNCTION_CASE(entry, SharedRuntime::dlog10); - FUNCTION_CASE(entry, SharedRuntime::dpow); - FUNCTION_CASE(entry, SharedRuntime::drem); - FUNCTION_CASE(entry, SharedRuntime::dsin); - FUNCTION_CASE(entry, SharedRuntime::dtan); - FUNCTION_CASE(entry, SharedRuntime::f2i); - FUNCTION_CASE(entry, SharedRuntime::f2l); - FUNCTION_CASE(entry, SharedRuntime::frem); - FUNCTION_CASE(entry, SharedRuntime::l2d); - FUNCTION_CASE(entry, SharedRuntime::l2f); - FUNCTION_CASE(entry, SharedRuntime::ldiv); - FUNCTION_CASE(entry, SharedRuntime::lmul); - FUNCTION_CASE(entry, SharedRuntime::lrem); - FUNCTION_CASE(entry, SharedRuntime::lrem); - FUNCTION_CASE(entry, SharedRuntime::dtrace_method_entry); - FUNCTION_CASE(entry, SharedRuntime::dtrace_method_exit); -#ifdef TRACE_HAVE_INTRINSICS - FUNCTION_CASE(entry, TRACE_TIME_METHOD); -#endif - -#undef FUNCTION_CASE -} - - -JRT_ENTRY(void, GraalRuntime::new_instance(JavaThread* thread, Klass* klass)) - assert(klass->is_klass(), "not a class"); - instanceKlassHandle h(thread, klass); - h->check_valid_for_instantiation(true, CHECK); - // make sure klass is initialized - h->initialize(CHECK); - // allocate instance and return via TLS - oop obj = h->allocate_instance(CHECK); - thread->set_vm_result(obj); -JRT_END - -JRT_ENTRY(void, GraalRuntime::new_array(JavaThread* thread, Klass* array_klass, jint length)) - // Note: no handle for klass needed since they 
are not used - // anymore after new_objArray() and no GC can happen before. - // (This may have to change if this code changes!) - assert(array_klass->is_klass(), "not a class"); - oop obj; - if (array_klass->oop_is_typeArray()) { - BasicType elt_type = TypeArrayKlass::cast(array_klass)->element_type(); - obj = oopFactory::new_typeArray(elt_type, length, CHECK); - } else { - Klass* elem_klass = ObjArrayKlass::cast(array_klass)->element_klass(); - obj = oopFactory::new_objArray(elem_klass, length, CHECK); - } - thread->set_vm_result(obj); - // This is pretty rare but this runtime patch is stressful to deoptimization - // if we deoptimize here so force a deopt to stress the path. - if (DeoptimizeALot) { - deopt_caller(); - } -JRT_END - - -JRT_ENTRY(void, GraalRuntime::new_multi_array(JavaThread* thread, Klass* klass, int rank, jint* dims)) - assert(klass->is_klass(), "not a class"); - assert(rank >= 1, "rank must be nonzero"); - oop obj = ArrayKlass::cast(klass)->multi_allocate(rank, dims, CHECK); - thread->set_vm_result(obj); -JRT_END - -JRT_ENTRY(void, GraalRuntime::unimplemented_entry(JavaThread* thread, StubID id)) - tty->print_cr("GraalRuntime::entry_for(%d) returned unimplemented entry point", id); -JRT_END - -extern void vm_exit(int code); - -// Enter this method from compiled code handler below. This is where we transition -// to VM mode. This is done as a helper routine so that the method called directly -// from compiled code does not have to transition to VM. This allows the entry -// method to see if the nmethod that we have just looked up a handler for has -// been deoptimized while we were in the vm. This simplifies the assembly code -// cpu directories. -// -// We are entering here from exception stub (via the entry method below) -// If there is a compiled exception handler in this method, we will continue there; -// otherwise we will unwind the stack and continue at the caller of top frame method -// Note: we enter in Java using a special JRT wrapper. This wrapper allows us to -// control the area where we can allow a safepoint. After we exit the safepoint area we can -// check to see if the handler we are going to return is now in a nmethod that has -// been deoptimized. If that is the case we return the deopt blob -// unpack_with_exception entry instead. This makes life for the exception blob easier -// because making that same check and diverting is painful from assembly language. -JRT_ENTRY_NO_ASYNC(static address, exception_handler_for_pc_helper(JavaThread* thread, oopDesc* ex, address pc, nmethod*& nm)) - // Reset method handle flag. 
- thread->set_is_method_handle_return(false); - - Handle exception(thread, ex); - nm = CodeCache::find_nmethod(pc); - assert(nm != NULL, "this is not an nmethod"); - // Adjust the pc as needed/ - if (nm->is_deopt_pc(pc)) { - RegisterMap map(thread, false); - frame exception_frame = thread->last_frame().sender(&map); - // if the frame isn't deopted then pc must not correspond to the caller of last_frame - assert(exception_frame.is_deoptimized_frame(), "must be deopted"); - pc = exception_frame.pc(); - } -#ifdef ASSERT - assert(exception.not_null(), "NULL exceptions should be handled by throw_exception"); - assert(exception->is_oop(), "just checking"); - // Check that exception is a subclass of Throwable, otherwise we have a VerifyError - if (!(exception->is_a(SystemDictionary::Throwable_klass()))) { - if (ExitVMOnVerifyError) vm_exit(-1); - ShouldNotReachHere(); - } -#endif - - // Check the stack guard pages and reenable them if necessary and there is - // enough space on the stack to do so. Use fast exceptions only if the guard - // pages are enabled. - bool guard_pages_enabled = thread->stack_yellow_zone_enabled(); - if (!guard_pages_enabled) guard_pages_enabled = thread->reguard_stack(); - - if (JvmtiExport::can_post_on_exceptions()) { - // To ensure correct notification of exception catches and throws - // we have to deoptimize here. If we attempted to notify the - // catches and throws during this exception lookup it's possible - // we could deoptimize on the way out of the VM and end back in - // the interpreter at the throw site. This would result in double - // notifications since the interpreter would also notify about - // these same catches and throws as it unwound the frame. - - RegisterMap reg_map(thread); - frame stub_frame = thread->last_frame(); - frame caller_frame = stub_frame.sender(®_map); - - // We don't really want to deoptimize the nmethod itself since we - // can actually continue in the exception handler ourselves but I - // don't see an easy way to have the desired effect. - Deoptimization::deoptimize_frame(thread, caller_frame.id(), Deoptimization::Reason_constraint); - assert(caller_is_deopted(), "Must be deoptimized"); - - return SharedRuntime::deopt_blob()->unpack_with_exception_in_tls(); - } - - // ExceptionCache is used only for exceptions at call sites and not for implicit exceptions - if (guard_pages_enabled) { - address fast_continuation = nm->handler_for_exception_and_pc(exception, pc); - if (fast_continuation != NULL) { - // Set flag if return address is a method handle call site. - thread->set_is_method_handle_return(nm->is_method_handle_return(pc)); - return fast_continuation; - } - } - - // If the stack guard pages are enabled, check whether there is a handler in - // the current method. Otherwise (guard pages disabled), force an unwind and - // skip the exception cache update (i.e., just leave continuation==NULL). 
- address continuation = NULL; - if (guard_pages_enabled) { - - // New exception handling mechanism can support inlined methods - // with exception handlers since the mappings are from PC to PC - - // debugging support - // tracing - if (TraceExceptions) { - ttyLocker ttyl; - ResourceMark rm; - int offset = pc - nm->code_begin(); - tty->print_cr("Exception <%s> (0x%x) thrown in compiled method <%s> at PC " PTR_FORMAT " [" PTR_FORMAT "+%d] for thread 0x%x", - exception->print_value_string(), (address)exception(), nm->method()->print_value_string(), pc, nm->code_begin(), offset, thread); - } - // for AbortVMOnException flag - NOT_PRODUCT(Exceptions::debug_check_abort(exception)); - - // Clear out the exception oop and pc since looking up an - // exception handler can cause class loading, which might throw an - // exception and those fields are expected to be clear during - // normal bytecode execution. - thread->set_exception_oop(NULL); - thread->set_exception_pc(NULL); - - continuation = SharedRuntime::compute_compiled_exc_handler(nm, pc, exception, false, false); - // If an exception was thrown during exception dispatch, the exception oop may have changed - thread->set_exception_oop(exception()); - thread->set_exception_pc(pc); - - // the exception cache is used only by non-implicit exceptions - if (continuation != NULL && !SharedRuntime::deopt_blob()->contains(continuation)) { - nm->add_handler_for_exception_and_pc(exception, pc, continuation); - } - } - - thread->set_vm_result(exception()); - // Set flag if return address is a method handle call site. - thread->set_is_method_handle_return(nm->is_method_handle_return(pc)); - - if (TraceExceptions) { - ttyLocker ttyl; - ResourceMark rm; - tty->print_cr("Thread " PTR_FORMAT " continuing at PC " PTR_FORMAT " for exception thrown at PC " PTR_FORMAT, - thread, continuation, pc); - } - - return continuation; -JRT_END - -// Enter this method from compiled code only if there is a Java exception handler -// in the method handling the exception. -// We are entering here from exception stub. We don't do a normal VM transition here. -// We do it in a helper. This is so we can check to see if the nmethod we have just -// searched for an exception handler has been deoptimized in the meantime. -address GraalRuntime::exception_handler_for_pc(JavaThread* thread) { - oop exception = thread->exception_oop(); - address pc = thread->exception_pc(); - // Still in Java mode - DEBUG_ONLY(ResetNoHandleMark rnhm); - nmethod* nm = NULL; - address continuation = NULL; - { - // Enter VM mode by calling the helper - ResetNoHandleMark rnhm; - continuation = exception_handler_for_pc_helper(thread, exception, pc, nm); - } - // Back in JAVA, use no oops DON'T safepoint - - // Now check to see if the nmethod we were called from is now deoptimized. 
- // If so we must return to the deopt blob and deoptimize the nmethod - if (nm != NULL && caller_is_deopted()) { - continuation = SharedRuntime::deopt_blob()->unpack_with_exception_in_tls(); - } - - assert(continuation != NULL, "no handler found"); - return continuation; -} - -JRT_ENTRY(void, GraalRuntime::graal_create_null_exception(JavaThread* thread)) - thread->set_vm_result(Exceptions::new_exception(thread, vmSymbols::java_lang_NullPointerException(), NULL)()); -JRT_END - -JRT_ENTRY(void, GraalRuntime::graal_create_out_of_bounds_exception(JavaThread* thread, jint index)) - char message[jintAsStringSize]; - sprintf(message, "%d", index); - thread->set_vm_result(Exceptions::new_exception(thread, vmSymbols::java_lang_ArrayIndexOutOfBoundsException(), message)()); -JRT_END - -JRT_ENTRY_NO_ASYNC(void, GraalRuntime::graal_monitorenter(JavaThread* thread, oopDesc* obj, BasicLock* lock)) - if (TraceGraal >= 3) { - char type[O_BUFLEN]; - obj->klass()->name()->as_C_string(type, O_BUFLEN); - markOop mark = obj->mark(); - tty->print_cr("%s: entered locking slow case with obj=" INTPTR_FORMAT ", type=%s, mark=" INTPTR_FORMAT ", lock=" INTPTR_FORMAT, thread->name(), obj, type, mark, lock); - tty->flush(); - } -#ifdef ASSERT - if (PrintBiasedLockingStatistics) { - Atomic::inc(BiasedLocking::slow_path_entry_count_addr()); - } -#endif - Handle h_obj(thread, obj); - assert(h_obj()->is_oop(), "must be NULL or an object"); - if (UseBiasedLocking) { - // Retry fast entry if bias is revoked to avoid unnecessary inflation - ObjectSynchronizer::fast_enter(h_obj, lock, true, CHECK); - } else { - if (GraalUseFastLocking) { - // When using fast locking, the compiled code has already tried the fast case - ObjectSynchronizer::slow_enter(h_obj, lock, THREAD); - } else { - ObjectSynchronizer::fast_enter(h_obj, lock, false, THREAD); - } - } - if (TraceGraal >= 3) { - tty->print_cr("%s: exiting locking slow with obj=" INTPTR_FORMAT, thread->name(), obj); - } -JRT_END - - -JRT_LEAF(void, GraalRuntime::graal_monitorexit(JavaThread* thread, oopDesc* obj, BasicLock* lock)) - assert(thread == JavaThread::current(), "threads must correspond"); - assert(thread->last_Java_sp(), "last_Java_sp must be set"); - // monitorexit is non-blocking (leaf routine) => no exceptions can be thrown - EXCEPTION_MARK; - -#ifdef DEBUG - if (!obj->is_oop()) { - ResetNoHandleMark rhm; - nmethod* method = thread->last_frame().cb()->as_nmethod_or_null(); - if (method != NULL) { - tty->print_cr("ERROR in monitorexit in method %s wrong obj " INTPTR_FORMAT, method->name(), obj); - } - thread->print_stack_on(tty); - assert(false, "invalid lock object pointer dected"); - } -#endif - - if (GraalUseFastLocking) { - // When using fast locking, the compiled code has already tried the fast case - ObjectSynchronizer::slow_exit(obj, lock, THREAD); - } else { - ObjectSynchronizer::fast_exit(obj, lock, THREAD); - } - if (TraceGraal >= 3) { - char type[O_BUFLEN]; - obj->klass()->name()->as_C_string(type, O_BUFLEN); - tty->print_cr("%s: exited locking slow case with obj=" INTPTR_FORMAT ", type=%s, mark=" INTPTR_FORMAT ", lock=" INTPTR_FORMAT, thread->name(), obj, type, obj->mark(), lock); - tty->flush(); - } -JRT_END - -JRT_ENTRY(void, GraalRuntime::graal_log_object(JavaThread* thread, oop obj, jint flags)) - bool string = mask_bits_are_true(flags, LOG_OBJECT_STRING); - bool address = mask_bits_are_true(flags, LOG_OBJECT_ADDRESS); - bool newline = mask_bits_are_true(flags, LOG_OBJECT_NEWLINE); - if (!string) { - if (!address && obj->is_oop_or_null(true)) { - char 
-      tty->print("%s@%p", obj->klass()->name()->as_C_string(buf, O_BUFLEN), obj);
-    } else {
-      tty->print("%p", obj);
-    }
-  } else {
-    ResourceMark rm;
-    assert(obj != NULL && java_lang_String::is_instance(obj), "must be");
-    char *buf = java_lang_String::as_utf8_string(obj);
-    tty->print(buf);
-  }
-  if (newline) {
-    tty->cr();
-  }
-JRT_END
-
-JRT_ENTRY(void, GraalRuntime::graal_vm_error(JavaThread* thread, oop where, oop format, jlong value))
-  ResourceMark rm;
-  assert(where == NULL || java_lang_String::is_instance(where), "must be");
-  const char *error_msg = where == NULL ? "" : java_lang_String::as_utf8_string(where);
-  char *detail_msg = NULL;
-  if (format != NULL) {
-    const char* buf = java_lang_String::as_utf8_string(format);
-    size_t detail_msg_length = strlen(buf) * 2;
-    detail_msg = (char *) NEW_RESOURCE_ARRAY(u_char, detail_msg_length);
-    jio_snprintf(detail_msg, detail_msg_length, buf, value);
-  }
-  report_vm_error(__FILE__, __LINE__, error_msg, detail_msg);
-JRT_END
-
-JRT_ENTRY(void, GraalRuntime::graal_log_printf(JavaThread* thread, oop format, jlong val))
-  ResourceMark rm;
-  assert(format != NULL && java_lang_String::is_instance(format), "must be");
-  char *buf = java_lang_String::as_utf8_string(format);
-  tty->print(buf, val);
-JRT_END
-
-JRT_ENTRY(void, GraalRuntime::graal_log_primitive(JavaThread* thread, jchar typeChar, jlong value, jboolean newline))
-  union {
-    jlong l;
-    jdouble d;
-    jfloat f;
-  } uu;
-  uu.l = value;
-  switch (typeChar) {
-    case 'z': tty->print(value == 0 ? "false" : "true"); break;
-    case 'b': tty->print("%d", (jbyte) value); break;
-    case 'c': tty->print("%c", (jchar) value); break;
-    case 's': tty->print("%d", (jshort) value); break;
-    case 'i': tty->print("%d", (jint) value); break;
-    case 'f': tty->print("%f", uu.f); break;
-    case 'j': tty->print(INT64_FORMAT, value); break;
-    case 'd': tty->print("%lf", uu.d); break;
-    default: assert(false, "unknown typeChar"); break;
-  }
-  if (newline) {
-    tty->cr();
-  }
-JRT_END
-
-// JVM_InitializeGraalRuntime
-JVM_ENTRY(jobject, JVM_InitializeGraalRuntime(JNIEnv *env, jclass graalclass))
-  return VMToCompiler::graalRuntimePermObject();
-JVM_END
+/*
+ * Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+#include "precompiled.hpp"
+#include "runtime/interfaceSupport.hpp"
+#include "prims/jvm.h"
+#include "graal/graalRuntime.hpp"
+#include "graal/graalVMToCompiler.hpp"
+#include "asm/codeBuffer.hpp"
+#include "runtime/biasedLocking.hpp"
+
+// Implementation of GraalStubAssembler
+
+GraalStubAssembler::GraalStubAssembler(CodeBuffer* code, const char * name, int stub_id) : MacroAssembler(code) {
+  _name = name;
+  _must_gc_arguments = false;
+  _frame_size = no_frame_size;
+  _num_rt_args = 0;
+  _stub_id = stub_id;
+}
+
+
+void GraalStubAssembler::set_info(const char* name, bool must_gc_arguments) {
+  _name = name;
+  _must_gc_arguments = must_gc_arguments;
+}
+
+
+void GraalStubAssembler::set_frame_size(int size) {
+  if (_frame_size == no_frame_size) {
+    _frame_size = size;
+  }
+  assert(_frame_size == size, "can't change the frame size");
+}
+
+
+void GraalStubAssembler::set_num_rt_args(int args) {
+  if (_num_rt_args == 0) {
+    _num_rt_args = args;
+  }
+  assert(_num_rt_args == args, "can't change the number of args");
+}
+
+// Implementation of GraalRuntime
+
+CodeBlob* GraalRuntime::_blobs[GraalRuntime::number_of_ids];
+const char *GraalRuntime::_blob_names[] = {
+  GRAAL_STUBS(STUB_NAME, LAST_STUB_NAME)
+};
+
+// Simple helper to see if the caller of a runtime stub which
+// entered the VM has been deoptimized
+
+static bool caller_is_deopted() {
+  JavaThread* thread = JavaThread::current();
+  RegisterMap reg_map(thread, false);
+  frame runtime_frame = thread->last_frame();
+  frame caller_frame = runtime_frame.sender(&reg_map);
+  assert(caller_frame.is_compiled_frame(), "must be compiled");
+  return caller_frame.is_deoptimized_frame();
+}
+
+// Stress deoptimization
+static void deopt_caller() {
+  if (!caller_is_deopted()) {
+    JavaThread* thread = JavaThread::current();
+    RegisterMap reg_map(thread, false);
+    frame runtime_frame = thread->last_frame();
+    frame caller_frame = runtime_frame.sender(&reg_map);
+    Deoptimization::deoptimize_frame(thread, caller_frame.id(), Deoptimization::Reason_constraint);
+    assert(caller_is_deopted(), "Must be deoptimized");
+  }
+}
+
+static bool setup_code_buffer(CodeBuffer* code) {
+  // Preinitialize the consts section to some large size:
+  int locs_buffer_size = 1 * (relocInfo::length_limit + sizeof(relocInfo));
+  char* locs_buffer = NEW_RESOURCE_ARRAY(char, locs_buffer_size);
+  code->insts()->initialize_shared_locs((relocInfo*)locs_buffer,
+                                        locs_buffer_size / sizeof(relocInfo));
+
+  // Global stubs have neither constants nor local stubs
+  code->initialize_consts_size(0);
+  code->initialize_stubs_size(0);
+
+  return true;
+}
+
+void GraalRuntime::generate_blob_for(BufferBlob* buffer_blob, StubID id) {
+  assert(0 <= id && id < number_of_ids, "illegal stub id");
+  ResourceMark rm;
+  // create code buffer for code storage
+  CodeBuffer code(buffer_blob);
+
+  setup_code_buffer(&code);
+
+  // create assembler for code generation
+  GraalStubAssembler* sasm = new GraalStubAssembler(&code, name_for(id), id);
+  // generate code for runtime stub
+  OopMapSet* oop_maps;
+  oop_maps = generate_code_for(id, sasm);
+  assert(oop_maps == NULL || sasm->frame_size() != GraalStubAssembler::no_frame_size,
+         "if stub has an oop map it must have a valid frame size");
+
+#ifdef ASSERT
+  // Make sure that stubs that need oopmaps have them
+  switch (id) {
+    // These stubs don't need to have an oopmap
+    case graal_slow_subtype_check_id:
+#if defined(SPARC) || defined(PPC)
+    case handle_exception_nofpu_id:  // Unused on sparc
+#endif
+    case graal_verify_oop_id:
+    case graal_unwind_exception_call_id:
+    case graal_OSR_migration_end_id:
+    case graal_arithmetic_frem_id:
+    case graal_arithmetic_drem_id:
+    case graal_set_deopt_info_id:
+      break;
+
+    // All other stubs should have oopmaps
+    default:
+      assert(oop_maps != NULL, "must have an oopmap");
+  }
+#endif
+
+  // align so printing shows nop's instead of random code at the end (SimpleStubs are aligned)
+  sasm->align(BytesPerWord);
+  // make sure all code is in code buffer
+  sasm->flush();
+  // create blob - distinguish a few special cases
+  CodeBlob* blob = RuntimeStub::new_runtime_stub(name_for(id),
+                                                 &code,
+                                                 CodeOffsets::frame_never_safe,
+                                                 sasm->frame_size(),
+                                                 oop_maps,
+                                                 sasm->must_gc_arguments());
+  // install blob
+  assert(blob != NULL, "blob must exist");
+  _blobs[id] = blob;
+}
+
+
+void GraalRuntime::initialize(BufferBlob* blob) {
+  // generate stubs
+  for (int id = 0; id < number_of_ids; id++) generate_blob_for(blob, (StubID)id);
+  // printing
+#ifndef PRODUCT
+  if (GraalPrintSimpleStubs) {
+    ResourceMark rm;
+    for (int id = 0; id < number_of_ids; id++) {
+      _blobs[id]->print();
+      if (_blobs[id]->oop_maps() != NULL) {
+        _blobs[id]->oop_maps()->print();
+      }
+    }
+  }
+#endif
+}
+
+
+CodeBlob* GraalRuntime::blob_for(StubID id) {
+  assert(0 <= id && id < number_of_ids, "illegal stub id");
+  return _blobs[id];
+}
+
+
+const char* GraalRuntime::name_for(StubID id) {
+  assert(0 <= id && id < number_of_ids, "illegal stub id");
+  return _blob_names[id];
+}
+
+const char* GraalRuntime::name_for_address(address entry) {
+  for (int id = 0; id < number_of_ids; id++) {
+    if (entry == entry_for((StubID)id)) return name_for((StubID)id);
+  }
+
+#define FUNCTION_CASE(a, f) \
+  if ((intptr_t)a == CAST_FROM_FN_PTR(intptr_t, f)) return #f
+
+  FUNCTION_CASE(entry, os::javaTimeMillis);
+  FUNCTION_CASE(entry, os::javaTimeNanos);
+  FUNCTION_CASE(entry, SharedRuntime::OSR_migration_end);
+  FUNCTION_CASE(entry, SharedRuntime::d2f);
+  FUNCTION_CASE(entry, SharedRuntime::d2i);
+  FUNCTION_CASE(entry, SharedRuntime::d2l);
+  FUNCTION_CASE(entry, SharedRuntime::dcos);
+  FUNCTION_CASE(entry, SharedRuntime::dexp);
+  FUNCTION_CASE(entry, SharedRuntime::dlog);
+  FUNCTION_CASE(entry, SharedRuntime::dlog10);
+  FUNCTION_CASE(entry, SharedRuntime::dpow);
+  FUNCTION_CASE(entry, SharedRuntime::drem);
+  FUNCTION_CASE(entry, SharedRuntime::dsin);
+  FUNCTION_CASE(entry, SharedRuntime::dtan);
+  FUNCTION_CASE(entry, SharedRuntime::f2i);
+  FUNCTION_CASE(entry, SharedRuntime::f2l);
+  FUNCTION_CASE(entry, SharedRuntime::frem);
+  FUNCTION_CASE(entry, SharedRuntime::l2d);
+  FUNCTION_CASE(entry, SharedRuntime::l2f);
+  FUNCTION_CASE(entry, SharedRuntime::ldiv);
+  FUNCTION_CASE(entry, SharedRuntime::lmul);
+  FUNCTION_CASE(entry, SharedRuntime::lrem);
+  FUNCTION_CASE(entry, SharedRuntime::lrem);
+  FUNCTION_CASE(entry, SharedRuntime::dtrace_method_entry);
+  FUNCTION_CASE(entry, SharedRuntime::dtrace_method_exit);
+#ifdef TRACE_HAVE_INTRINSICS
+  FUNCTION_CASE(entry, TRACE_TIME_METHOD);
+#endif
+
+  ShouldNotReachHere();
+  return NULL;
+
+#undef FUNCTION_CASE
+}
+
+
+JRT_ENTRY(void, GraalRuntime::new_instance(JavaThread* thread, Klass* klass))
+  assert(klass->is_klass(), "not a class");
+  instanceKlassHandle h(thread, klass);
+  h->check_valid_for_instantiation(true, CHECK);
+  // make sure klass is initialized
+  h->initialize(CHECK);
+  // allocate instance and return via TLS
+  oop obj = h->allocate_instance(CHECK);
+  thread->set_vm_result(obj);
+JRT_END
+
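The allocation entries above and below cannot hand their result back through the C return value; new_instance parks the new oop in the thread with set_vm_result() and the compiled caller reads it back after the VM transition. A minimal standalone sketch of that hand-off convention, using mock types instead of the real JavaThread/oop (illustration only, not part of the changeset):

// Mock of the thread-local vm_result hand-off used by the allocation entries.
#include <cassert>

struct MockObject { int klass_id; };

struct MockThread {
  MockObject* vm_result = nullptr;                 // stands in for JavaThread's vm_result slot
  void set_vm_result(MockObject* obj) { vm_result = obj; }
  MockObject* get_and_clear_vm_result() {
    MockObject* r = vm_result;
    vm_result = nullptr;                           // cleared so a stale object is never reused
    return r;
  }
};

// Stands in for a JRT entry such as new_instance: it cannot return the object
// directly, so it parks it in the thread before returning to the stub.
static void runtime_new_instance(MockThread* thread, int klass_id) {
  static MockObject objects[16];
  static int next = 0;
  MockObject* obj = &objects[next++ % 16];
  obj->klass_id = klass_id;
  thread->set_vm_result(obj);
}

int main() {
  MockThread thread;
  runtime_new_instance(&thread, 42);               // what the assembler stub would call
  MockObject* obj = thread.get_and_clear_vm_result();
  assert(obj != nullptr && obj->klass_id == 42);
  assert(thread.vm_result == nullptr);             // slot is clear for the next transition
  return 0;
}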
+JRT_ENTRY(void, GraalRuntime::new_array(JavaThread* thread, Klass* array_klass, jint length))
+  // Note: no handle for klass needed since they are not used
+  //       anymore after new_objArray() and no GC can happen before.
+  //       (This may have to change if this code changes!)
+  assert(array_klass->is_klass(), "not a class");
+  oop obj;
+  if (array_klass->oop_is_typeArray()) {
+    BasicType elt_type = TypeArrayKlass::cast(array_klass)->element_type();
+    obj = oopFactory::new_typeArray(elt_type, length, CHECK);
+  } else {
+    Klass* elem_klass = ObjArrayKlass::cast(array_klass)->element_klass();
+    obj = oopFactory::new_objArray(elem_klass, length, CHECK);
+  }
+  thread->set_vm_result(obj);
+  // This is pretty rare but this runtime patch is stressful to deoptimization
+  // if we deoptimize here so force a deopt to stress the path.
+  if (DeoptimizeALot) {
+    deopt_caller();
+  }
+JRT_END
+
+
+JRT_ENTRY(void, GraalRuntime::new_multi_array(JavaThread* thread, Klass* klass, int rank, jint* dims))
+  assert(klass->is_klass(), "not a class");
+  assert(rank >= 1, "rank must be nonzero");
+  oop obj = ArrayKlass::cast(klass)->multi_allocate(rank, dims, CHECK);
+  thread->set_vm_result(obj);
+JRT_END
+
+JRT_ENTRY(void, GraalRuntime::unimplemented_entry(JavaThread* thread, StubID id))
+  tty->print_cr("GraalRuntime::entry_for(%d) returned unimplemented entry point", id);
+JRT_END
+
+extern void vm_exit(int code);
+
+// Enter this method from compiled code handler below. This is where we transition
+// to VM mode. This is done as a helper routine so that the method called directly
+// from compiled code does not have to transition to VM. This allows the entry
+// method to see if the nmethod that we have just looked up a handler for has
+// been deoptimized while we were in the vm. This simplifies the assembly code
+// in the cpu directories.
+//
+// We are entering here from exception stub (via the entry method below)
+// If there is a compiled exception handler in this method, we will continue there;
+// otherwise we will unwind the stack and continue at the caller of top frame method
+// Note: we enter in Java using a special JRT wrapper. This wrapper allows us to
+// control the area where we can allow a safepoint. After we exit the safepoint area we can
+// check to see if the handler we are going to return is now in a nmethod that has
+// been deoptimized. If that is the case we return the deopt blob
+// unpack_with_exception entry instead. This makes life for the exception blob easier
+// because making that same check and diverting is painful from assembly language.
+JRT_ENTRY_NO_ASYNC(static address, exception_handler_for_pc_helper(JavaThread* thread, oopDesc* ex, address pc, nmethod*& nm))
+  // Reset method handle flag.
+  thread->set_is_method_handle_return(false);
+
+  Handle exception(thread, ex);
+  nm = CodeCache::find_nmethod(pc);
+  assert(nm != NULL, "this is not an nmethod");
+  // Adjust the pc as needed.
+  if (nm->is_deopt_pc(pc)) {
+    RegisterMap map(thread, false);
+    frame exception_frame = thread->last_frame().sender(&map);
+    // if the frame isn't deopted then pc must not correspond to the caller of last_frame
+    assert(exception_frame.is_deoptimized_frame(), "must be deopted");
+    pc = exception_frame.pc();
+  }
+#ifdef ASSERT
+  assert(exception.not_null(), "NULL exceptions should be handled by throw_exception");
+  assert(exception->is_oop(), "just checking");
+  // Check that exception is a subclass of Throwable, otherwise we have a VerifyError
+  if (!(exception->is_a(SystemDictionary::Throwable_klass()))) {
+    if (ExitVMOnVerifyError) vm_exit(-1);
+    ShouldNotReachHere();
+  }
+#endif
+
+  // Check the stack guard pages and reenable them if necessary and there is
+  // enough space on the stack to do so. Use fast exceptions only if the guard
+  // pages are enabled.
+  bool guard_pages_enabled = thread->stack_yellow_zone_enabled();
+  if (!guard_pages_enabled) guard_pages_enabled = thread->reguard_stack();
+
+  if (JvmtiExport::can_post_on_exceptions()) {
+    // To ensure correct notification of exception catches and throws
+    // we have to deoptimize here. If we attempted to notify the
+    // catches and throws during this exception lookup it's possible
+    // we could deoptimize on the way out of the VM and end back in
+    // the interpreter at the throw site. This would result in double
+    // notifications since the interpreter would also notify about
+    // these same catches and throws as it unwound the frame.
+
+    RegisterMap reg_map(thread);
+    frame stub_frame = thread->last_frame();
+    frame caller_frame = stub_frame.sender(&reg_map);
+
+    // We don't really want to deoptimize the nmethod itself since we
+    // can actually continue in the exception handler ourselves but I
+    // don't see an easy way to have the desired effect.
+    Deoptimization::deoptimize_frame(thread, caller_frame.id(), Deoptimization::Reason_constraint);
+    assert(caller_is_deopted(), "Must be deoptimized");
+
+    return SharedRuntime::deopt_blob()->unpack_with_exception_in_tls();
+  }
+
+  // ExceptionCache is used only for exceptions at call sites and not for implicit exceptions
+  if (guard_pages_enabled) {
+    address fast_continuation = nm->handler_for_exception_and_pc(exception, pc);
+    if (fast_continuation != NULL) {
+      // Set flag if return address is a method handle call site.
+      thread->set_is_method_handle_return(nm->is_method_handle_return(pc));
+      return fast_continuation;
+    }
+  }
+
+  // If the stack guard pages are enabled, check whether there is a handler in
+  // the current method. Otherwise (guard pages disabled), force an unwind and
+  // skip the exception cache update (i.e., just leave continuation==NULL).
+  address continuation = NULL;
+  if (guard_pages_enabled) {
+
+    // New exception handling mechanism can support inlined methods
+    // with exception handlers since the mappings are from PC to PC
+
+    // debugging support
+    // tracing
+    if (TraceExceptions) {
+      ttyLocker ttyl;
+      ResourceMark rm;
+      int offset = pc - nm->code_begin();
+      tty->print_cr("Exception <%s> (0x%x) thrown in compiled method <%s> at PC " PTR_FORMAT " [" PTR_FORMAT "+%d] for thread 0x%x",
+                    exception->print_value_string(), (address)exception(), nm->method()->print_value_string(), pc, nm->code_begin(), offset, thread);
+    }
+    // for AbortVMOnException flag
+    NOT_PRODUCT(Exceptions::debug_check_abort(exception));
+
+    // Clear out the exception oop and pc since looking up an
+    // exception handler can cause class loading, which might throw an
+    // exception and those fields are expected to be clear during
+    // normal bytecode execution.
+    thread->set_exception_oop(NULL);
+    thread->set_exception_pc(NULL);
+
+    continuation = SharedRuntime::compute_compiled_exc_handler(nm, pc, exception, false, false);
+    // If an exception was thrown during exception dispatch, the exception oop may have changed
+    thread->set_exception_oop(exception());
+    thread->set_exception_pc(pc);
+
+    // the exception cache is used only by non-implicit exceptions
+    if (continuation != NULL && !SharedRuntime::deopt_blob()->contains(continuation)) {
+      nm->add_handler_for_exception_and_pc(exception, pc, continuation);
+    }
+  }
+
+  thread->set_vm_result(exception());
+  // Set flag if return address is a method handle call site.
+  thread->set_is_method_handle_return(nm->is_method_handle_return(pc));
+
+  if (TraceExceptions) {
+    ttyLocker ttyl;
+    ResourceMark rm;
+    tty->print_cr("Thread " PTR_FORMAT " continuing at PC " PTR_FORMAT " for exception thrown at PC " PTR_FORMAT,
+                  thread, continuation, pc);
+  }
+
+  return continuation;
+JRT_END
+
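The helper above performs the handler lookup inside a VM transition; the wrapper that follows stays in Java mode, calls the helper, and then re-checks whether the calling nmethod was deoptimized while the helper ran, substituting the deopt blob entry if so. A standalone sketch of that two-step contract with mock stand-ins (illustration only; names are hypothetical):

// Mock of the helper/wrapper split: the helper may deoptimize the caller as a
// side effect, and the wrapper's final check keeps a stale compiled handler
// from being returned for a now-deoptimized nmethod.
#include <cassert>

using address = const char*;

static const char* const kHandler    = "compiled-exception-handler";
static const char* const kDeoptEntry = "deopt-blob-unpack-with-exception";

static bool caller_deoptimized = false;

// Stands in for exception_handler_for_pc_helper (runs in VM state).
static address find_handler(bool deoptimizes_caller) {
  if (deoptimizes_caller) {
    caller_deoptimized = true;     // e.g. the JVMTI path deoptimized the caller frame
  }
  return kHandler;                 // a compiled handler was found either way
}

// Stands in for exception_handler_for_pc (back in Java state, no oops, no safepoint).
static address handler_for_pc(bool deoptimizes_caller) {
  address continuation = find_handler(deoptimizes_caller);
  if (caller_deoptimized) {
    continuation = kDeoptEntry;    // must unwind through the deopt blob instead
  }
  assert(continuation != nullptr);
  return continuation;
}

int main() {
  caller_deoptimized = false;
  assert(handler_for_pc(false) == kHandler);
  caller_deoptimized = false;
  assert(handler_for_pc(true) == kDeoptEntry);
  return 0;
}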
+// Enter this method from compiled code only if there is a Java exception handler
+// in the method handling the exception.
+// We are entering here from exception stub. We don't do a normal VM transition here.
+// We do it in a helper. This is so we can check to see if the nmethod we have just
+// searched for an exception handler has been deoptimized in the meantime.
+address GraalRuntime::exception_handler_for_pc(JavaThread* thread) {
+  oop exception = thread->exception_oop();
+  address pc = thread->exception_pc();
+  // Still in Java mode
+  DEBUG_ONLY(ResetNoHandleMark rnhm);
+  nmethod* nm = NULL;
+  address continuation = NULL;
+  {
+    // Enter VM mode by calling the helper
+    ResetNoHandleMark rnhm;
+    continuation = exception_handler_for_pc_helper(thread, exception, pc, nm);
+  }
+  // Back in JAVA, use no oops DON'T safepoint
+
+  // Now check to see if the nmethod we were called from is now deoptimized.
+  // If so we must return to the deopt blob and deoptimize the nmethod
+  if (nm != NULL && caller_is_deopted()) {
+    continuation = SharedRuntime::deopt_blob()->unpack_with_exception_in_tls();
+  }
+
+  assert(continuation != NULL, "no handler found");
+  return continuation;
+}
+
+JRT_ENTRY(void, GraalRuntime::graal_create_null_exception(JavaThread* thread))
+  thread->set_vm_result(Exceptions::new_exception(thread, vmSymbols::java_lang_NullPointerException(), NULL)());
+JRT_END
+
+JRT_ENTRY(void, GraalRuntime::graal_create_out_of_bounds_exception(JavaThread* thread, jint index))
+  char message[jintAsStringSize];
+  sprintf(message, "%d", index);
+  thread->set_vm_result(Exceptions::new_exception(thread, vmSymbols::java_lang_ArrayIndexOutOfBoundsException(), message)());
+JRT_END
+
+JRT_ENTRY_NO_ASYNC(void, GraalRuntime::graal_monitorenter(JavaThread* thread, oopDesc* obj, BasicLock* lock))
+  if (TraceGraal >= 3) {
+    char type[O_BUFLEN];
+    obj->klass()->name()->as_C_string(type, O_BUFLEN);
+    markOop mark = obj->mark();
+    tty->print_cr("%s: entered locking slow case with obj=" INTPTR_FORMAT ", type=%s, mark=" INTPTR_FORMAT ", lock=" INTPTR_FORMAT, thread->name(), obj, type, mark, lock);
+    tty->flush();
+  }
+#ifdef ASSERT
+  if (PrintBiasedLockingStatistics) {
+    Atomic::inc(BiasedLocking::slow_path_entry_count_addr());
+  }
+#endif
+  Handle h_obj(thread, obj);
+  assert(h_obj()->is_oop(), "must be NULL or an object");
+  if (UseBiasedLocking) {
+    // Retry fast entry if bias is revoked to avoid unnecessary inflation
+    ObjectSynchronizer::fast_enter(h_obj, lock, true, CHECK);
+  } else {
+    if (GraalUseFastLocking) {
+      // When using fast locking, the compiled code has already tried the fast case
+      ObjectSynchronizer::slow_enter(h_obj, lock, THREAD);
+    } else {
+      ObjectSynchronizer::fast_enter(h_obj, lock, false, THREAD);
+    }
+  }
+  if (TraceGraal >= 3) {
+    tty->print_cr("%s: exiting locking slow with obj=" INTPTR_FORMAT, thread->name(), obj);
+  }
+JRT_END
+
+
+JRT_LEAF(void, GraalRuntime::graal_monitorexit(JavaThread* thread, oopDesc* obj, BasicLock* lock))
+  assert(thread == JavaThread::current(), "threads must correspond");
+  assert(thread->last_Java_sp(), "last_Java_sp must be set");
+  // monitorexit is non-blocking (leaf routine) => no exceptions can be thrown
+  EXCEPTION_MARK;
+
+#ifdef DEBUG
+  if (!obj->is_oop()) {
+    ResetNoHandleMark rhm;
+    nmethod* method = thread->last_frame().cb()->as_nmethod_or_null();
+    if (method != NULL) {
+      tty->print_cr("ERROR in monitorexit in method %s wrong obj " INTPTR_FORMAT, method->name(), obj);
+    }
+    thread->print_stack_on(tty);
+    assert(false, "invalid lock object pointer detected");
+  }
+#endif
+
+  if (GraalUseFastLocking) {
+    // When using fast locking, the compiled code has already tried the fast case
+    ObjectSynchronizer::slow_exit(obj, lock, THREAD);
+  } else {
+    ObjectSynchronizer::fast_exit(obj, lock, THREAD);
+  }
+  if (TraceGraal >= 3) {
+    char type[O_BUFLEN];
+    obj->klass()->name()->as_C_string(type, O_BUFLEN);
+    tty->print_cr("%s: exited locking slow case with obj=" INTPTR_FORMAT ", type=%s, mark=" INTPTR_FORMAT ", lock=" INTPTR_FORMAT, thread->name(), obj, type, obj->mark(), lock);
+    tty->flush();
+  }
+JRT_END
+
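The GraalUseFastLocking comments above rely on the usual fast-path/slow-path split: compiled code first attempts a cheap inline acquisition and only calls the monitorenter/monitorexit entries when that attempt fails, which is why the runtime can go straight to the slow case. A minimal standalone sketch of that dispatch, using a plain mutex in place of HotSpot's BasicLock and ObjectSynchronizer (illustration only):

// Mock of the fast/slow monitor dispatch; the counter echoes the
// slow_path_entry statistic kept by the real runtime entry.
#include <cassert>
#include <mutex>

struct MockMonitor {
  std::mutex lock;            // stands in for the object's monitor
  int slow_path_entries = 0;  // how often the runtime had to step in
};

// What the compiled code would emit inline: a non-blocking attempt.
static bool fast_enter(MockMonitor& m) {
  return m.lock.try_lock();
}

// What a runtime entry like graal_monitorenter stands in for: the fast case
// has already been tried, so block on the monitor.
static void slow_enter(MockMonitor& m) {
  m.slow_path_entries++;
  m.lock.lock();
}

static void monitor_enter(MockMonitor& m) {
  if (!fast_enter(m)) {       // inline fast path failed (monitor already held)
    slow_enter(m);            // call out to the runtime slow path
  }
}

static void monitor_exit(MockMonitor& m) {
  m.lock.unlock();            // exit releases the same monitor either way
}

int main() {
  MockMonitor m;
  monitor_enter(m);                  // uncontended: fast path succeeds
  monitor_exit(m);
  assert(m.slow_path_entries == 0);  // the runtime was never entered
  return 0;
}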
+JRT_ENTRY(void, GraalRuntime::graal_log_object(JavaThread* thread, oop obj, jint flags))
+  bool string = mask_bits_are_true(flags, LOG_OBJECT_STRING);
+  bool address = mask_bits_are_true(flags, LOG_OBJECT_ADDRESS);
+  bool newline = mask_bits_are_true(flags, LOG_OBJECT_NEWLINE);
+  if (!string) {
+    if (!address && obj->is_oop_or_null(true)) {
+      char buf[O_BUFLEN];
+      tty->print("%s@%p", obj->klass()->name()->as_C_string(buf, O_BUFLEN), obj);
+    } else {
+      tty->print("%p", obj);
+    }
+  } else {
+    ResourceMark rm;
+    assert(obj != NULL && java_lang_String::is_instance(obj), "must be");
+    char *buf = java_lang_String::as_utf8_string(obj);
+    tty->print(buf);
+  }
+  if (newline) {
+    tty->cr();
+  }
+JRT_END
+
+JRT_ENTRY(void, GraalRuntime::graal_vm_error(JavaThread* thread, oop where, oop format, jlong value))
+  ResourceMark rm;
+  assert(where == NULL || java_lang_String::is_instance(where), "must be");
+  const char *error_msg = where == NULL ? "" : java_lang_String::as_utf8_string(where);
+  char *detail_msg = NULL;
+  if (format != NULL) {
+    const char* buf = java_lang_String::as_utf8_string(format);
+    size_t detail_msg_length = strlen(buf) * 2;
+    detail_msg = (char *) NEW_RESOURCE_ARRAY(u_char, detail_msg_length);
+    jio_snprintf(detail_msg, detail_msg_length, buf, value);
+  }
+  report_vm_error(__FILE__, __LINE__, error_msg, detail_msg);
+JRT_END
+
+JRT_ENTRY(void, GraalRuntime::graal_log_printf(JavaThread* thread, oop format, jlong val))
+  ResourceMark rm;
+  assert(format != NULL && java_lang_String::is_instance(format), "must be");
+  char *buf = java_lang_String::as_utf8_string(format);
+  tty->print(buf, val);
+JRT_END
+
+JRT_ENTRY(void, GraalRuntime::graal_log_primitive(JavaThread* thread, jchar typeChar, jlong value, jboolean newline))
+  union {
+    jlong l;
+    jdouble d;
+    jfloat f;
+  } uu;
+  uu.l = value;
+  switch (typeChar) {
+    case 'z': tty->print(value == 0 ? "false" : "true"); break;
+    case 'b': tty->print("%d", (jbyte) value); break;
+    case 'c': tty->print("%c", (jchar) value); break;
+    case 's': tty->print("%d", (jshort) value); break;
+    case 'i': tty->print("%d", (jint) value); break;
+    case 'f': tty->print("%f", uu.f); break;
+    case 'j': tty->print(INT64_FORMAT, value); break;
+    case 'd': tty->print("%lf", uu.d); break;
+    default: assert(false, "unknown typeChar"); break;
+  }
+  if (newline) {
+    tty->cr();
+  }
+JRT_END
+
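graal_log_primitive above receives every primitive widened into a single jlong and recovers float and double values by reinterpreting the bits (the uu union) rather than by numeric conversion. A standalone sketch of that reinterpretation, assuming a little-endian target such as AMD64 where the float pattern sits in the low 32 bits (illustration only):

// Mock of the jlong-to-float/double bit reinterpretation used by the log entry.
#include <cassert>
#include <cstdint>
#include <cstring>

static float bits_to_float(int64_t value) {
  // Low 32 bits carry the float's IEEE-754 pattern (mirrors reading uu.f above
  // on a little-endian machine).
  uint32_t lo = static_cast<uint32_t>(value);
  float f;
  std::memcpy(&f, &lo, sizeof f);   // memcpy sidesteps the union's aliasing questions
  return f;
}

static double bits_to_double(int64_t value) {
  double d;
  std::memcpy(&d, &value, sizeof d);
  return d;
}

int main() {
  // 0x3F800000 is the IEEE-754 pattern for 1.0f; 0x3FF0000000000000 for 1.0.
  assert(bits_to_float(INT64_C(0x3F800000)) == 1.0f);
  assert(bits_to_double(INT64_C(0x3FF0000000000000)) == 1.0);
  return 0;
}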
+JRT_ENTRY(jint, GraalRuntime::graal_identity_hash_code(JavaThread* thread, oop obj))
+  return (jint) obj->identity_hash();
+JRT_END
+
+JRT_ENTRY(jboolean, GraalRuntime::graal_thread_is_interrupted(JavaThread* thread, oop receiver, jboolean clear_interrupted))
+  // Ensure that the C++ Thread and OSThread structures aren't freed before we operate
+  Handle receiverHandle(thread, receiver);
+  JRT_BLOCK
+  MutexLockerEx ml(thread->threadObj() == receiver ? NULL : Threads_lock);
+  JavaThread* receiverThread = java_lang_Thread::thread(receiverHandle());
+  return (jint) Thread::is_interrupted(receiverThread, clear_interrupted != 0);
+  JRT_BLOCK_END
+JRT_END
+
+// JVM_InitializeGraalRuntime
+JVM_ENTRY(jobject, JVM_InitializeGraalRuntime(JNIEnv *env, jclass graalclass))
+  return VMToCompiler::graalRuntimePermObject();
+JVM_END
diff -r 2ed8d74e5984 -r 8a3efb8c831d src/share/vm/graal/graalRuntime.hpp
--- a/src/share/vm/graal/graalRuntime.hpp Fri Dec 14 10:20:54 2012 +0100
+++ b/src/share/vm/graal/graalRuntime.hpp Fri Dec 14 13:02:49 2012 +0100
@@ -100,6 +100,8 @@
   stub(graal_log_object) \
   stub(graal_log_printf) \
   stub(graal_log_primitive) \
+  stub(graal_identity_hash_code) \
+  stub(graal_thread_is_interrupted) \
   last_entry(number_of_ids)
 
 #define DECLARE_STUB_ID(x) x ## _id ,
@@ -144,6 +146,9 @@
   static void graal_vm_error(JavaThread* thread, oop where, oop format, jlong value);
   static void graal_log_printf(JavaThread* thread, oop format, jlong value);
   static void graal_log_primitive(JavaThread* thread, jchar typeChar, jlong value, jboolean newline);
+
+  static jint graal_identity_hash_code(JavaThread* thread, oopDesc* obj);
+  static jboolean graal_thread_is_interrupted(JavaThread* thread, oopDesc* obj, jboolean clear_interrupted);
 
   // Note: Must be kept in sync with constants in com.oracle.graal.snippets.Log
   enum {
diff -r 2ed8d74e5984 -r 8a3efb8c831d src/share/vm/runtime/compilationPolicy.cpp
--- a/src/share/vm/runtime/compilationPolicy.cpp Fri Dec 14 10:20:54 2012 +0100
+++ b/src/share/vm/runtime/compilationPolicy.cpp Fri Dec 14 13:02:49 2012 +0100
@@ -493,6 +493,12 @@
   const char* comment = "backedge_count";
 
   if (is_compilation_enabled() && !m->is_not_osr_compilable() && can_be_compiled(m)) {
+    if (TraceCompilationPolicy) {
+      tty->print("backedge invocation trigger: ");
+      m->print_short_name(tty);
+      tty->print_cr(" ( interpreted " INTPTR_FORMAT ", size=%d, hotCount=%d ) ", (address)m(), m->code_size(), hot_count);
+    }
+
     CompileBroker::compile_method(m, bci, CompLevel_highest_tier, m, hot_count, comment, thread);
 
     NOT_PRODUCT(trace_osr_completion(m->lookup_osr_nmethod_for(bci, CompLevel_highest_tier, true));)
diff -r 2ed8d74e5984 -r 8a3efb8c831d src/share/vm/runtime/deoptimization.cpp
--- a/src/share/vm/runtime/deoptimization.cpp Fri Dec 14 10:20:54 2012 +0100
+++ b/src/share/vm/runtime/deoptimization.cpp Fri Dec 14 13:02:49 2012 +0100
@@ -868,13 +868,19 @@
   }
 }
 
-typedef struct {
-  int offset;
-  BasicType type;
-} ReassignedField;
+class ReassignedField {
+public:
+  int _offset;
+  BasicType _type;
+public:
+  ReassignedField() {
+    _offset = 0;
+    _type = T_ILLEGAL;
+  }
+};
 
 int compare(ReassignedField* left, ReassignedField* right) {
-  return left->offset - right->offset;
+  return left->_offset - right->_offset;
 }
 
 // Restore fields of an eliminated instance object using the same field order
@@ -888,8 +894,8 @@
   for (AllFieldStream fs(klass); !fs.done(); fs.next()) {
     if (!fs.access_flags().is_static()) {
       ReassignedField field;
-      field.offset = fs.offset();
-      field.type = FieldType::basic_type(fs.signature());
+      field._offset = fs.offset();
+      field._type = FieldType::basic_type(fs.signature());
       fields->append(field);
     }
   }
@@ -897,8 +903,8 @@
   for (int i = 0; i < fields->length(); i++) {
     intptr_t val;
     StackValue* value = StackValue::create_stack_value(fr, reg_map, sv->field_at(svIndex));
-    int offset = fields->at(i).offset;
-    BasicType type = fields->at(i).type;
+    int offset = fields->at(i)._offset;
+    BasicType type = fields->at(i)._type;
     switch (type) {
       case T_OBJECT: case T_ARRAY:
         assert(value->type() == T_OBJECT, "Agreement.");
diff -r 2ed8d74e5984 -r 8a3efb8c831d src/share/vm/runtime/globals.hpp
--- a/src/share/vm/runtime/globals.hpp Fri Dec 14 10:20:54 2012 +0100
+++ b/src/share/vm/runtime/globals.hpp Fri Dec 14 13:02:49 2012 +0100
@@ -2927,7 +2927,7 @@
           "if non-zero, start verifying C heap after Nth call to " \
           "malloc/realloc/free") \
           \
-  product(intx, TypeProfileWidth, 2, \
+  product_pd(intx, TypeProfileWidth, \
           "number of receiver types to record in call/cast profile") \
           \
   develop(intx, BciProfileWidth, 2, \
diff -r 2ed8d74e5984 -r 8a3efb8c831d src/share/vm/runtime/vframeArray.cpp
--- a/src/share/vm/runtime/vframeArray.cpp Fri Dec 14 10:20:54 2012 +0100
+++ b/src/share/vm/runtime/vframeArray.cpp Fri Dec 14 13:02:49 2012 +0100
@@ -311,6 +311,10 @@
     }
   }
 
+  if (PrintDeoptimizationDetails) {
+    tty->print_cr("Expressions size: %d", expressions()->size());
+  }
+
   // Unpack expression stack
   // If this is an intermediate frame (i.e. not top frame) then this
   // only unpacks the part of the expression stack not used by callee
@@ -323,9 +327,25 @@
     switch(value->type()) {
       case T_INT:
         *addr = value->get_int();
+#ifndef PRODUCT
+        if (PrintDeoptimizationDetails) {
+          tty->print_cr("Reconstructed expression %d (INT): %d", i, (int)(*addr));
+        }
+#endif
         break;
       case T_OBJECT:
         *addr = value->get_int(T_OBJECT);
+#ifndef PRODUCT
+        if (PrintDeoptimizationDetails) {
+          tty->print("Reconstructed expression %d (OBJECT): ", i);
+          oop o = (oop)(*addr);
+          if (o == NULL) {
+            tty->print_cr("NULL");
+          } else {
+            tty->print_cr(err_msg("%s", o->klass()->name()->as_C_string()));
+          }
+        }
+#endif
         break;
       case T_CONFLICT:
         // A dead stack slot. Initialize to null in case it is an oop.
@@ -344,9 +364,25 @@
     switch(value->type()) {
       case T_INT:
         *addr = value->get_int();
+#ifndef PRODUCT
+        if (PrintDeoptimizationDetails) {
+          tty->print_cr("Reconstructed local %d (INT): %d", i, (int)(*addr));
+        }
+#endif
        break;
      case T_OBJECT:
        *addr = value->get_int(T_OBJECT);
+#ifndef PRODUCT
+        if (PrintDeoptimizationDetails) {
+          tty->print("Reconstructed local %d (OBJECT): ", i);
+          oop o = (oop)(*addr);
+          if (o == NULL) {
+            tty->print_cr("NULL");
+          } else {
+            tty->print_cr(err_msg("%s", o->klass()->name()->as_C_string()));
+          }
+        }
+#endif
        break;
      case T_CONFLICT:
        // A dead location. If it is an oop then we need a NULL to prevent GC from following it
@@ -388,18 +424,13 @@
   }
 
 #ifndef PRODUCT
-  if (TraceDeoptimization && Verbose) {
+  if (PrintDeoptimizationDetails) {
     ttyLocker ttyl;
     tty->print_cr("[%d Interpreted Frame]", ++unpack_counter);
     iframe()->print_on(tty);
     RegisterMap map(thread);
     vframe* f = vframe::new_vframe(iframe(), &map, thread);
     f->print();
-
-    tty->print_cr("locals size %d", locals()->size());
-    tty->print_cr("expression size %d", expressions()->size());
-
-    method()->print_value();
     tty->cr();
     // method()->print_codes();
   } else if (TraceDeoptimization) {